
munge.rs

#![allow(unused)]
#[macro_use]
extern crate slog;
#[macro_use]
extern crate markets;

use std::io::{self, prelude::*};
use std::fs;
use std::path::{Path, PathBuf};
use std::time::*;
use pretty_toa::ThousandsSep;
use structopt::StructOpt;
use serde::{Serialize, Deserialize};
use slog::Drain;
use chrono::{DateTime, Utc, NaiveDateTime};
use markets::crypto::{Exchange, Ticker, Side, Currency};

/// Print a formatted message to stderr and exit with a non-zero status.
macro_rules! fatal { ($fmt:expr, $($args:tt)*) => {{
    eprintln!($fmt, $($args)*);
    std::process::exit(1);
}}}

/// Emit a progress log line every this many parsed rows.
const PROGRESS_EVERY: usize = 1024 * 1024 * 4;
#[derive(Debug, StructOpt)]
#[structopt(rename_all = "kebab-case")]
enum Opt {
    /// Filter trades-csv by start,end range and save subset to output-path
    ///
    /// Note: csv assumed to be pre-sorted by time (ascending)
    ///
    Range {
        /// Path to CSV file with trades data
        #[structopt(short = "f", long = "trades-csv")]
        #[structopt(parse(from_os_str))]
        trades_csv: PathBuf,

        /// Where to save the query results (CSV output)
        #[structopt(short = "o", long = "output-path")]
        #[structopt(parse(from_os_str))]
        output_path: PathBuf,

        /// rfc3339 format ("YYYY-MM-DDTHH:MM:SSZ")
        #[structopt(short = "s", long = "start")]
        start: DateTime<Utc>,

        /// rfc3339 format ("YYYY-MM-DDTHH:MM:SSZ")
        #[structopt(short = "e", long = "end")]
        end: DateTime<Utc>,
    },

    /// Convert the original csv into a format ready to be ingested via COPY
    ///
    /// 1. server_time of 0 -> NULL
    /// 2. side of "na" -> NULL
    PrepPostgres {
        /// Path to CSV file with trades data
        #[structopt(short = "f", long = "trades-csv")]
        #[structopt(parse(from_os_str))]
        trades_csv: PathBuf,

        /// Where to save the query results (CSV output)
        #[structopt(short = "o", long = "output-path")]
        #[structopt(parse(from_os_str))]
        output_path: PathBuf,
    },

    /// Print the numeric codes and SQL INSERT statements for sides, exchanges, and currencies
    ListCodes,

    /*
    Binarize {
        /// Path to CSV file with trades data
        #[structopt(short = "f", long = "trades-csv")]
        #[structopt(parse(from_os_str))]
        trades_csv: PathBuf,

        /// Where to save the binary-serialized data
        #[structopt(short = "o", long = "output-path")]
        #[structopt(parse(from_os_str))]
        output_path: PathBuf,
    }
    */
}
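// Example invocations (a sketch, not taken from the original file: the binary name
// `munge` is assumed from the file name, and all paths/dates are placeholders):
//
//   munge range --trades-csv /path/to/trades.csv --output-path subset.csv \
//       --start "2019-01-01T00:00:00Z" --end "2019-02-01T00:00:00Z"
//
//   munge prep-postgres --trades-csv /path/to/trades.csv --output-path pg-ready.csv
//
//   munge list-codes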
/// A single trade record as it appears in the source CSV.
#[derive(Deserialize)]
struct Trade {
    /// Unix nanoseconds
    pub time: u64,
    pub exch: Exchange,
    pub ticker: Ticker,
    //pub side: Option<Side>,
    pub price: f64,
    pub amount: f64,
}

/// Borrowed view of a raw CSV row, used by the prep-postgres command.
#[derive(Deserialize, Debug)]
struct PgBuilder<'a> {
    pub time: u64,
    pub exch: Exchange,
    pub ticker: Ticker,
    pub side: Option<&'a str>,
    pub price: f64,
    pub amount: f64,
    pub server_time: u64,
}

/// Normalized row written to the Postgres-ready CSV output.
#[derive(Serialize, Debug)]
struct PgRow {
    pub time: DateTime<Utc>,
    pub exch: u8,
    pub base: u8,
    pub quote: u8,
    pub amount: f64,
    pub price: f64,
    pub side: Option<u8>,
    pub server_time: Option<DateTime<Utc>>,
}
/// Convert a Unix-nanoseconds timestamp to a UTC `DateTime`.
fn nanos_to_utc(nanos: u64) -> DateTime<Utc> {
    const ONE_SECOND: u64 = 1_000_000_000;
    let sec: i64 = (nanos / ONE_SECOND) as i64;
    let nsec: u32 = (nanos % ONE_SECOND) as u32;
    let naive = NaiveDateTime::from_timestamp(sec, nsec);
    DateTime::from_utc(naive, Utc)
}

/// Rows per second over `span`, guarding against division by a (near) zero duration.
fn per_sec(n: usize, span: Duration) -> f64 {
    if n == 0 || span < Duration::from_micros(1) { return 0.0 }
    let s: f64 = span.as_nanos() as f64 / 1e9f64;
    n as f64 / s
}

/// Convert a UTC `DateTime` back to Unix nanoseconds (inverse of `nanos_to_utc`).
fn nanos(utc: DateTime<Utc>) -> u64 {
    (utc.timestamp() as u64) * 1_000_000_000_u64 + (utc.timestamp_subsec_nanos() as u64)
}
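// A minimal round-trip check for the time helpers above (a sketch added for
// illustration; the module, test name, and sample value are not from the original file):
#[cfg(test)]
mod time_helper_tests {
    use super::*;

    #[test]
    fn nanos_round_trip() {
        // 2019-01-01T00:00:00.123456789Z expressed as Unix nanoseconds
        let t: u64 = 1_546_300_800_123_456_789;
        assert_eq!(nanos(nanos_to_utc(t)), t);
    }
}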
fn run(start: Instant, logger: &slog::Logger) -> Result<usize, String> {
    let opt = Opt::from_args();
    let mut n = 0;

    match opt {
        Opt::PrepPostgres { trades_csv, output_path } => {
            let logger = logger.new(o!("cmd" => "prep-postgres"));

            info!(logger, "beginning prep-postgres cmd";
                "trades_csv" => %trades_csv.display(),
                "output_path" => %output_path.display(),
            );

            if ! trades_csv.exists() { return Err(format!("--trades-csv path does not exist: {}", trades_csv.display())) }

            info!(logger, "opening trades_csv file");
            let rdr = fs::File::open(&trades_csv)
                .map_err(|e| format!("opening trades csv file failed: {} (tried to open {})", e, trades_csv.display()))?;
            let rdr = io::BufReader::new(rdr);
            let mut rdr = csv::Reader::from_reader(rdr);

            info!(logger, "opening output file for writing");
            let wtr = fs::File::create(&output_path)
                .map_err(|e| format!("opening output file failed: {} (tried to open {} for writing)", e, output_path.display()))?;
            let wtr = io::BufWriter::new(wtr);
            let mut wtr = csv::Writer::from_writer(wtr);

            let headers: csv::StringRecord = rdr.headers().map_err(|e| format!("failed to parse CSV headers: {}", e))?.clone();
            let mut row = csv::StringRecord::new();
            // output headers are written automatically by `wtr.serialize` from PgRow's field names
            //wtr.write_record(&headers).map_err(|e| format!("writing headers row failed: {}", e))?;

            while rdr.read_record(&mut row)
                .map_err(|e| {
                    format!("reading row {} failed: {}", (n+1).thousands_sep(), e)
                })?
            {
                let bldr: PgBuilder = row.deserialize(Some(&headers)).map_err(|e| format!("deser failed: {}", e))?;
                let PgBuilder { time, exch, ticker, side, price, amount, server_time } = bldr;

                let time = nanos_to_utc(time);
                let exch = u8::from(exch);
                let base = u8::from(ticker.base);
                let quote = u8::from(ticker.quote);

                // "bid"/"ask" become numeric codes; anything else (e.g. "na") becomes NULL
                let side: Option<u8> = match side {
                    Some("bid") => Some(1),
                    Some("ask") => Some(2),
                    _ => None,
                };

                // a server_time of 0 is the sentinel for "not available" -> NULL
                let server_time = match server_time {
                    0 => None,
                    x => Some(nanos_to_utc(x)),
                };

                let pg_row = PgRow { time, exch, base, quote, amount, price, side, server_time };
                wtr.serialize(&pg_row).map_err(|e| format!("serializing PgRow to csv failed: {}", e))?;

                n += 1;

                if n % PROGRESS_EVERY == 0 {
                    info!(logger, "parsing/writing csv rows"; "n" => %n.thousands_sep());
                }
            }
        }

        Opt::ListCodes => {
            println!("side: {:?} {}", Side::Bid, u8::from(Side::Bid));
            println!("side: {:?} {}", Side::Ask, u8::from(Side::Ask));
            println!();
            for exch in Exchange::all() {
                println!("INSERT INTO exchanges (id, symbol) VALUES ({}, '{}');", u8::from(exch), exch.as_str());
            }
            for currency in Currency::all() {
                println!("INSERT INTO currencies (id, symbol) VALUES ({}, '{}');", u8::from(currency), currency.as_str());
            }
        }

        Opt::Range { trades_csv, output_path, start, end } => {
            let logger = logger.new(o!("cmd" => "range"));

            info!(logger, "beginning range cmd";
                "trades_csv" => %trades_csv.display(),
                "output_path" => %output_path.display(),
                "start" => %start,
                "end" => %end,
            );

            if ! trades_csv.exists() { return Err(format!("--trades-csv path does not exist: {}", trades_csv.display())) }

            info!(logger, "opening trades_csv file");
            let rdr = fs::File::open(&trades_csv)
                .map_err(|e| format!("opening trades csv file failed: {} (tried to open {})", e, trades_csv.display()))?;
            let rdr = io::BufReader::new(rdr);
            let mut rdr = csv::Reader::from_reader(rdr);

            info!(logger, "opening output file for writing");
            let wtr = fs::File::create(&output_path)
                .map_err(|e| format!("opening output file failed: {} (tried to open {} for writing)", e, output_path.display()))?;
            let wtr = io::BufWriter::new(wtr);
            let mut wtr = csv::Writer::from_writer(wtr);

            let headers: csv::ByteRecord = rdr.byte_headers().map_err(|e| format!("failed to parse CSV headers: {}", e))?.clone();
            let time_col: usize = headers.iter().position(|x| x == b"time").ok_or_else(|| {
                String::from("no column in headers named 'time'")
            })?;

            let mut row = csv::ByteRecord::new();
            let start_nanos = nanos(start);
            let end_nanos = nanos(end);
            let mut n_written = 0;
            let mut time: u64 = 0;

            info!(logger, "writing headers row to output file");
            wtr.write_byte_record(&headers).map_err(|e| format!("writing csv headers row failed: {}", e))?;

            info!(logger, "entering csv parsing loop");

            'a: while rdr.read_byte_record(&mut row)
                .map_err(|e| {
                    format!("reading row {} failed: {}", (n+1).thousands_sep(), e)
                })?
            {
                let time_bytes = row.get(time_col).ok_or_else(|| "time column not present for row")?;
                time = atoi::atoi(time_bytes).ok_or_else(|| {
                    format!("failed to parse 'time' col value '{}' as integer", std::str::from_utf8(time_bytes).unwrap_or("utf8err"))
                })?;

                n += 1;

                if n % PROGRESS_EVERY == 0 {
                    info!(logger, "parsing csv rows"; "n" => %n.thousands_sep(), "n_written" => %n_written.thousands_sep());
                }

                // the input is assumed to be sorted by time ascending, so rows before
                // `start` are skipped and the loop stops at the first row past `end`
                if time < start_nanos { continue 'a }
                if time > end_nanos { break 'a }

                wtr.write_byte_record(&row).map_err(|e| format!("writing parsed csv row to output file failed: {}", e))?;
                n_written += 1;
            }

            info!(logger, "broke out of read csv loop"; "time" => time, "end_nanos" => end_nanos, "n" => %n.thousands_sep(), "n_written" => %n_written.thousands_sep());

            info!(logger, "dropping wtr");
            drop(wtr);
        }
    }

    Ok(n)
}
fn main() {
    let start = Instant::now();

    let decorator = slog_term::TermDecorator::new().stdout().force_color().build();
    let drain = slog_term::FullFormat::new(decorator).use_utc_timestamp().build().fuse();
    let drain = slog_async::Async::new(drain).chan_size(1024 * 64).thread_name("recv".into()).build().fuse();
    let logger = slog::Logger::root(drain, o!("version" => structopt::clap::crate_version!()));

    match run(start, &logger) {
        Ok(n) => {
            let took = Instant::now() - start;
            info!(logger, "finished in {:?}", took;
                "n rows" => %n.thousands_sep(),
                "rows/sec" => &((per_sec(n, took) * 100.0).round() / 100.0).thousands_sep(),
            );
        }

        Err(e) => {
            crit!(logger, "run failed: {:?}", e);
            eprintln!("\n\nError: {}", e);
            // give the async logging drain a moment to flush before exiting
            std::thread::sleep(Duration::from_millis(100));
            std::process::exit(1);
        }
    }
}
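// Loading the prep-postgres output might look like the following (a sketch: the
// `trades` table name is an assumption, the column list follows PgRow's field order,
// and HEADER matches the header row emitted by `wtr.serialize`):
//
//   COPY trades (time, exch, base, quote, amount, price, side, server_time)
//       FROM '/path/to/output.csv' WITH (FORMAT csv, HEADER true);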