#![allow(unused_imports)]

#[macro_use]
extern crate slog;
#[macro_use]
extern crate markets;

use std::path::PathBuf;
use std::time::*;
use std::io::{self, prelude::*};
use std::fs;
use structopt::StructOpt;
use serde::{Serialize, Deserialize};
use slog::Drain;
use pretty_toa::ThousandsSep;
use markets::crypto::{Exchange, Ticker, Side};

// equivalent to panic! but without the ugly 'thread main panicked' yada yada
macro_rules! fatal { ($fmt:expr, $($args:tt)*) => {{
    eprintln!($fmt, $($args)*);
    std::process::exit(1);
}}}

const PROGRESS_EVERY: usize = 1024 * 1024;
const ONE_SECOND: u64 = 1_000_000_000;
const ONE_HOUR: u64 = ONE_SECOND * 60 * 60;

#[derive(Debug, StructOpt)]
struct Opt {
    /// Path to CSV file with trades data
    #[structopt(short = "f", long = "trades-csv")]
    #[structopt(parse(from_os_str))]
    trades_csv: PathBuf,

    /// Where to save the query results (CSV output)
    #[structopt(short = "o", long = "output-path")]
    #[structopt(parse(from_os_str))]
    output_path: PathBuf,
}

#[derive(Deserialize)]
struct Trade {
    /// Time of trade in unix nanoseconds
    pub time: u64,
    /// Exchange where trade executed
    pub exch: Exchange,
    /// Currency rate of trade (base/quote)
    pub ticker: Ticker,
    /// Price of trade, in quote denomination
    pub price: f64,
    /// Size/Volume of trade, in base denomination
    pub amount: f64,
}

fn per_sec(n: usize, span: Duration) -> f64 {
    if n == 0 || span < Duration::from_micros(1) { return 0.0 }
    let s: f64 = span.as_nanos() as f64 / 1e9f64;
    n as f64 / s
}
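
/// Hand-rolled deserialization of a `Trade` from a `csv::ByteRecord`, used when
/// comparing parsing performance against serde-based deserialization. It assumes
/// the fixed column order that `fast_parse_bytes` asserts against the headers.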
#[allow(dead_code)]
#[inline(always)]
fn manual_deserialize_bytes(row: &csv::ByteRecord) -> Result<Trade, &'static str> {
    let time: u64 = atoi::atoi(row.get(0).ok_or("no time")?)
        .ok_or("parsing time failed")?;

    let amount: f64 = lexical::parse(row.get(1).ok_or("no amount")?)
        .map_err(|_| "parsing amount failed")?;

    let exch = match row.get(2).ok_or("no exch")? {
        b"bmex" => e!(bmex),
        b"bnce" => e!(bnce),
        b"btfx" => e!(btfx),
        b"gdax" => e!(gdax),
        b"okex" => e!(okex),
        b"bits" => e!(bits),
        b"plnx" => e!(plnx),
        b"krkn" => e!(krkn),
        _ => return Err("illegal exch"),
    };

    let price: f64 = lexical::parse(row.get(3).ok_or("no price")?)
        .map_err(|_| "parsing price failed")?;

    let ticker = match row.get(6).ok_or("no ticker")? {
        b"btc_usd" => t!(btc-usd),
        b"eth_usd" => t!(eth-usd),
        b"ltc_usd" => t!(ltc-usd),
        b"etc_usd" => t!(etc-usd),
        b"bch_usd" => t!(bch-usd),
        b"xmr_usd" => t!(xmr-usd),
        b"usdt_usd" => t!(usdt-usd),
        _ => return Err("illegal ticker"),
    };

    Ok(Trade { time, amount, exch, price, ticker })
}

#[allow(dead_code)]
#[inline(always)]
fn manual_deserialize_str(row: &csv::StringRecord) -> Result<Trade, &'static str> {
    let time: u64 = atoi::atoi(row.get(0).ok_or("no time")?.as_bytes())
        .ok_or("parsing time failed")?;

    let amount: f64 = lexical::parse(row.get(1).ok_or("no amount")?)
        .map_err(|_| "parsing amount failed")?;

    let exch = match row.get(2).ok_or("no exch")? {
        "bmex" => e!(bmex),
        "bnce" => e!(bnce),
        "btfx" => e!(btfx),
        "gdax" => e!(gdax),
        "okex" => e!(okex),
        "bits" => e!(bits),
        "plnx" => e!(plnx),
        "krkn" => e!(krkn),
        _ => return Err("illegal exch"),
    };

    let price: f64 = lexical::parse(row.get(3).ok_or("no price")?)
        .map_err(|_| "parsing price failed")?;

    let ticker = match row.get(6).ok_or("no ticker")? {
        "btc_usd" => t!(btc-usd),
        "eth_usd" => t!(eth-usd),
        "ltc_usd" => t!(ltc-usd),
        "etc_usd" => t!(etc-usd),
        "bch_usd" => t!(bch-usd),
        "xmr_usd" => t!(xmr-usd),
        "usdt_usd" => t!(usdt-usd),
        _ => return Err("illegal ticker"),
    };

    Ok(Trade { time, amount, exch, price, ticker })
}

/// Example of code used in discussion of increasing CSV parsing performance
#[allow(dead_code)]
fn fast_parse_bytes<R: Read>(mut rdr: csv::Reader<R>) -> Result<usize, String> {
    // our data is ascii, so parsing with the slightly faster ByteRecord is fine
    let headers: csv::ByteRecord = rdr.byte_headers()
        .map_err(|e| format!("failed to parse CSV headers: {}", e))?
        .clone();
    let mut row = csv::ByteRecord::new();

    // manual_deserialize_bytes assumes the column order of the CSV,
    // so here we verify that it actually matches that assumption
    assert_eq!(headers.get(0), Some(&b"time"[..]));
    assert_eq!(headers.get(1), Some(&b"amount"[..]));
    assert_eq!(headers.get(2), Some(&b"exch"[..]));
    assert_eq!(headers.get(3), Some(&b"price"[..]));
    assert_eq!(headers.get(6), Some(&b"ticker"[..]));

    let mut n = 0;
    let mut last_time = 0;

    while rdr.read_byte_record(&mut row)
        .map_err(|e| {
            format!("reading row {} failed: {}", (n + 1).thousands_sep(), e)
        })?
    {
        let trade: Trade = manual_deserialize_bytes(&row)
            .map_err(|e| {
                format!("deserializing row failed: {}\n\nFailing row:\n{:?}", e, row)
            })?;

        assert!(trade.time >= last_time);
        last_time = trade.time;
        n += 1;
    }

    Ok(n)
}
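
/// Runs the query: parses the --trades-csv file row by row, and for each complete
/// hour of btc_usd trades computes the volume-weighted average price on bmex and
/// gdax plus the ratio between them, writing one row per hour to the --output-path
/// CSV. Returns the number of trade rows parsed.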
fn run(start: Instant, logger: &slog::Logger) -> Result<usize, String> {
    let opt = Opt::from_args();

    info!(logger, "initializing...";
        "trades-csv" => %opt.trades_csv.display(),
        "output-path" => %opt.output_path.display()
    );

    if ! opt.trades_csv.exists() {
        error!(logger, "path does not exist: {}", opt.trades_csv.display());
        fatal!("Error: path does not exist: {}", opt.trades_csv.display());
    }

    debug!(logger, "verified csv path exists"; "trades_csv" => %opt.trades_csv.display());

    let rdr = fs::File::open(&opt.trades_csv)
        .map_err(|e| format!("opening trades csv file failed: {} (tried to open {})", e, opt.trades_csv.display()))?;
    let rdr = io::BufReader::new(rdr);
    let mut rdr = csv::Reader::from_reader(rdr);

    // initializing --output-path CSV
    let wtr = fs::File::create(&opt.output_path)
        .map_err(|e| format!("creating output csv file failed: {} (tried to create {})", e, opt.output_path.display()))?;
    let wtr = io::BufWriter::new(wtr);
    let mut wtr = csv::Writer::from_writer(wtr);
    wtr.write_record(&[
        "time",
        "ratio",
        "bmex",
        "gdax",
    ]).map_err(|e| format!("writing CSV headers to output file failed: {}", e))?;

    let headers: csv::StringRecord = rdr.headers()
        .map_err(|e| format!("failed to parse CSV headers: {}", e))?
        .clone();
    let mut row = csv::StringRecord::new();

    // pull out first row to initialize query calculations
    rdr.read_record(&mut row).map_err(|e| format!("reading first row failed: {}", e))?;
    let trade: Trade = row.deserialize(Some(&headers))
        .map_err(|e| {
            format!("deserializing first row failed: {}\n\nFailing row:\n{:?}", e, row)
        })?;
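
    // hour buckets are aligned to whole hours of unix-nanosecond time; each *_total
    // accumulates sum(price * amount) and each *_amt accumulates sum(amount), so
    // total / amt at the end of an hour is that hour's volume-weighted average price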
    let mut cur_hour = trade.time - trade.time % ONE_HOUR;
    let mut next_hour = cur_hour + ONE_HOUR;

    let mut bmex_total = if trade.exch == e!(bmex) { trade.price * trade.amount } else { 0.0 };
    let mut bmex_amt = if trade.exch == e!(bmex) { trade.amount } else { 0.0 };
    let mut n_bmex = 0;

    let mut gdax_total = if trade.exch == e!(gdax) { trade.price * trade.amount } else { 0.0 };
    let mut gdax_amt = if trade.exch == e!(gdax) { trade.amount } else { 0.0 };
    let mut n_gdax = 0;

    let mut n = 0;
    let mut n_written = 0;
    let mut last_time = 0;

    while rdr.read_record(&mut row)
        .map_err(|e| {
            format!("reading row {} failed: {}", (n + 1).thousands_sep(), e)
        })?
    {
        let trade: Trade = row.deserialize(Some(&headers))
            .map_err(|e| {
                format!("deserializing row failed: {}\n\nFailing row:\n{:?}", e, row)
            })?;

        n += 1;

        // verify data is sorted by time
        assert!(trade.time >= last_time);
        last_time = trade.time;

        if trade.ticker != t!(btc-usd) { continue }

        if trade.time >= next_hour { // finalize last hour, and prepare for this hour
            if n_bmex == 0 || n_gdax == 0 {
                wtr.write_record(&[
                    &format!("{}", cur_hour),
                    "NaN",
                    "NaN",
                    "NaN",
                ]).map_err(|e| format!("writing output row failed: {}", e))?;
            } else {
                let bmex_wt_avg = bmex_total / bmex_amt;
                let gdax_wt_avg = gdax_total / gdax_amt;
                let ratio = bmex_wt_avg / gdax_wt_avg;
                wtr.write_record(&[
                    &format!("{}", cur_hour),
                    &format!("{}", ratio),
                    &format!("{}", bmex_wt_avg),
                    &format!("{}", gdax_wt_avg),
                ]).map_err(|e| format!("writing output row failed: {}", e))?;
            }
            n_written += 1;

            // reset state
            bmex_total = 0.0;
            bmex_amt = 0.0;
            gdax_total = 0.0;
            gdax_amt = 0.0;
            n_bmex = 0;
            n_gdax = 0;
            cur_hour = next_hour;
            next_hour += ONE_HOUR;

            // if we are skipping hours in between the last and current row, we
            // need to write a NaN row for the hours that had no data
            while next_hour <= trade.time {
                wtr.write_record(&[
                    &format!("{}", cur_hour),
                    "NaN",
                    "NaN",
                    "NaN",
                ]).map_err(|e| format!("writing output row failed: {}", e))?;
                n_written += 1;
                cur_hour = next_hour;
                next_hour += ONE_HOUR;
            }
        }

        match trade.exch {
            e!(bmex) => {
                bmex_total += trade.price * trade.amount;
                bmex_amt += trade.amount;
                n_bmex += 1;
            }

            e!(gdax) => {
                gdax_total += trade.price * trade.amount;
                gdax_amt += trade.amount;
                n_gdax += 1;
            }

            _ => {}
        }

        if n % PROGRESS_EVERY == 0 || (cfg!(debug_assertions) && n % (1024 * 96) == 0) {
            info!(logger, "parsing csv file";
                "n rows" => %n.thousands_sep(),
                "n written" => %n_written.thousands_sep(),
                "elapsed" => ?(Instant::now() - start),
            );
        }

        if cfg!(debug_assertions) && n > PROGRESS_EVERY {
            warn!(logger, "debug mode: exiting early";
                "n rows" => %n.thousands_sep(),
                "n written" => %n_written.thousands_sep(),
                "elapsed" => ?(Instant::now() - start),
            );
            break
        }
    }

    // intentionally skipping the partial hour here

    info!(logger, "finished parsing CSV/calculating query. closing output file");
    drop(wtr);

    Ok(n)
}

fn main() {
    let start = Instant::now();

    let decorator = slog_term::TermDecorator::new().stdout().force_color().build();
    let drain = slog_term::FullFormat::new(decorator).use_utc_timestamp().build().fuse();
    let drain = slog_async::Async::new(drain).chan_size(1024 * 64).thread_name("recv".into()).build().fuse();
    let logger = slog::Logger::root(drain, o!("version" => structopt::clap::crate_version!()));

    match run(start, &logger) {
        Ok(n) => {
            let took = Instant::now() - start;
            let took_secs = took.as_millis() as f64 / 1000.0;
            let took_str = format!("{}min, {:.1}sec", took.as_secs() / 60, took_secs % 60.0);
            info!(logger, "finished in {}", took_str;
                "n rows" => %n.thousands_sep(),
  295. "rows/sec" => &((per_sec(n, took) * 100.0).round() / 10.0).thousands_sep(),
            );
        }

        Err(e) => {
            crit!(logger, "run failed: {:?}", e);
            eprintln!("\n\nError: {}", e);
            std::thread::sleep(Duration::from_millis(100));
            std::process::exit(1);
        }
    }
}