Add run analysis code, and clean up most warnings

Cap'n Proto has a bug in deserialization: in both packed and unpacked modes, it only gets through 43116 of 62253 messages.
Bradlee Speice 2019-09-06 22:23:46 -04:00
parent f151d86604
commit 369613843d
12 changed files with 2160 additions and 1653 deletions

Cargo.lock (generated)

@ -1,5 +1,10 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
[[package]]
name = "adler32"
version = "1.0.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "alloc_counter"
version = "0.0.2"
@ -35,11 +40,29 @@ dependencies = [
"winapi 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "autocfg"
version = "0.1.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "base64"
version = "0.10.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"byteorder 1.3.2 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "bitflags"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "byteorder"
version = "1.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "capnp"
version = "0.10.1"
@ -53,6 +76,11 @@ dependencies = [
"capnp 0.10.1 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "cc"
version = "1.0.42"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "cfg-if"
version = "0.1.9"
@ -72,6 +100,31 @@ dependencies = [
"vec_map 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "crc32fast"
version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "crossbeam-channel"
version = "0.3.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"crossbeam-utils 0.6.6 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "crossbeam-utils"
version = "0.6.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)",
"lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "flatbuffers"
version = "0.6.0"
@ -88,6 +141,35 @@ dependencies = [
"log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "flate2"
version = "1.0.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"crc32fast 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)",
"miniz-sys 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)",
"miniz_oxide 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "hdrhistogram"
version = "6.3.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"base64 0.10.1 (registry+https://github.com/rust-lang/crates.io-index)",
"byteorder 1.3.2 (registry+https://github.com/rust-lang/crates.io-index)",
"crossbeam-channel 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)",
"flate2 1.0.11 (registry+https://github.com/rust-lang/crates.io-index)",
"nom 4.2.3 (registry+https://github.com/rust-lang/crates.io-index)",
"num-traits 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "lazy_static"
version = "1.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "lexical-core"
version = "0.4.3"
@ -123,6 +205,7 @@ dependencies = [
"clap 2.33.0 (registry+https://github.com/rust-lang/crates.io-index)",
"flatbuffers 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
"flatc-rust 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
"hdrhistogram 6.3.4 (registry+https://github.com/rust-lang/crates.io-index)",
"nom 5.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"smallvec 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)",
]
@ -132,6 +215,32 @@ name = "memchr"
version = "2.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "miniz-sys"
version = "0.1.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"cc 1.0.42 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "miniz_oxide"
version = "0.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"adler32 1.0.3 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "nom"
version = "4.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"memchr 2.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
"version_check 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "nom"
version = "5.0.0"
@ -142,6 +251,14 @@ dependencies = [
"version_check 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "num-traits"
version = "0.2.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"autocfg 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "proc-macro2"
version = "0.4.30"
@ -279,22 +396,37 @@ version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
[metadata]
"checksum adler32 1.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "7e522997b529f05601e05166c07ed17789691f562762c7f3b987263d2dedee5c"
"checksum alloc_counter 0.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "a169230586814a38a47b9764bb5e5310120df93952df01ee3ea9d832aef10e2f"
"checksum alloc_counter_macro 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "c37d9ddd812e5223e8de74a2152fa79dce52ca9f6af38a54c6bcd1ae8b26a05d"
"checksum ansi_term 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ee49baf6cb617b853aa8d93bf420db2383fab46d314482ca2803b40d5fde979b"
"checksum atty 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)" = "1803c647a3ec87095e7ae7acfca019e98de5ec9a7d01343f611cf3152ed71a90"
"checksum autocfg 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "b671c8fb71b457dd4ae18c4ba1e59aa81793daacc361d82fcd410cef0d491875"
"checksum base64 0.10.1 (registry+https://github.com/rust-lang/crates.io-index)" = "0b25d992356d2eb0ed82172f5248873db5560c4721f564b13cb5193bda5e668e"
"checksum bitflags 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3d155346769a6855b86399e9bc3814ab343cd3d62c7e985113d46a0ec3c281fd"
"checksum byteorder 1.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "a7c3dd8985a7111efc5c80b44e23ecdd8c007de8ade3b96595387e812b957cf5"
"checksum capnp 0.10.1 (registry+https://github.com/rust-lang/crates.io-index)" = "3a31dc984081bce3531cb7e1c69b5b926904095baf90be9da42df07af54e123a"
"checksum capnpc 0.10.1 (registry+https://github.com/rust-lang/crates.io-index)" = "6c1a2eead5ee094e8adb30edd6ae55a139b42b8dbe27cc7e4007553ea2b5eb01"
"checksum cc 1.0.42 (registry+https://github.com/rust-lang/crates.io-index)" = "a61c7bce55cd2fae6ec8cb935ebd76256c2959a1f95790f6118a441c2cd5b406"
"checksum cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)" = "b486ce3ccf7ffd79fdeb678eac06a9e6c09fc88d33836340becb8fffe87c5e33"
"checksum clap 2.33.0 (registry+https://github.com/rust-lang/crates.io-index)" = "5067f5bb2d80ef5d68b4c87db81601f0b75bca627bc2ef76b141d7b846a3c6d9"
"checksum crc32fast 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ba125de2af0df55319f41944744ad91c71113bf74a4646efff39afe1f6842db1"
"checksum crossbeam-channel 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)" = "c8ec7fcd21571dc78f96cc96243cab8d8f035247c3efd16c687be154c3fa9efa"
"checksum crossbeam-utils 0.6.6 (registry+https://github.com/rust-lang/crates.io-index)" = "04973fa96e96579258a5091af6003abde64af786b860f18622b82e026cca60e6"
"checksum flatbuffers 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "2fc1af59fd8248b59beb048d614a869ce211315c195f5412334e47f5b7e22726"
"checksum flatc-rust 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "b37a2ed85bee7b6aa0d5305b4765bf4cc0f0cfbc25b86d524126a1ab755f6aed"
"checksum flate2 1.0.11 (registry+https://github.com/rust-lang/crates.io-index)" = "2adaffba6388640136149e18ed080b77a78611c1e1d6de75aedcdf78df5d4682"
"checksum hdrhistogram 6.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "08d331ebcdbca4acbefe5da8c3299b2e246f198a8294cc5163354e743398b89d"
"checksum lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
"checksum lexical-core 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)" = "b8b0f90c979adde96d19eb10eb6431ba0c441e2f9e9bdff868b2f6f5114ff519"
"checksum libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)" = "34fcd2c08d2f832f376f4173a231990fa5aef4e99fb569867318a227ef4c06ba"
"checksum log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)" = "14b6052be84e6b71ab17edffc2eeabf5c2c3ae1fdb464aae35ac50c67a44e1f7"
"checksum memchr 2.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "88579771288728879b57485cc7d6b07d648c9f0141eb955f8ab7f9d45394468e"
"checksum miniz-sys 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)" = "1e9e3ae51cea1576ceba0dde3d484d30e6e5b86dee0b2d412fe3a16a15c98202"
"checksum miniz_oxide 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7108aff85b876d06f22503dcce091e29f76733b2bfdd91eebce81f5e68203a10"
"checksum nom 4.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "2ad2a91a8e869eeb30b9cb3119ae87773a8f4ae617f41b1eb9c154b2905f7bd6"
"checksum nom 5.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e9761d859320e381010a4f7f8ed425f2c924de33ad121ace447367c713ad561b"
"checksum num-traits 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)" = "6ba9a427cfca2be13aa6f6403b0b7e7368fe982bfa16fccc450ce74c46cd9b32"
"checksum proc-macro2 0.4.30 (registry+https://github.com/rust-lang/crates.io-index)" = "cf3d2011ab5c909338f7887f4fc896d35932e29146c12c8d01da6b22a80ba759"
"checksum quote 0.6.13 (registry+https://github.com/rust-lang/crates.io-index)" = "6ce23b6b870e8f94f81fb0a363d65d86675884b34a09043c81e5562f11c1f8e1"
"checksum rustc_version 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "138e3e0acb6c9fb258b19b67cb8abd63c00679d2851805ea151465464fe9030a"

Cargo.toml

@ -8,6 +8,7 @@ edition = "2018"
capnp = "0.10.1"
clap = "2.33.0"
flatbuffers = "0.6.0"
hdrhistogram = "6.3.4"
nom = "5.0.0"
smallvec = "0.6.10"

build.rs

@ -15,7 +15,8 @@ fn main() {
inputs: &[Path::new("marketdata.fbs")],
out_dir: Path::new("src/"),
..Default::default()
}).expect("Unable to compile flatc");
})
.expect("Unable to compile flatc");
// There's no Rust-style builder crate for SBE,
// so we need to run the command by hand.
@ -25,7 +26,8 @@ fn main() {
.arg("-Dsbe.xinclude.aware=true")
.arg("-Dsbe.target.language=uk.co.real_logic.sbe.generation.rust.Rust")
.arg("-Dsbe.target.namespace=marketdata_sbe")
.arg("-jar").arg("sbe-all-1.13.2-all.jar")
.arg("-jar")
.arg("sbe-all-1.13.2-all.jar")
.arg("marketdata.xml")
.output()
.expect("Unable to execute SBE compiler");

src/capnp_runner.rs

@ -1,26 +1,25 @@
use std::convert::TryInto;
use std::io::{BufRead, Read};
use std::mem::size_of;
use std::str::from_utf8_unchecked;
use capnp::Error;
use capnp::message::{Builder, ReaderOptions, ScratchSpace, ScratchSpaceHeapAllocator};
use capnp::message::{ReaderOptions, ScratchSpace, ScratchSpaceHeapAllocator};
use capnp::serialize::write_message;
use capnp::serialize_packed::{read_message as read_message_packed, write_message as write_message_packed};
use nom::bytes::complete::take_until;
use nom::IResult;
use capnp::serialize_packed::{
read_message as read_message_packed, write_message as write_message_packed,
};
use capnp::Error;
use crate::{RunnerDeserialize, RunnerSerialize, StreamVec, Summarizer};
use crate::iex::{IexMessage, IexPayload};
use crate::marketdata_capnp::{multi_message, Side};
use crate::marketdata_capnp::message;
use crate::marketdata_capnp::{multi_message, Side};
use crate::{RunnerDeserialize, RunnerSerialize, StreamVec, Summarizer};
pub struct CapnpWriter<'a> {
// We have to be very careful with how messages are built, as running
// `init_root` and rebuilding will still accumulate garbage if using
// the standard HeapAllocator.
// https://github.com/capnproto/capnproto-rust/issues/111
words: Vec<capnp::Word>,
_words: Vec<capnp::Word>,
scratch: ScratchSpace<'a>,
packed: bool,
}
@ -31,14 +30,12 @@ impl<'a> CapnpWriter<'a> {
// In practice, let's just make sure everything fits.
let mut words = capnp::Word::allocate_zeroed_vec(1024);
let mut scratch = ScratchSpace::new(unsafe {
std::mem::transmute(&mut words[..])
});
let scratch = ScratchSpace::new(unsafe { std::mem::transmute(&mut words[..]) });
CapnpWriter {
words,
_words: words,
scratch,
packed
packed,
}
}
@ -56,12 +53,14 @@ impl<'a> CapnpWriter<'a> {
impl<'a> RunnerSerialize for CapnpWriter<'a> {
fn serialize(&mut self, payload: &IexPayload, mut output: &mut Vec<u8>) {
// First, count the messages we actually care about.
let num_msgs = payload.messages.iter().map(|m| {
match m {
let num_msgs = payload
.messages
.iter()
.map(|m| match m {
IexMessage::TradeReport(_) | IexMessage::PriceLevelUpdate(_) => 1,
_ => 0
}
}).fold(0, |sum, i| sum + i);
_ => 0,
})
.fold(0, |sum, i| sum + i);
if num_msgs == 0 {
return;
@ -109,13 +108,21 @@ impl<'a> RunnerSerialize for CapnpWriter<'a> {
msg_plu.set_price(plu.price);
msg_plu.set_size(plu.size);
msg_plu.set_flags(plu.event_flags);
msg_plu.set_side(if plu.msg_type == 0x38 { Side::Buy } else { Side::Sell });
msg_plu.set_side(if plu.msg_type == 0x38 {
Side::Buy
} else {
Side::Sell
});
}
_ => ()
_ => (),
}
}
let write_fn = if self.packed { write_message_packed } else { write_message };
let write_fn = if self.packed {
write_message_packed
} else {
write_message
};
write_fn(&mut output, &builder).unwrap();
}
@ -123,25 +130,28 @@ impl<'a> RunnerSerialize for CapnpWriter<'a> {
pub struct CapnpReader {
read_opts: ReaderOptions,
packed: bool
packed: bool,
}
impl CapnpReader {
pub fn new(packed: bool) -> CapnpReader {
CapnpReader {
read_opts: ReaderOptions::new(),
packed
packed,
}
}
}
impl CapnpReader {
fn deserialize_packed<'a>(&self, buf: &'a mut StreamVec, stats: &mut Summarizer) -> Result<(), ()> {
fn deserialize_packed<'a>(
&self,
buf: &'a mut StreamVec,
stats: &mut Summarizer,
) -> Result<(), ()> {
// Because `capnp::serialize_packed::PackedRead` is hidden from us, packed reads
// *have* to allocate new segments on every read and copy the buffer into those
// segments; there's no way to re-use allocated memory.
let reader = read_message_packed(buf, self.read_opts)
.map_err(|_| ())?;
let reader = read_message_packed(buf, self.read_opts).map_err(|_| ())?;
let multimsg = reader.get_root::<multi_message::Reader>().unwrap();
for msg in multimsg.get_messages().unwrap().iter() {
@ -149,18 +159,18 @@ impl CapnpReader {
Ok(message::Trade(tr)) => {
let tr = tr.unwrap();
stats.append_trade_volume(msg.get_symbol().unwrap(), tr.get_size() as u64);
},
}
Ok(message::Quote(q)) => {
let q = q.unwrap();
let is_bid = match q.get_side().unwrap() {
Side::Buy => true,
_ => false
_ => false,
};
stats.update_quote_prices(msg.get_symbol().unwrap(), q.get_price(), is_bid);
},
_ => panic!("Unrecognized message type!")
}
_ => panic!("Unrecognized message type!"),
}
};
}
Ok(())
}
@ -192,8 +202,7 @@ impl CapnpReader {
I couldn't find any documentation on how to calculate `bytes_consumed` when
parsing by hand; you just have to guess and check until you figure it out.
*/
let (num_words, offsets) = read_segment_table(&mut data, reader_opts)
.map_err(|_| ())?;
let (num_words, offsets) = read_segment_table(&mut data, reader_opts).map_err(|_| ())?;
let words = unsafe { capnp::Word::bytes_to_words(data) };
let reader = capnp::message::Reader::new(
SliceSegments {
@ -206,8 +215,7 @@ impl CapnpReader {
let msg_bytes = num_words * size_of::<capnp::Word>();
let bytes_consumed = segment_table_bytes + msg_bytes;
let multimsg = reader.get_root::<multi_message::Reader>()
.map_err(|_| ())?;
let multimsg = reader.get_root::<multi_message::Reader>().map_err(|_| ())?;
for msg in multimsg.get_messages().map_err(|_| ())?.iter() {
let sym = msg.get_symbol().map_err(|_| ())?;
@ -215,15 +223,15 @@ impl CapnpReader {
message::Trade(trade) => {
let trade = trade.unwrap();
stats.append_trade_volume(sym, trade.get_size().into());
},
}
message::Quote(quote) => {
let quote = quote.unwrap();
let is_buy = match quote.get_side().unwrap() {
Side::Buy => true,
_ => false
_ => false,
};
stats.update_quote_prices(sym, quote.get_price(), is_buy);
},
}
}
}
@ -244,7 +252,6 @@ impl RunnerDeserialize for CapnpReader {
}
}
pub struct SliceSegments<'a> {
words: &'a [capnp::Word],
segment_slices: Vec<(usize, usize)>,
@ -265,10 +272,12 @@ impl<'a> capnp::message::ReaderSegments for SliceSegments<'a> {
}
}
fn read_segment_table<R>(read: &mut R,
options: capnp::message::ReaderOptions)
-> capnp::Result<(usize, Vec<(usize, usize)>)>
where R: Read
fn read_segment_table<R>(
read: &mut R,
options: capnp::message::ReaderOptions,
) -> capnp::Result<(usize, Vec<(usize, usize)>)>
where
R: Read,
{
let mut buf: [u8; 8] = [0; 8];
@ -277,9 +286,15 @@ fn read_segment_table<R>(read: &mut R,
let segment_count = u32::from_le_bytes(buf[0..4].try_into().unwrap()).wrapping_add(1) as usize;
if segment_count >= 512 {
return Err(Error::failed(format!("Too many segments: {}", segment_count)))
return Err(Error::failed(format!(
"Too many segments: {}",
segment_count
)));
} else if segment_count == 0 {
return Err(Error::failed(format!("Too few segments: {}", segment_count)))
return Err(Error::failed(format!(
"Too few segments: {}",
segment_count
)));
}
let mut segment_slices = Vec::with_capacity(segment_count);
@ -301,7 +316,8 @@ fn read_segment_table<R>(read: &mut R,
read.read_exact(&mut segment_sizes[..])?;
for idx in 0..(segment_count - 1) {
let segment_len =
u32::from_le_bytes(segment_sizes[(idx * 4)..(idx + 1) * 4].try_into().unwrap()) as usize;
u32::from_le_bytes(segment_sizes[(idx * 4)..(idx + 1) * 4].try_into().unwrap())
as usize;
segment_slices.push((total_words, total_words + segment_len));
total_words += segment_len;
@ -313,9 +329,11 @@ fn read_segment_table<R>(read: &mut R,
// traversal limit. Without this check, a malicious client could transmit a very large segment
// size to make the receiver allocate excessive space and possibly crash.
if total_words as u64 > options.traversal_limit_in_words {
return Err(Error::failed(
format!("Message has {} words, which is too large. To increase the limit on the \
receiving end, see capnp::message::ReaderOptions.", total_words)))
return Err(Error::failed(format!(
"Message has {} words, which is too large. To increase the limit on the \
receiving end, see capnp::message::ReaderOptions.",
total_words
)));
}
Ok((total_words, segment_slices))
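
The `bytes_consumed` arithmetic above follows the standard Cap'n Proto stream framing: a little-endian u32 holding `segment_count - 1`, then one u32 size-in-words per segment, with the table padded to an 8-byte word boundary, followed by the segment data itself. The helper below is a hypothetical sketch of that math, not part of this commit:

// Hypothetical helper illustrating the framing math behind bytes_consumed.
// For two segments of 3 and 5 words:
//   table = 4 bytes (count - 1) + 2 * 4 bytes (sizes) = 12 bytes, padded to 16
//   body  = (3 + 5) words * 8 bytes/word = 64 bytes
//   bytes_consumed = 16 + 64 = 80
fn framed_size(segment_words: &[usize]) -> usize {
    let table_bytes = 4 + 4 * segment_words.len();
    let table_padded = (table_bytes + 7) / 8 * 8;
    let body_bytes: usize = segment_words.iter().sum::<usize>() * 8;
    table_padded + body_bytes
}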

src/flatbuffers_runner.rs

@ -1,15 +1,10 @@
use std::convert::TryInto;
use std::io::{BufRead, Error, Write};
use std::io::{BufRead, Write};
use std::mem::size_of;
use std::str::from_utf8_unchecked;
use capnp::data::new_builder;
use flatbuffers::buffer_has_identifier;
use nom::{bytes::complete::take_until, IResult};
use crate::{RunnerDeserialize, RunnerSerialize, StreamVec, Summarizer};
use crate::iex::{IexMessage, IexPayload};
use crate::marketdata_generated::md_shootout;
use crate::{RunnerDeserialize, RunnerSerialize, StreamVec, Summarizer};
pub struct FlatbuffersWriter<'a> {
builder: flatbuffers::FlatBufferBuilder<'a>,
@ -27,7 +22,6 @@ impl<'a> FlatbuffersWriter<'a> {
impl<'a> RunnerSerialize for FlatbuffersWriter<'a> {
fn serialize(&mut self, payload: &IexPayload, output: &mut Vec<u8>) {
// Because FlatBuffers can't handle nested vectors (specifically, we can't track
// both the variable-length vector of messages, and the variable-length strings
// within those messages), we have to cache the messages as they get built
@ -61,7 +55,11 @@ impl<'a> RunnerSerialize for FlatbuffersWriter<'a> {
price: plu.price,
size_: plu.size,
flags: plu.event_flags,
side: if plu.msg_type == 0x38 { md_shootout::Side::Buy } else { md_shootout::Side::Sell },
side: if plu.msg_type == 0x38 {
md_shootout::Side::Buy
} else {
md_shootout::Side::Sell
},
},
);
@ -73,7 +71,7 @@ impl<'a> RunnerSerialize for FlatbuffersWriter<'a> {
body: Some(level_update.as_union_value()),
})
}
_ => None
_ => None,
};
msg_args.map(|a| {
@ -118,7 +116,7 @@ impl RunnerDeserialize for FlatbuffersReader {
// a view over the underlying buffer.
let data = buf.fill_buf().unwrap();
if data.len() == 0 {
return Err(())
return Err(());
}
let msg_len_buf: [u8; 4] = data[..size_of::<u32>()].try_into().unwrap();
@ -127,7 +125,7 @@ impl RunnerDeserialize for FlatbuffersReader {
let multimsg = flatbuffers::get_size_prefixed_root::<md_shootout::MultiMessage>(data);
let msg_vec = match multimsg.messages() {
Some(m) => m,
None => panic!("Couldn't find messages")
None => panic!("Couldn't find messages"),
};
for i in 0..msg_vec.len() {
@ -136,16 +134,16 @@ impl RunnerDeserialize for FlatbuffersReader {
md_shootout::MessageBody::Trade => {
let trade = msg.body_as_trade().unwrap();
stats.append_trade_volume(msg.symbol().unwrap(), trade.size_().into());
},
}
md_shootout::MessageBody::LevelUpdate => {
let lu = msg.body_as_level_update().unwrap();
let is_bid = match lu.side() {
md_shootout::Side::Buy => true,
_ => false
_ => false,
};
stats.update_quote_prices(msg.symbol().unwrap(), lu.price(), is_bid);
},
md_shootout::MessageBody::NONE => panic!("Unrecognized message type")
}
md_shootout::MessageBody::NONE => panic!("Unrecognized message type"),
}
}

src/iex.rs

@ -1,8 +1,8 @@
use std::convert::TryInto;
use nom::{bytes::complete::take, IResult, number::complete::*, sequence::tuple};
use nom::{bytes::complete::take, number::complete::*, sequence::tuple, IResult};
use crate::parsers::{Block, extract_iex_data, read_block};
use crate::parsers::{extract_iex_data, read_block, Block};
pub struct IexParser<'a> {
pcap_buffer: &'a [u8],

src/main.rs

@ -2,13 +2,14 @@ use std::cmp::{max, min};
use std::collections::hash_map::{DefaultHasher, HashMap};
use std::fs::File;
use std::hash::Hasher;
use std::io::{BufRead, Read};
use std::io::Error;
use std::io::{BufRead, Read};
use std::path::Path;
use std::str::from_utf8_unchecked;
use std::time::{Instant, SystemTime};
use std::time::Instant;
use clap::{App, Arg};
use hdrhistogram::Histogram;
use nom::{bytes::complete::take_until, IResult};
use crate::iex::{IexParser, IexPayload};
@ -19,13 +20,14 @@ use crate::iex::{IexParser, IexPayload};
pub mod marketdata_capnp;
#[allow(unused_imports)]
pub mod marketdata_generated; // Flatbuffers
#[allow(dead_code)]
pub mod marketdata_sbe;
mod capnp_runner;
mod flatbuffers_runner;
mod sbe_runner;
mod iex;
mod parsers;
mod sbe_runner;
fn main() {
let matches = App::new("Marketdata Shootout")
@ -48,84 +50,32 @@ fn main() {
file.read_to_end(&mut buf)
.expect(&format!("Unable to read file={}", path.display()));
let _start = SystemTime::now();
let mut summarizer = Summarizer::default();
let mut parser = IexParser::new(&buf[..]);
let _capnp_unpacked = run_analysis(
&buf,
&mut capnp_runner::CapnpWriter::new(false),
&mut capnp_runner::CapnpReader::new(false),
);
// Pre-allocate the same size as the backing file. Will be way more than
// necessary, but makes sure there's no re-allocation not related to
// actual parsing/serialization code
let mut output_buf: Vec<u8> = Vec::with_capacity(buf.capacity());
let _capnp_packed = run_analysis(
&buf,
&mut capnp_runner::CapnpWriter::new(true),
&mut capnp_runner::CapnpReader::new(true),
);
/*
let mut capnp_writer = capnp_runner::CapnpWriter::new();
for iex_payload in parser {
//let iex_payload = parser.next().unwrap();
capnp_writer.serialize(&iex_payload, &mut output_buf, true);
}
let _flatbuffers = run_analysis(
&buf,
&mut flatbuffers_runner::FlatbuffersWriter::new(),
&mut flatbuffers_runner::FlatbuffersReader::new(),
);
let capnp_reader = capnp_runner::CapnpReader::new();
let mut read_buf = StreamVec::new(output_buf);
let mut parsed_msgs: u64 = 0;
while let Ok(_) = capnp_reader.deserialize_packed(&mut read_buf, &mut summarizer) {
parsed_msgs += 1;
}
*/
let mut fb_writer = flatbuffers_runner::FlatbuffersWriter::new();
for iex_payload in parser {
let now = Instant::now();
fb_writer.serialize(&iex_payload, &mut output_buf);
let serialize_nanos = Instant::now().duration_since(now).as_nanos();
dbg!(serialize_nanos);
}
let mut read_buf = StreamVec::new(output_buf);
let fb_reader = flatbuffers_runner::FlatbuffersReader::new();
let mut parsed_msgs = 0;
while let Ok(_) = fb_reader.deserialize(&mut read_buf, &mut summarizer) {
parsed_msgs += 1;
}
/*
let mut capnp_writer = capnp_runner::CapnpWriter::new();
for iex_payload in parser {
//let iex_payload = parser.next().unwrap();
let now = Instant::now();
capnp_writer.serialize(&iex_payload, &mut output_buf, false);
let serialize_nanos = Instant::now().duration_since(now).as_nanos();
dbg!(serialize_nanos);
}
let capnp_reader = capnp_runner::CapnpReader::new();
let mut read_buf = StreamVec::new(output_buf);
let mut parsed_msgs: u64 = 0;
while let Ok(_) = capnp_reader.deserialize_unpacked(&mut read_buf, &mut summarizer) {
parsed_msgs += 1;
}
*/
/*
let mut sbe_writer = sbe_runner::SBEWriter::new();
for iex_payload in parser {
//let iex_payload = parser.next().unwrap();
sbe_writer.serialize(&iex_payload, &mut output_buf);
}
let sbe_reader = sbe_runner::SBEReader::new();
let mut read_buf = StreamVec::new(output_buf);
let mut parsed_msgs: u64 = 0;
while let Ok(_) = sbe_reader.deserialize(&mut read_buf, &mut summarizer) {
parsed_msgs += 1;
}
*/
dbg!(parsed_msgs);
dbg!(summarizer);
let _sbe = run_analysis(
&buf,
&mut sbe_runner::SBEWriter::new(),
&mut sbe_runner::SBEReader::new(),
);
}
#[derive(Debug)]
#[derive(Debug, PartialEq)]
pub struct SummaryStats {
symbol: String,
trade_volume: u64,
@ -135,24 +85,23 @@ pub struct SummaryStats {
ask_low: u64,
}
#[derive(Default, Debug)]
#[derive(Default, Debug, PartialEq)]
pub struct Summarizer {
data: HashMap<u64, SummaryStats>
data: HashMap<u64, SummaryStats>,
}
impl Summarizer {
fn entry(&mut self, sym: &str) -> &mut SummaryStats {
let mut hasher = DefaultHasher::new();
hasher.write(sym.as_bytes());
self.data.entry(hasher.finish())
.or_insert(SummaryStats {
symbol: sym.to_string(),
trade_volume: 0,
bid_high: 0,
bid_low: u64::max_value(),
ask_high: 0,
ask_low: u64::max_value(),
})
self.data.entry(hasher.finish()).or_insert(SummaryStats {
symbol: sym.to_string(),
trade_volume: 0,
bid_high: 0,
bid_low: u64::max_value(),
ask_high: 0,
ask_low: u64::max_value(),
})
}
pub fn append_trade_volume(&mut self, sym: &str, volume: u64) {
@ -178,10 +127,7 @@ pub struct StreamVec {
impl StreamVec {
pub fn new(buf: Vec<u8>) -> StreamVec {
StreamVec {
pos: 0,
inner: buf,
}
StreamVec { pos: 0, inner: buf }
}
}
@ -189,7 +135,11 @@ impl Read for StreamVec {
fn read(&mut self, buf: &mut [u8]) -> Result<usize, Error> {
// TODO: There's *got* to be a better way to handle this
let end = self.pos + buf.len();
let end = if end > self.inner.len() { self.inner.len() } else { end };
let end = if end > self.inner.len() {
self.inner.len()
} else {
end
};
let read_size = end - self.pos;
buf[..read_size].copy_from_slice(&self.inner[self.pos..end]);
self.pos = end;
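
One possible answer to that TODO (not applied in this commit, and dependent on how `StreamVec`'s `BufRead` impl is written): `std::io::Cursor<Vec<u8>>` already provides the same clamped `Read` behavior plus `BufRead` for free. A minimal sketch:

// Hypothetical alternative to StreamVec built on the standard library.
use std::io::{BufRead, Cursor, Read};

fn cursor_demo(bytes: Vec<u8>) -> std::io::Result<()> {
    let mut cursor = Cursor::new(bytes);
    let mut chunk = [0u8; 16];
    // Reads are clamped to the remaining length, like StreamVec::read above.
    let _n = cursor.read(&mut chunk)?;
    // fill_buf/consume expose the remaining bytes without copying.
    let remaining = cursor.fill_buf()?.len();
    cursor.consume(remaining);
    Ok(())
}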
@ -226,3 +176,75 @@ fn parse_symbol(sym: &[u8; 8]) -> &str {
let (_, sym_bytes) = __take_until(" ", &sym[..]).unwrap();
unsafe { from_utf8_unchecked(sym_bytes) }
}
struct RunAnalysis {
serialize_hist: Histogram<u64>,
deserialize_hist: Histogram<u64>,
summary_stats: Summarizer,
serialize_total_nanos: u128,
deserialize_total_nanos: u128,
buf_len: usize,
}
fn run_analysis<S, D>(iex_data: &Vec<u8>, serializer: &mut S, deserializer: &mut D) -> RunAnalysis
where
S: RunnerSerialize,
D: RunnerDeserialize,
{
let upper = if cfg!(debug_assertions) {
1_000_000
} else {
100_000
};
let iex_parser = IexParser::new(iex_data);
let mut output_buf = Vec::with_capacity(iex_data.len());
let mut serialize_hist = Histogram::<u64>::new_with_bounds(1, upper, 2).unwrap();
let mut serialize_nanos_total = 0u128;
let mut serialize_msgs = 0;
for iex_payload in iex_parser {
let serialize_start = Instant::now();
serializer.serialize(&iex_payload, &mut output_buf);
let serialize_end = Instant::now().duration_since(serialize_start).as_nanos();
serialize_hist.record(serialize_end as u64).unwrap();
serialize_nanos_total += serialize_end;
serialize_msgs += 1;
}
let output_len = output_buf.len();
let mut read_buf = StreamVec::new(output_buf);
let mut summarizer = Summarizer::default();
let mut deserialize_hist = Histogram::<u64>::new_with_bounds(1, upper, 2).unwrap();
let mut parsed_msgs: u64 = 0;
let mut deserialize_nanos_total = 0u128;
loop {
let deserialize_start = Instant::now();
let res = deserializer.deserialize(&mut read_buf, &mut summarizer);
let deserialize_end = Instant::now().duration_since(deserialize_start).as_nanos();
if res.is_ok() {
deserialize_hist.record(deserialize_end as u64).unwrap();
deserialize_nanos_total += deserialize_end;
parsed_msgs += 1;
} else {
break;
}
}
dbg!(serialize_msgs, parsed_msgs);
RunAnalysis {
serialize_hist,
deserialize_hist,
summary_stats: summarizer,
serialize_total_nanos: serialize_nanos_total,
deserialize_total_nanos: deserialize_nanos_total,
buf_len: output_len,
}
}
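
`run_analysis` collects the histograms, but nothing in this commit prints them yet. A hypothetical reporting helper (the `report` name and output format are assumptions, using the hdrhistogram 6.x API added above) might look like:

// Hypothetical: summarize one RunAnalysis from the same module (main.rs).
fn report(name: &str, analysis: &RunAnalysis) {
    let ser = &analysis.serialize_hist;
    let de = &analysis.deserialize_hist;
    println!("{}: {} bytes serialized", name, analysis.buf_len);
    println!(
        "  serialize:   p50={}ns p99={}ns max={}ns total={}ns",
        ser.value_at_quantile(0.50),
        ser.value_at_quantile(0.99),
        ser.max(),
        analysis.serialize_total_nanos
    );
    println!(
        "  deserialize: p50={}ns p99={}ns max={}ns total={}ns",
        de.value_at_quantile(0.50),
        de.value_at_quantile(0.99),
        de.max(),
        analysis.deserialize_total_nanos
    );
    println!("  stats: {:?}", analysis.summary_stats);
}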

File diff suppressed because it is too large.

File diff suppressed because it is too large.


@ -1,6 +1,5 @@
/// Generated code for SBE package marketdata_sbe
/// Imports core rather than std to broaden usable environments.
extern crate core;
@ -9,13 +8,13 @@ extern crate core;
/// Errors that may occur during the course of encoding or decoding.
#[derive(Debug)]
pub enum CodecErr {
/// Too few bytes in the byte-slice to read or write the data structure relevant
/// to the current state of the codec
NotEnoughBytes,
/// Too few bytes in the byte-slice to read or write the data structure relevant
/// to the current state of the codec
NotEnoughBytes,
/// Groups and vardata are constrained by the numeric type chosen to represent their
/// length as well as optional maxima imposed by the schema
SliceIsLongerThanAllowedBySchema,
/// Groups and vardata are constrained by the numeric type chosen to represent their
/// length as well as optional maxima imposed by the schema
SliceIsLongerThanAllowedBySchema,
}
pub type CodecResult<T> = core::result::Result<T, CodecErr>;
@ -23,206 +22,219 @@ pub type CodecResult<T> = core::result::Result<T, CodecErr>;
/// Scratch Decoder Data Wrapper - codec internal use only
#[derive(Debug)]
pub struct ScratchDecoderData<'d> {
data: &'d [u8],
pos: usize,
data: &'d [u8],
pos: usize,
}
impl<'d> ScratchDecoderData<'d> {
/// Create a struct reference overlaid atop the data buffer
/// such that the struct's contents directly reflect the buffer.
/// Advances the `pos` index by the size of the struct in bytes.
#[inline]
fn read_type<T>(&mut self, num_bytes: usize) -> CodecResult<&'d T> {
let end = self.pos + num_bytes;
if end <= self.data.len() {
let s = self.data[self.pos..end].as_ptr() as *mut T;
let v: &'d T = unsafe { &*s };
self.pos = end;
Ok(v)
} else {
Err(CodecErr::NotEnoughBytes)
/// Create a struct reference overlaid atop the data buffer
/// such that the struct's contents directly reflect the buffer.
/// Advances the `pos` index by the size of the struct in bytes.
#[inline]
fn read_type<T>(&mut self, num_bytes: usize) -> CodecResult<&'d T> {
let end = self.pos + num_bytes;
if end <= self.data.len() {
let s = self.data[self.pos..end].as_ptr() as *mut T;
let v: &'d T = unsafe { &*s };
self.pos = end;
Ok(v)
} else {
Err(CodecErr::NotEnoughBytes)
}
}
}
/// Advances the `pos` index by a set number of bytes.
#[inline]
fn skip_bytes(&mut self, num_bytes: usize) -> CodecResult<()> {
let end = self.pos + num_bytes;
if end <= self.data.len() {
self.pos = end;
Ok(())
} else {
Err(CodecErr::NotEnoughBytes)
/// Advances the `pos` index by a set number of bytes.
#[inline]
fn skip_bytes(&mut self, num_bytes: usize) -> CodecResult<()> {
let end = self.pos + num_bytes;
if end <= self.data.len() {
self.pos = end;
Ok(())
} else {
Err(CodecErr::NotEnoughBytes)
}
}
}
/// Create a slice reference overlaid atop the data buffer
/// such that the slice's members' contents directly reflect the buffer.
/// Advances the `pos` index by the size of the slice contents in bytes.
#[inline]
fn read_slice<T>(&mut self, count: usize, bytes_per_item: usize) -> CodecResult<&'d [T]> {
let num_bytes = bytes_per_item * count;
let end = self.pos + num_bytes;
if end <= self.data.len() {
let v: &'d [T] = unsafe {
core::slice::from_raw_parts(self.data[self.pos..end].as_ptr() as *const T, count)
};
self.pos = end;
Ok(v)
} else {
Err(CodecErr::NotEnoughBytes)
/// Create a slice reference overlaid atop the data buffer
/// such that the slice's members' contents directly reflect the buffer.
/// Advances the `pos` index by the size of the slice contents in bytes.
#[inline]
fn read_slice<T>(&mut self, count: usize, bytes_per_item: usize) -> CodecResult<&'d [T]> {
let num_bytes = bytes_per_item * count;
let end = self.pos + num_bytes;
if end <= self.data.len() {
let v: &'d [T] = unsafe {
core::slice::from_raw_parts(self.data[self.pos..end].as_ptr() as *const T, count)
};
self.pos = end;
Ok(v)
} else {
Err(CodecErr::NotEnoughBytes)
}
}
}
}
/// Scratch Encoder Data Wrapper - codec internal use only
#[derive(Debug)]
pub struct ScratchEncoderData<'d> {
data: &'d mut [u8],
pos: usize,
data: &'d mut [u8],
pos: usize,
}
impl<'d> ScratchEncoderData<'d> {
/// Copy the bytes of a value into the data buffer
/// Advances the `pos` index to after the newly-written bytes.
#[inline]
fn write_type<T>(&mut self, t: &T, num_bytes: usize) -> CodecResult<()> {
let end = self.pos + num_bytes;
if end <= self.data.len() {
let source_bytes: &[u8] = unsafe {
core::slice::from_raw_parts(t as *const T as *const u8, num_bytes)
};
(&mut self.data[self.pos..end]).copy_from_slice(source_bytes);
self.pos = end;
Ok(())
} else {
Err(CodecErr::NotEnoughBytes)
/// Copy the bytes of a value into the data buffer
/// Advances the `pos` index to after the newly-written bytes.
#[inline]
fn write_type<T>(&mut self, t: &T, num_bytes: usize) -> CodecResult<()> {
let end = self.pos + num_bytes;
if end <= self.data.len() {
let source_bytes: &[u8] =
unsafe { core::slice::from_raw_parts(t as *const T as *const u8, num_bytes) };
(&mut self.data[self.pos..end]).copy_from_slice(source_bytes);
self.pos = end;
Ok(())
} else {
Err(CodecErr::NotEnoughBytes)
}
}
}
/// Advances the `pos` index by a set number of bytes.
#[inline]
fn skip_bytes(&mut self, num_bytes: usize) -> CodecResult<()> {
let end = self.pos + num_bytes;
if end <= self.data.len() {
self.pos = end;
Ok(())
} else {
Err(CodecErr::NotEnoughBytes)
/// Advances the `pos` index by a set number of bytes.
#[inline]
fn skip_bytes(&mut self, num_bytes: usize) -> CodecResult<()> {
let end = self.pos + num_bytes;
if end <= self.data.len() {
self.pos = end;
Ok(())
} else {
Err(CodecErr::NotEnoughBytes)
}
}
}
/// Create a struct reference overlaid atop the data buffer
/// such that changes to the struct directly edit the buffer.
/// Note that the initial content of the struct's fields may be garbage.
/// Advances the `pos` index to after the newly-written bytes.
#[inline]
fn writable_overlay<T>(&mut self, num_bytes: usize) -> CodecResult<&'d mut T> {
let end = self.pos + num_bytes;
if end <= self.data.len() {
let v: &'d mut T = unsafe {
let s = self.data.as_ptr().offset(self.pos as isize) as *mut T;
&mut *s
};
self.pos = end;
Ok(v)
} else {
Err(CodecErr::NotEnoughBytes)
/// Create a struct reference overlaid atop the data buffer
/// such that changes to the struct directly edit the buffer.
/// Note that the initial content of the struct's fields may be garbage.
/// Advances the `pos` index to after the newly-written bytes.
#[inline]
fn writable_overlay<T>(&mut self, num_bytes: usize) -> CodecResult<&'d mut T> {
let end = self.pos + num_bytes;
if end <= self.data.len() {
let v: &'d mut T = unsafe {
let s = self.data.as_ptr().offset(self.pos as isize) as *mut T;
&mut *s
};
self.pos = end;
Ok(v)
} else {
Err(CodecErr::NotEnoughBytes)
}
}
}
/// Copy the bytes of a value into the data buffer at a specific position
/// Does **not** alter the `pos` index.
#[inline]
fn write_at_position<T>(&mut self, position: usize, t: &T, num_bytes: usize) -> CodecResult<()> {
let end = position + num_bytes;
if end <= self.data.len() {
let source_bytes: &[u8] = unsafe {
core::slice::from_raw_parts(t as *const T as *const u8, num_bytes)
};
(&mut self.data[position..end]).copy_from_slice(source_bytes);
Ok(())
} else {
Err(CodecErr::NotEnoughBytes)
/// Copy the bytes of a value into the data buffer at a specific position
/// Does **not** alter the `pos` index.
#[inline]
fn write_at_position<T>(
&mut self,
position: usize,
t: &T,
num_bytes: usize,
) -> CodecResult<()> {
let end = position + num_bytes;
if end <= self.data.len() {
let source_bytes: &[u8] =
unsafe { core::slice::from_raw_parts(t as *const T as *const u8, num_bytes) };
(&mut self.data[position..end]).copy_from_slice(source_bytes);
Ok(())
} else {
Err(CodecErr::NotEnoughBytes)
}
}
}
/// Create a mutable slice overlaid atop the data buffer directly
/// such that changes to the slice contents directly edit the buffer
/// Note that the initial content of the slice's members' fields may be garbage.
/// Advances the `pos` index to after the region representing the slice.
#[inline]
fn writable_slice<T>(&mut self, count: usize, bytes_per_item: usize) -> CodecResult<&'d mut [T]> {
let end = self.pos + (count * bytes_per_item);
if end <= self.data.len() {
let v: &'d mut [T] = unsafe {
core::slice::from_raw_parts_mut(self.data[self.pos..end].as_mut_ptr() as *mut T, count)
};
self.pos = end;
Ok(v)
} else {
Err(CodecErr::NotEnoughBytes)
/// Create a mutable slice overlaid atop the data buffer directly
/// such that changes to the slice contents directly edit the buffer
/// Note that the initial content of the slice's members' fields may be garbage.
/// Advances the `pos` index to after the region representing the slice.
#[inline]
fn writable_slice<T>(
&mut self,
count: usize,
bytes_per_item: usize,
) -> CodecResult<&'d mut [T]> {
let end = self.pos + (count * bytes_per_item);
if end <= self.data.len() {
let v: &'d mut [T] = unsafe {
core::slice::from_raw_parts_mut(
self.data[self.pos..end].as_mut_ptr() as *mut T,
count,
)
};
self.pos = end;
Ok(v)
} else {
Err(CodecErr::NotEnoughBytes)
}
}
}
/// Copy the raw bytes of a slice's contents into the data buffer
/// Does **not** encode the length of the slice explicitly into the buffer.
/// Advances the `pos` index to after the newly-written slice bytes.
#[inline]
fn write_slice_without_count<T>(&mut self, t: &[T], bytes_per_item: usize) -> CodecResult<()> {
let content_bytes_size = bytes_per_item * t.len();
let end = self.pos + content_bytes_size;
if end <= self.data.len() {
let source_bytes: &[u8] = unsafe {
core::slice::from_raw_parts(t.as_ptr() as *const u8, content_bytes_size)
};
(&mut self.data[self.pos..end]).copy_from_slice(source_bytes);
self.pos = end;
Ok(())
} else {
Err(CodecErr::NotEnoughBytes)
/// Copy the raw bytes of a slice's contents into the data buffer
/// Does **not** encode the length of the slice explicitly into the buffer.
/// Advances the `pos` index to after the newly-written slice bytes.
#[inline]
fn write_slice_without_count<T>(&mut self, t: &[T], bytes_per_item: usize) -> CodecResult<()> {
let content_bytes_size = bytes_per_item * t.len();
let end = self.pos + content_bytes_size;
if end <= self.data.len() {
let source_bytes: &[u8] =
unsafe { core::slice::from_raw_parts(t.as_ptr() as *const u8, content_bytes_size) };
(&mut self.data[self.pos..end]).copy_from_slice(source_bytes);
self.pos = end;
Ok(())
} else {
Err(CodecErr::NotEnoughBytes)
}
}
}
}
/// Convenience Either enum
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
pub enum Either<L, R> {
Left(L),
Right(R)
Left(L),
Right(R),
}
/// Enum Side
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[repr(u8)]
pub enum Side {
Buy = 0u8,
Sell = 1u8,
NullVal = 255u8,
Buy = 0u8,
Sell = 1u8,
NullVal = 255u8,
}
impl Default for Side {
fn default() -> Self { Side::NullVal }
fn default() -> Self {
Side::NullVal
}
}
/// Enum MsgType
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[repr(u8)]
pub enum MsgType {
Trade = 0u8,
Quote = 1u8,
NullVal = 255u8,
Trade = 0u8,
Quote = 1u8,
NullVal = 255u8,
}
impl Default for MsgType {
fn default() -> Self { MsgType::NullVal }
fn default() -> Self {
MsgType::NullVal
}
}
/// Quote
#[repr(C, packed)]
#[derive(Default)]
pub struct Quote {
pub price: u64,
pub size: u32,
pub flags: u8,
pub side: Side,
pub price: u64,
pub size: u32,
pub flags: u8,
pub side: Side,
}
impl Quote {}
@ -231,8 +243,8 @@ impl Quote {}
#[repr(C, packed)]
#[derive(Default)]
pub struct Trade {
pub price: u64,
pub size: u32,
pub price: u64,
pub size: u32,
}
impl Trade {}
@ -241,10 +253,10 @@ impl Trade {}
#[repr(C, packed)]
#[derive(Default)]
pub struct MessageHeader {
pub block_length: u16,
pub template_id: u16,
pub schema_id: u16,
pub version: u16,
pub block_length: u16,
pub template_id: u16,
pub schema_id: u16,
pub version: u16,