// stream_deltas_bench/stream_deltas_bench.rs

// -------------------------------------------------------------------------------------------------
//  Copyright (C) 2015-2025 Nautech Systems Pty Ltd. All rights reserved.
//  https://nautechsystems.io
//
//  Licensed under the GNU Lesser General Public License Version 3.0 (the "License");
//  You may not use this file except in compliance with the License.
//  You may obtain a copy of the License at https://www.gnu.org/licenses/lgpl-3.0.en.html
//
//  Unless required by applicable law or agreed to in writing, software
//  distributed under the License is distributed on an "AS IS" BASIS,
//  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//  See the License for the specific language governing permissions and
//  limitations under the License.
// -------------------------------------------------------------------------------------------------

use std::{path::Path, time::Instant};

use nautilus_tardis::csv::stream_deltas;

20fn main() {
21    let test_data_path = Path::new(
22        "tests/test_data/large/tardis_deribit_incremental_book_L2_2020-04-01_BTC-PERPETUAL.csv.gz",
23    );
24
25    if !test_data_path.exists() {
26        eprintln!("Test data file not found: {test_data_path:?}");
27        return;
28    }
29
30    println!("Running stream_deltas benchmarks...");
31    println!("Test data: {test_data_path:?}");
32    println!();
33
34    // Benchmark 1: Small chunks
35    println!("Small chunks (100 records per chunk)");
36    let start = Instant::now();
37    let stream = stream_deltas(
38        test_data_path,
39        100,  // Small chunk size
40        None, // Auto-detect price precision
41        None, // Auto-detect size precision
42        None, // No instrument filter
43        None, // No limit
44    )
45    .unwrap();
46    let count: usize = stream.map(|chunk| chunk.unwrap().len()).sum();
47    let duration = start.elapsed();
48    println!("Processed {count} records in {duration:?}");
49    println!(
50        "  Rate: {:.0} records/second",
51        count as f64 / duration.as_secs_f64()
52    );
53    println!();
54
55    // Benchmark 2: Large chunks
56    println!("Large chunks (100,000 records per chunk)");
57    let start = Instant::now();
58    let stream = stream_deltas(
59        test_data_path,
60        100_000, // Large chunk size
61        None,    // Auto-detect price precision
62        None,    // Auto-detect size precision
63        None,    // No instrument filter
64        None,    // No limit
65    )
66    .unwrap();
67    let count: usize = stream.map(|chunk| chunk.unwrap().len()).sum();
68    let duration = start.elapsed();
69    println!("Processed {count} records in {duration:?}");
70    println!(
71        "  Rate: {:.0} records/second",
72        count as f64 / duration.as_secs_f64()
73    );
74    println!();
75
76    // Benchmark 3: With fixed precision
77    println!("With fixed precision (1,000 records per chunk)");
78    let start = Instant::now();
79    let stream = stream_deltas(
80        test_data_path,
81        1_000,   // Medium chunk size
82        Some(2), // Fixed price precision
83        Some(4), // Fixed size precision
84        None,    // No instrument filter
85        None,    // No limit
86    )
87    .unwrap();
88    let count: usize = stream.map(|chunk| chunk.unwrap().len()).sum();
89    let duration = start.elapsed();
90    println!("Processed {count} records in {duration:?}");
91    println!(
92        "  Rate: {:.0} records/second",
93        count as f64 / duration.as_secs_f64()
94    );
95    println!();
96
97    println!("Benchmarks completed!");
98}