1use std::{
17 collections::HashMap,
18 env,
19 fmt::Display,
20 str::FromStr,
21 sync::{Mutex, OnceLock, atomic::Ordering, mpsc::SendError},
22};
23
24use indexmap::IndexMap;
25use log::{
26 Level, LevelFilter, Log, STATIC_MAX_LEVEL,
27 kv::{ToValue, Value},
28 set_boxed_logger, set_max_level,
29};
30use nautilus_core::{
31 UUID4, UnixNanos,
32 datetime::unix_nanos_to_iso8601,
33 time::{get_atomic_clock_realtime, get_atomic_clock_static},
34};
35use nautilus_model::identifiers::TraderId;
36use serde::{Deserialize, Serialize, Serializer};
37use ustr::Ustr;
38
39use super::{LOGGING_BYPASSED, LOGGING_GUARDS_ACTIVE, LOGGING_INITIALIZED, LOGGING_REALTIME};
40use crate::{
41 enums::{LogColor, LogLevel},
42 logging::writer::{FileWriter, FileWriterConfig, LogWriter, StderrWriter, StdoutWriter},
43};
44
/// Name assigned to the background logging thread.
const LOGGING: &str = "logging";
/// Structured-logging key carrying an explicit [`LogColor`] override.
const KV_COLOR: &str = "color";
/// Structured-logging key carrying the originating component name.
const KV_COMPONENT: &str = "component";

// Global sender for the logging channel; set once at initialization and
// reused by `LogGuard::new` and `shutdown_graceful`.
static LOGGER_TX: OnceLock<std::sync::mpsc::Sender<LogEvent>> = OnceLock::new();

// Join handle for the background logging thread, taken during shutdown so the
// channel can be fully drained before the process continues.
static LOGGER_HANDLE: Mutex<Option<std::thread::JoinHandle<()>>> = Mutex::new(None);
54
/// Configuration for the logging subsystem's filtering and output behavior.
#[cfg_attr(
    feature = "python",
    pyo3::pyclass(module = "nautilus_trader.core.nautilus_pyo3.common")
)]
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct LoggerConfig {
    /// Maximum level written to stdout.
    pub stdout_level: LevelFilter,
    /// Maximum level written to the log file (`Off` disables file output).
    pub fileout_level: LevelFilter,
    /// Per-component level overrides, keyed by component name.
    component_level: HashMap<Ustr, LevelFilter>,
    /// If `true`, only components listed in `component_level` are logged at all.
    pub log_components_only: bool,
    /// If `true`, terminal output uses ANSI colors.
    pub is_colored: bool,
    /// If `true`, the resolved configuration is printed during initialization.
    pub print_config: bool,
}
74
impl Default for LoggerConfig {
    /// Returns a config with INFO to stdout, file output disabled, colored
    /// output on, no component overrides, and config printing off.
    fn default() -> Self {
        Self {
            stdout_level: LevelFilter::Info,
            fileout_level: LevelFilter::Off,
            component_level: HashMap::new(),
            log_components_only: false,
            is_colored: true,
            print_config: false,
        }
    }
}
88
89impl LoggerConfig {
90 #[must_use]
92 pub const fn new(
93 stdout_level: LevelFilter,
94 fileout_level: LevelFilter,
95 component_level: HashMap<Ustr, LevelFilter>,
96 log_components_only: bool,
97 is_colored: bool,
98 print_config: bool,
99 ) -> Self {
100 Self {
101 stdout_level,
102 fileout_level,
103 component_level,
104 log_components_only,
105 is_colored,
106 print_config,
107 }
108 }
109
110 pub fn from_spec(spec: &str) -> anyhow::Result<Self> {
114 let mut config = Self::default();
115 for kv in spec.split(';') {
116 let kv = kv.trim();
117 if kv.is_empty() {
118 continue;
119 }
120 let kv_lower = kv.to_lowercase(); if kv_lower == "log_components_only" {
123 config.log_components_only = true;
124 } else if kv_lower == "is_colored" {
125 config.is_colored = true;
126 } else if kv_lower == "print_config" {
127 config.print_config = true;
128 } else {
129 let parts: Vec<&str> = kv.split('=').collect();
130 if parts.len() != 2 {
131 anyhow::bail!("Invalid spec pair: {kv}");
132 }
133 let k = parts[0].trim(); let v = parts[1].trim(); let lvl = LevelFilter::from_str(v)
136 .map_err(|_| anyhow::anyhow!("Invalid log level: {v}"))?;
137 let k_lower = k.to_lowercase(); match k_lower.as_str() {
139 "stdout" => config.stdout_level = lvl,
140 "fileout" => config.fileout_level = lvl,
141 _ => {
142 config.component_level.insert(Ustr::from(k), lvl);
143 }
144 }
145 }
146 }
147 Ok(config)
148 }
149
150 pub fn from_env() -> anyhow::Result<Self> {
156 let spec = env::var("NAUTILUS_LOG")?;
157 Self::from_spec(&spec)
158 }
159}
160
/// The active logger: filters records and forwards them to the background
/// logging thread over a channel.
#[derive(Debug)]
pub struct Logger {
    /// The configuration used for filtering and output.
    pub config: LoggerConfig,
    /// Sender side of the channel drained by the logging thread.
    tx: std::sync::mpsc::Sender<LogEvent>,
}
173
/// Messages sent from the logger front end to the background logging thread.
#[derive(Debug)]
pub enum LogEvent {
    /// A log line to format and write.
    Log(LogLine),
    /// Request to flush all writers.
    Flush,
    /// Request to drain remaining events, flush, and stop the thread.
    Close,
}
184
/// A single log record with its metadata, prior to formatting.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct LogLine {
    /// Time the record was produced (nanoseconds since the Unix epoch).
    pub timestamp: UnixNanos,
    /// The record's log level.
    pub level: Level,
    /// Color used when writing to a colored terminal sink.
    pub color: LogColor,
    /// The component that produced the record.
    pub component: Ustr,
    /// The formatted message text.
    pub message: String,
}
199
200impl Display for LogLine {
201 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
202 write!(f, "[{}] {}: {}", self.level, self.component, self.message)
203 }
204}
205
/// Wraps a [`LogLine`] with lazily-built, cached formatted representations
/// so each output format is rendered at most once per line.
#[derive(Clone, Debug)]
pub struct LogLineWrapper {
    /// The underlying log record.
    line: LogLine,
    /// Cached plain-text rendering (built on first use).
    cache: Option<String>,
    /// Cached ANSI-colored rendering (built on first use).
    colored: Option<String>,
    /// Trader ID included in every formatted line.
    trader_id: Ustr,
}
223
impl LogLineWrapper {
    /// Creates a new wrapper around `line`, tagged with `trader_id`.
    #[must_use]
    pub const fn new(line: LogLine, trader_id: Ustr) -> Self {
        Self {
            line,
            cache: None,
            colored: None,
            trader_id,
        }
    }

    /// Returns the plain-text formatted line (newline-terminated), building
    /// and caching it on first call.
    pub fn get_string(&mut self) -> &str {
        self.cache.get_or_insert_with(|| {
            format!(
                "{} [{}] {}.{}: {}\n",
                unix_nanos_to_iso8601(self.line.timestamp),
                self.line.level,
                self.trader_id,
                &self.line.component,
                &self.line.message,
            )
        })
    }

    /// Returns the ANSI-colored formatted line (newline-terminated), building
    /// and caching it on first call.
    pub fn get_colored(&mut self) -> &str {
        self.colored.get_or_insert_with(|| {
            format!(
                // Bold timestamp, then the line's color applied to the body
                "\x1b[1m{}\x1b[0m {}[{}] {}.{}: {}\x1b[0m\n",
                unix_nanos_to_iso8601(self.line.timestamp),
                &self.line.color.as_ansi(),
                self.line.level,
                self.trader_id,
                &self.line.component,
                &self.line.message,
            )
        })
    }

    /// Serializes the line as a JSON object followed by a newline.
    ///
    /// # Panics
    ///
    /// Panics if JSON serialization fails.
    #[must_use]
    pub fn get_json(&self) -> String {
        let json_string =
            serde_json::to_string(&self).expect("Error serializing log event to string");
        format!("{json_string}\n")
    }
}
287
impl Serialize for LogLineWrapper {
    /// Serializes the wrapper as a flat JSON object.
    ///
    /// Uses an `IndexMap` so field order in the output is stable:
    /// timestamp, trader_id, level, color, component, message.
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        let mut json_obj = IndexMap::new();
        let timestamp = unix_nanos_to_iso8601(self.line.timestamp);
        json_obj.insert("timestamp".to_string(), timestamp);
        json_obj.insert("trader_id".to_string(), self.trader_id.to_string());
        json_obj.insert("level".to_string(), self.line.level.to_string());
        json_obj.insert("color".to_string(), self.line.color.to_string());
        json_obj.insert("component".to_string(), self.line.component.to_string());
        json_obj.insert("message".to_string(), self.line.message.clone());

        json_obj.serialize(serializer)
    }
}
305
impl Log for Logger {
    fn enabled(&self, metadata: &log::Metadata) -> bool {
        // Nothing passes once logging has been bypassed (shutdown in progress).
        // Otherwise errors always pass, and other levels must clear at least
        // one sink's filter.
        !LOGGING_BYPASSED.load(Ordering::Relaxed)
            && (metadata.level() == Level::Error
                || metadata.level() <= self.config.stdout_level
                || metadata.level() <= self.config.fileout_level)
    }

    fn log(&self, record: &log::Record) {
        if self.enabled(record.metadata()) {
            // Timestamp comes from the realtime clock or the static (test)
            // clock depending on the global mode flag.
            let timestamp = if LOGGING_REALTIME.load(Ordering::Relaxed) {
                get_atomic_clock_realtime().get_time_ns()
            } else {
                get_atomic_clock_static().get_time_ns()
            };
            let level = record.level();
            let key_values = record.key_values();
            // Optional "color" KV overrides the color derived from the level
            let color: LogColor = key_values
                .get(KV_COLOR.into())
                .and_then(|v| v.to_u64().map(|v| (v as u8).into()))
                .unwrap_or(level.into());
            // Optional "component" KV; falls back to the record's target
            let component = key_values.get(KV_COMPONENT.into()).map_or_else(
                || Ustr::from(record.metadata().target()),
                |v| Ustr::from(&v.to_string()),
            );

            let line = LogLine {
                timestamp,
                level,
                color,
                component,
                message: format!("{}", record.args()),
            };
            // If the receiver is gone, `SendError` hands the event back;
            // recover the line so it can be reported on stderr.
            if let Err(SendError(LogEvent::Log(line))) = self.tx.send(LogEvent::Log(line)) {
                eprintln!("Error sending log event (receiver closed): {line}");
            }
        }
    }

    fn flush(&self) {
        // Skip flushing once the logging system has been bypassed
        if LOGGING_BYPASSED.load(Ordering::Relaxed) {
            return;
        }

        if let Err(e) = self.tx.send(LogEvent::Flush) {
            eprintln!("Error sending flush log event: {e}");
        }
    }
}
356
357#[allow(clippy::too_many_arguments)]
358impl Logger {
359 pub fn init_with_env(
365 trader_id: TraderId,
366 instance_id: UUID4,
367 file_config: FileWriterConfig,
368 ) -> anyhow::Result<LogGuard> {
369 let config = LoggerConfig::from_env()?;
370 Self::init_with_config(trader_id, instance_id, config, file_config)
371 }
372
373 pub fn init_with_config(
388 trader_id: TraderId,
389 instance_id: UUID4,
390 config: LoggerConfig,
391 file_config: FileWriterConfig,
392 ) -> anyhow::Result<LogGuard> {
393 let (tx, rx) = std::sync::mpsc::channel::<LogEvent>();
394
395 let logger_tx = tx.clone();
396 let logger = Self {
397 tx: logger_tx,
398 config: config.clone(),
399 };
400
401 set_boxed_logger(Box::new(logger))?;
402
403 if LOGGER_TX.set(tx).is_err() {
405 debug_assert!(
406 false,
407 "LOGGER_TX already set - re-initialization not supported"
408 );
409 }
410
411 let print_config = config.print_config;
412 if print_config {
413 println!("STATIC_MAX_LEVEL={STATIC_MAX_LEVEL}");
414 println!("Logger initialized with {config:?} {file_config:?}");
415 }
416
417 let handle = std::thread::Builder::new()
418 .name(LOGGING.to_string())
419 .spawn(move || {
420 Self::handle_messages(
421 trader_id.to_string(),
422 instance_id.to_string(),
423 config,
424 file_config,
425 rx,
426 );
427 })?;
428
429 if let Ok(mut handle_guard) = LOGGER_HANDLE.lock() {
431 debug_assert!(
432 handle_guard.is_none(),
433 "LOGGER_HANDLE already set - re-initialization not supported"
434 );
435 *handle_guard = Some(handle);
436 }
437
438 let max_level = log::LevelFilter::Trace;
439 set_max_level(max_level);
440
441 if print_config {
442 println!("Logger set as `log` implementation with max level {max_level}");
443 }
444
445 LogGuard::new()
446 .ok_or_else(|| anyhow::anyhow!("Failed to create LogGuard from global sender"))
447 }
448
449 fn handle_messages(
450 trader_id: String,
451 instance_id: String,
452 config: LoggerConfig,
453 file_config: FileWriterConfig,
454 rx: std::sync::mpsc::Receiver<LogEvent>,
455 ) {
456 let LoggerConfig {
457 stdout_level,
458 fileout_level,
459 component_level,
460 log_components_only,
461 is_colored,
462 print_config: _,
463 } = config;
464
465 let trader_id_cache = Ustr::from(&trader_id);
466
467 let mut stdout_writer = StdoutWriter::new(stdout_level, is_colored);
469 let mut stderr_writer = StderrWriter::new(is_colored);
470
471 let mut file_writer_opt = if fileout_level == LevelFilter::Off {
473 None
474 } else {
475 FileWriter::new(trader_id, instance_id, file_config, fileout_level)
476 };
477
478 let process_event = |event: LogEvent,
479 stdout_writer: &mut StdoutWriter,
480 stderr_writer: &mut StderrWriter,
481 file_writer_opt: &mut Option<FileWriter>| {
482 match event {
483 LogEvent::Log(line) => {
484 let component_filter_level = component_level.get(&line.component);
485
486 if log_components_only && component_filter_level.is_none() {
487 return;
488 }
489
490 if let Some(&filter_level) = component_filter_level
491 && line.level > filter_level
492 {
493 return;
494 }
495
496 let mut wrapper = LogLineWrapper::new(line, trader_id_cache);
497
498 if stderr_writer.enabled(&wrapper.line) {
499 if is_colored {
500 stderr_writer.write(wrapper.get_colored());
501 } else {
502 stderr_writer.write(wrapper.get_string());
503 }
504 }
505
506 if stdout_writer.enabled(&wrapper.line) {
507 if is_colored {
508 stdout_writer.write(wrapper.get_colored());
509 } else {
510 stdout_writer.write(wrapper.get_string());
511 }
512 }
513
514 if let Some(file_writer) = file_writer_opt
515 && file_writer.enabled(&wrapper.line)
516 {
517 if file_writer.json_format {
518 file_writer.write(&wrapper.get_json());
519 } else {
520 file_writer.write(wrapper.get_string());
521 }
522 }
523 }
524 LogEvent::Flush => {
525 stdout_writer.flush();
526 stderr_writer.flush();
527
528 if let Some(file_writer) = file_writer_opt {
529 file_writer.flush();
530 }
531 }
532 LogEvent::Close => {
533 }
535 }
536 };
537
538 while let Ok(event) = rx.recv() {
540 match event {
541 LogEvent::Log(_) | LogEvent::Flush => process_event(
542 event,
543 &mut stdout_writer,
544 &mut stderr_writer,
545 &mut file_writer_opt,
546 ),
547 LogEvent::Close => {
548 stdout_writer.flush();
550 stderr_writer.flush();
551
552 if let Some(ref mut file_writer) = file_writer_opt {
553 file_writer.flush();
554 }
555
556 while let Ok(evt) = rx.try_recv() {
559 match evt {
560 LogEvent::Close => (), _ => process_event(
562 evt,
563 &mut stdout_writer,
564 &mut stderr_writer,
565 &mut file_writer_opt,
566 ),
567 }
568 }
569
570 stdout_writer.flush();
572 stderr_writer.flush();
573
574 if let Some(ref mut file_writer) = file_writer_opt {
575 file_writer.flush();
576 }
577
578 break;
579 }
580 }
581 }
582 }
583}
584
/// Gracefully shuts down the logging system: stops new records, asks the
/// logging thread to drain and close, and joins it when safe to do so.
pub(crate) fn shutdown_graceful() {
    // Stop accepting new records before closing the channel
    LOGGING_BYPASSED.store(true, Ordering::SeqCst);
    log::set_max_level(log::LevelFilter::Off);

    // Best-effort: the thread may already be gone
    if let Some(tx) = LOGGER_TX.get() {
        let _ = tx.send(LogEvent::Close);
    }

    // Join the logging thread unless we are running on that thread itself
    // (joining our own thread would deadlock)
    if let Ok(mut handle_guard) = LOGGER_HANDLE.lock()
        && let Some(handle) = handle_guard.take()
        && handle.thread().id() != std::thread::current().id()
    {
        let _ = handle.join();
    }

    LOGGING_INITIALIZED.store(false, Ordering::SeqCst);
}
612
613pub fn log<T: AsRef<str>>(level: LogLevel, color: LogColor, component: Ustr, message: T) {
614 let color = Value::from(color as u8);
615
616 match level {
617 LogLevel::Off => {}
618 LogLevel::Trace => {
619 log::trace!(component = component.to_value(), color = color; "{}", message.as_ref());
620 }
621 LogLevel::Debug => {
622 log::debug!(component = component.to_value(), color = color; "{}", message.as_ref());
623 }
624 LogLevel::Info => {
625 log::info!(component = component.to_value(), color = color; "{}", message.as_ref());
626 }
627 LogLevel::Warning => {
628 log::warn!(component = component.to_value(), color = color; "{}", message.as_ref());
629 }
630 LogLevel::Error => {
631 log::error!(component = component.to_value(), color = color; "{}", message.as_ref());
632 }
633 }
634}
635
/// Guard that keeps the logging system alive; when the last active guard is
/// dropped, the logging thread is closed and joined (see the `Drop` impl).
#[cfg_attr(
    feature = "python",
    pyo3::pyclass(module = "nautilus_trader.core.nautilus_pyo3.common")
)]
#[derive(Debug)]
pub struct LogGuard {
    /// Sender used to issue Flush/Close events on drop.
    tx: std::sync::mpsc::Sender<LogEvent>,
}
670
671impl LogGuard {
672 #[must_use]
680 pub fn new() -> Option<Self> {
681 LOGGER_TX.get().map(|tx| {
682 LOGGING_GUARDS_ACTIVE
683 .fetch_update(Ordering::SeqCst, Ordering::SeqCst, |count| {
684 if count == u8::MAX {
685 None } else {
687 Some(count + 1)
688 }
689 })
690 .expect("Maximum number of active LogGuards (255) exceeded");
691
692 Self { tx: tx.clone() }
693 })
694 }
695}
696
impl Drop for LogGuard {
    fn drop(&mut self) {
        // Decrement the active guard count; underflow indicates a bookkeeping bug
        let previous_count = LOGGING_GUARDS_ACTIVE
            .fetch_update(Ordering::SeqCst, Ordering::SeqCst, |count| {
                if count == 0 {
                    panic!("LogGuard reference count underflow");
                }
                Some(count - 1)
            })
            .expect("Failed to decrement LogGuard count");

        // This was the last guard: shut the logging system down
        if previous_count == 1 && LOGGING_GUARDS_ACTIVE.load(Ordering::SeqCst) == 0 {
            // Stop accepting new records before closing the channel
            LOGGING_BYPASSED.store(true, Ordering::SeqCst);

            log::set_max_level(log::LevelFilter::Off);

            let _ = self.tx.send(LogEvent::Close);

            // Join the logging thread unless dropping on that thread itself
            // (joining our own thread would deadlock)
            if let Ok(mut handle_guard) = LOGGER_HANDLE.lock()
                && let Some(handle) = handle_guard.take()
            {
                if handle.thread().id() != std::thread::current().id() {
                    let _ = handle.join();
                }
            }

            LOGGING_INITIALIZED.store(false, Ordering::SeqCst);
        } else {
            // Other guards remain active: just flush buffered output
            let _ = self.tx.send(LogEvent::Flush);
        }
    }
}
743
#[cfg(test)]
mod tests {
    use std::{collections::HashMap, time::Duration};

    use log::LevelFilter;
    use nautilus_core::UUID4;
    use nautilus_model::identifiers::TraderId;
    use rstest::*;
    use serde_json::Value;
    use tempfile::tempdir;
    use ustr::Ustr;

    use super::*;
    use crate::{
        enums::LogColor,
        logging::{logging_clock_set_static_mode, logging_clock_set_static_time},
        testing::wait_until,
    };

    #[rstest]
    fn log_message_serialization() {
        let log_message = LogLine {
            timestamp: UnixNanos::default(),
            level: log::Level::Info,
            color: LogColor::Normal,
            component: Ustr::from("Portfolio"),
            message: "This is a log message".to_string(),
        };

        let serialized_json = serde_json::to_string(&log_message).unwrap();
        let deserialized_value: Value = serde_json::from_str(&serialized_json).unwrap();

        assert_eq!(deserialized_value["level"], "INFO");
        assert_eq!(deserialized_value["component"], "Portfolio");
        assert_eq!(deserialized_value["message"], "This is a log message");
    }

    #[rstest]
    fn log_config_parsing() {
        let config =
            LoggerConfig::from_spec("stdout=Info;is_colored;fileout=Debug;RiskEngine=Error")
                .unwrap();
        assert_eq!(
            config,
            LoggerConfig {
                stdout_level: LevelFilter::Info,
                fileout_level: LevelFilter::Debug,
                component_level: HashMap::from_iter(vec![(
                    Ustr::from("RiskEngine"),
                    LevelFilter::Error
                )]),
                log_components_only: false,
                is_colored: true,
                print_config: false,
            }
        );
    }

    #[rstest]
    fn log_config_parsing2() {
        // Trailing ';' produces an empty entry, which is skipped
        let config = LoggerConfig::from_spec("stdout=Warn;print_config;fileout=Error;").unwrap();
        assert_eq!(
            config,
            LoggerConfig {
                stdout_level: LevelFilter::Warn,
                fileout_level: LevelFilter::Error,
                component_level: HashMap::new(),
                log_components_only: false,
                is_colored: true,
                print_config: true,
            }
        );
    }

    #[rstest]
    fn log_config_parsing_with_log_components_only() {
        let config =
            LoggerConfig::from_spec("stdout=Info;log_components_only;RiskEngine=Debug").unwrap();
        assert_eq!(
            config,
            LoggerConfig {
                stdout_level: LevelFilter::Info,
                fileout_level: LevelFilter::Off,
                component_level: HashMap::from_iter(vec![(
                    Ustr::from("RiskEngine"),
                    LevelFilter::Debug
                )]),
                log_components_only: true,
                is_colored: true,
                print_config: false,
            }
        );
    }

    // These tests install the global logger, so they must not run concurrently
    mod serial_tests {
        use super::*;

        #[rstest]
        fn test_logging_to_file() {
            let config = LoggerConfig {
                fileout_level: LevelFilter::Debug,
                ..Default::default()
            };

            let temp_dir = tempdir().expect("Failed to create temporary directory");
            let file_config = FileWriterConfig {
                directory: Some(temp_dir.path().to_str().unwrap().to_string()),
                ..Default::default()
            };

            let log_guard = Logger::init_with_config(
                TraderId::from("TRADER-001"),
                UUID4::new(),
                config,
                file_config,
            );

            logging_clock_set_static_mode();
            logging_clock_set_static_time(1_650_000_000_000_000);

            log::info!(
                component = "RiskEngine";
                "This is a test."
            );

            let mut log_contents = String::new();

            wait_until(
                || {
                    std::fs::read_dir(&temp_dir)
                        .expect("Failed to read directory")
                        .filter_map(Result::ok)
                        .any(|entry| entry.path().is_file())
                },
                Duration::from_secs(3),
            );

            // Dropping the guard closes and flushes the logger
            drop(log_guard);

            wait_until(
                || {
                    let log_file_path = std::fs::read_dir(&temp_dir)
                        .expect("Failed to read directory")
                        .filter_map(Result::ok)
                        .find(|entry| entry.path().is_file())
                        .expect("No files found in directory")
                        .path();
                    log_contents = std::fs::read_to_string(log_file_path)
                        .expect("Error while reading log file");
                    !log_contents.is_empty()
                },
                Duration::from_secs(3),
            );

            assert_eq!(
                log_contents,
                "1970-01-20T02:20:00.000000000Z [INFO] TRADER-001.RiskEngine: This is a test.\n"
            );
        }

        #[rstest]
        fn test_shutdown_drains_backlog_tail() {
            let config = LoggerConfig {
                stdout_level: LevelFilter::Off,
                fileout_level: LevelFilter::Info,
                ..Default::default()
            };

            let temp_dir = tempdir().expect("Failed to create temporary directory");
            let file_config = FileWriterConfig {
                directory: Some(temp_dir.path().to_str().unwrap().to_string()),
                ..Default::default()
            };

            let log_guard = Logger::init_with_config(
                TraderId::from("TRADER-TAIL"),
                UUID4::new(),
                config,
                file_config,
            )
            .expect("Failed to initialize logger");

            logging_clock_set_static_mode();
            logging_clock_set_static_time(1_700_000_000_000_000);

            // Queue a large backlog, then drop the guard immediately so the
            // Close event races ahead of unprocessed Log events
            const N: usize = 1000;
            for i in 0..N {
                log::info!(component = "TailDrain"; "BacklogTest {i}");
            }

            drop(log_guard);

            let mut count = 0usize;
            wait_until(
                || {
                    if let Some(log_file) = std::fs::read_dir(&temp_dir)
                        .expect("Failed to read directory")
                        .filter_map(Result::ok)
                        .find(|entry| entry.path().is_file())
                    {
                        let log_file_path = log_file.path();
                        if let Ok(contents) = std::fs::read_to_string(log_file_path) {
                            count = contents
                                .lines()
                                .filter(|l| l.contains("BacklogTest "))
                                .count();
                            count >= N
                        } else {
                            false
                        }
                    } else {
                        false
                    }
                },
                Duration::from_secs(5),
            );

            assert_eq!(count, N, "Expected all pre-shutdown messages to be written");
        }

        #[rstest]
        fn test_log_component_level_filtering() {
            // RiskEngine is filtered to Error, so the Info record must not appear
            let config =
                LoggerConfig::from_spec("stdout=Info;fileout=Debug;RiskEngine=Error").unwrap();

            let temp_dir = tempdir().expect("Failed to create temporary directory");
            let file_config = FileWriterConfig {
                directory: Some(temp_dir.path().to_str().unwrap().to_string()),
                ..Default::default()
            };

            let log_guard = Logger::init_with_config(
                TraderId::from("TRADER-001"),
                UUID4::new(),
                config,
                file_config,
            );

            logging_clock_set_static_mode();
            logging_clock_set_static_time(1_650_000_000_000_000);

            log::info!(
                component = "RiskEngine";
                "This is a test."
            );

            drop(log_guard);

            wait_until(
                || {
                    if let Some(log_file) = std::fs::read_dir(&temp_dir)
                        .expect("Failed to read directory")
                        .filter_map(Result::ok)
                        .find(|entry| entry.path().is_file())
                    {
                        let log_file_path = log_file.path();
                        let log_contents = std::fs::read_to_string(log_file_path)
                            .expect("Error while reading log file");
                        !log_contents.contains("RiskEngine")
                    } else {
                        false
                    }
                },
                Duration::from_secs(3),
            );

            assert!(
                std::fs::read_dir(&temp_dir)
                    .expect("Failed to read directory")
                    .filter_map(Result::ok)
                    .any(|entry| entry.path().is_file()),
                "Log file exists"
            );
        }

        #[rstest]
        fn test_logging_to_file_in_json_format() {
            let config =
                LoggerConfig::from_spec("stdout=Info;is_colored;fileout=Debug;RiskEngine=Info")
                    .unwrap();

            let temp_dir = tempdir().expect("Failed to create temporary directory");
            let file_config = FileWriterConfig {
                directory: Some(temp_dir.path().to_str().unwrap().to_string()),
                file_format: Some("json".to_string()),
                ..Default::default()
            };

            let log_guard = Logger::init_with_config(
                TraderId::from("TRADER-001"),
                UUID4::new(),
                config,
                file_config,
            );

            logging_clock_set_static_mode();
            logging_clock_set_static_time(1_650_000_000_000_000);

            log::info!(
                component = "RiskEngine";
                "This is a test."
            );

            let mut log_contents = String::new();

            drop(log_guard);

            wait_until(
                || {
                    if let Some(log_file) = std::fs::read_dir(&temp_dir)
                        .expect("Failed to read directory")
                        .filter_map(Result::ok)
                        .find(|entry| entry.path().is_file())
                    {
                        let log_file_path = log_file.path();
                        log_contents = std::fs::read_to_string(log_file_path)
                            .expect("Error while reading log file");
                        !log_contents.is_empty()
                    } else {
                        false
                    }
                },
                Duration::from_secs(3),
            );

            assert_eq!(
                log_contents,
                "{\"timestamp\":\"1970-01-20T02:20:00.000000000Z\",\"trader_id\":\"TRADER-001\",\"level\":\"INFO\",\"color\":\"NORMAL\",\"component\":\"RiskEngine\",\"message\":\"This is a test.\"}\n"
            );
        }
    }
}