test: serde deserialization and triggering on size trigger, cleanup, MSRV bump, cleanup color ctrl test
bconn98 committed Feb 16, 2024
1 parent c981ca4 commit 0f84515
Showing 7 changed files with 364 additions and 9 deletions.
7 changes: 6 additions & 1 deletion Cargo.toml
@@ -8,7 +8,7 @@ repository = "https://github.com/estk/log4rs"
readme = "README.md"
keywords = ["log", "logger", "logging", "log4"]
edition = "2018"
rust-version = "1.69"
rust-version = "1.70"

[features]
default = ["all_components", "config_parsing", "yaml_format"]
@@ -88,6 +88,7 @@ streaming-stats = "0.2.3"
humantime = "2.1"
tempfile = "3.8"
mock_instant = "0.3"
serde_test = "1.0.176"

[[example]]
name = "json_logger"
@@ -108,3 +109,7 @@ required-features = ["file_appender", "rolling_file_appender", "size_trigger"]
[[example]]
name = "multi_logger_config"
required-features = ["yaml_format", "config_parsing"]

[[example]]
name = "color_control"
required-features = ["yaml_format", "config_parsing"]
4 changes: 2 additions & 2 deletions README.md
@@ -4,7 +4,7 @@
[![crates.io](https://img.shields.io/crates/v/log4rs.svg)](https://crates.io/crates/log4rs)
[![License: MIT OR Apache-2.0](https://img.shields.io/crates/l/clippy.svg)](#license)
![CI](https://github.com/estk/log4rs/workflows/CI/badge.svg)
[![Minimum rustc version](https://img.shields.io/badge/rustc-1.69+-green.svg)](https://github.com/estk/log4rs#rust-version-requirements)
[![Minimum rustc version](https://img.shields.io/badge/rustc-1.70+-green.svg)](https://github.com/estk/log4rs#rust-version-requirements)

log4rs is a highly configurable logging framework modeled after Java's Logback
and log4j libraries.
@@ -71,7 +71,7 @@ fn main() {

## Rust Version Requirements

1.69
1.70

## Building for Dev

2 changes: 1 addition & 1 deletion docs/Configuration.md
@@ -174,7 +174,7 @@ other components, the default (and only supported) policy is `kind: compound`.
The _trigger_ field is used to dictate when the log file should be rolled. It
supports two types: `size`, and `time`.

For `size`, it require a _limit_ field. The _limit_ field is a string which defines the maximum file size
For `size`, it requires a _limit_ field. The _limit_ field is a string which defines the maximum file size
prior to a rolling of the file. The limit field requires one of the following
units in bytes, case does not matter:
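
To make the size format concrete, here is a minimal sketch of a rolling-file appender driven by a size trigger, parsed with `serde_yaml` and handed to `log4rs::init_raw_config` the same way the new `color_control` example loads its config. The appender name, log path, 10 mb limit, and `delete` roller are illustrative assumptions, not part of this change, and the snippet needs the rolling-file and size-trigger features enabled.

```rust
// Sketch only: assumed appender name, path, and roller choice.
use log::info;

fn main() {
    let config_str = r#"
appenders:
  rolling:
    kind: rolling_file
    path: log/output.log
    policy:
      kind: compound
      trigger:
        kind: size
        limit: 10 mb   # unit suffix is case-insensitive
      roller:
        kind: delete   # simplest roller; fixed_window is the usual alternative
root:
  level: info
  appenders:
    - rolling
"#;

    // Parse the YAML into log4rs's raw config type and initialize logging.
    let config = serde_yaml::from_str(config_str).unwrap();
    log4rs::init_raw_config(config).unwrap();

    info!("size-trigger rolling appender initialized");
}
```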

25 changes: 25 additions & 0 deletions examples/color_control.rs
@@ -0,0 +1,25 @@
use log::{error, info};
use log4rs;
use serde_yaml;
use std::env;

fn main() {
    // Load the bundled YAML config and initialize log4rs from the parsed value.
    let config_str = include_str!("sample_config.yml");
    let config = serde_yaml::from_str(config_str).unwrap();
    log4rs::init_raw_config(config).unwrap();

    // Read the color-control environment variables, defaulting to "0" when unset.
    let no_color = match env::var("NO_COLOR") {
        Ok(no_color) => no_color,
        Err(_) => "0".to_string(),
    };
    let clicolor_force = match env::var("CLICOLOR_FORCE") {
        Ok(clicolor_force) => clicolor_force,
        Err(_) => "0".to_string(),
    };
    let cli_color = match env::var("CLICOLOR") {
        Ok(cli_color) => cli_color,
        Err(_) => "0".to_string(),
    };

    // Emit one message at each level so the effect of the variables is visible.
    info!(
        "NO_COLOR: {}, CLICOLOR_FORCE: {}, CLICOLOR: {}",
        no_color, clicolor_force, cli_color
    );
    error!(
        "NO_COLOR: {}, CLICOLOR_FORCE: {}, CLICOLOR: {}",
        no_color, clicolor_force, cli_color
    );
}
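
For context on the three variables the example prints, the sketch below spells out the usual precedence from the informal clicolors and no-color.org conventions. It is an assumption about intent rather than a transcription of log4rs's console appender logic, and the `color_enabled` helper is hypothetical.

```rust
// Hypothetical helper showing the conventional precedence of CLICOLOR_FORCE,
// NO_COLOR, and CLICOLOR; log4rs's actual behavior may differ.
use std::env;

fn color_enabled(is_tty: bool) -> bool {
    // CLICOLOR_FORCE set to anything other than "0" forces color on.
    if env::var("CLICOLOR_FORCE").map_or(false, |v| v != "0") {
        return true;
    }
    // NO_COLOR set to a non-empty value turns color off (per no-color.org).
    if env::var("NO_COLOR").map_or(false, |v| !v.is_empty()) {
        return false;
    }
    // Otherwise CLICOLOR != "0" (or unset) allows color, but only on a terminal.
    is_tty && env::var("CLICOLOR").map_or(true, |v| v != "0")
}

fn main() {
    println!("color enabled (assuming a tty): {}", color_enabled(true));
}
```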
297 changes: 297 additions & 0 deletions src/append/rolling_file/policy/compound/trigger/size.rs
@@ -157,10 +157,307 @@ impl Deserialize for SizeTriggerDeserializer {
#[cfg(test)]
mod test {
    use super::*;
    use serde_test::{assert_de_tokens, assert_de_tokens_error, Token};

    static BYTE_MULTIPLIER: u64 = 1024;

    #[test]
    fn pre_process() {
        let trigger = SizeTrigger::new(2048);
        assert!(!trigger.is_pre_process());
    }

    #[test]
    fn test_trigger() {
        let file = tempfile::tempdir().unwrap();
        let mut logfile = LogFile {
            writer: &mut None,
            path: file.path(),
            len: 0,
        };

        let trigger_bytes = 5;
        let trigger = SizeTrigger::new(trigger_bytes);

        // Logfile size is < trigger size, should never trigger
        for size in 0..trigger_bytes {
            logfile.len = size;
            assert!(!trigger.trigger(&logfile).unwrap());
        }

        // Logfile size is == trigger size, should not trigger
        logfile.len = trigger_bytes;
        assert!(!trigger.trigger(&logfile).unwrap());

        // Logfile size is > trigger size, should trigger
        logfile.len = trigger_bytes + 1;
        assert!(trigger.trigger(&logfile).unwrap());
    }

    #[test]
    #[cfg(feature = "config_parsing")]
    fn test_u64_deserialize() {
        let trigger = SizeTriggerConfig { limit: BYTE_MULTIPLIER };
        assert_de_tokens(
            &trigger,
            &[
                Token::Struct { name: "SizeTriggerConfig", len: 1 },
                Token::Str("limit"),
                Token::U64(1024),
                Token::StructEnd,
            ],
        );
    }

    #[test]
    #[cfg(feature = "config_parsing")]
    fn test_i64_deserialize() {
        let trigger = SizeTriggerConfig { limit: BYTE_MULTIPLIER };
        assert_de_tokens(
            &trigger,
            &[
                Token::Struct { name: "SizeTriggerConfig", len: 1 },
                Token::Str("limit"),
                Token::I64(1024),
                Token::StructEnd,
            ],
        );

        // A negative limit must be rejected.
        assert_de_tokens_error::<SizeTriggerConfig>(
            &[
                Token::Struct { name: "SizeTriggerConfig", len: 1 },
                Token::Str("limit"),
                Token::I64(-1024),
                Token::StructEnd,
            ],
            "invalid value: integer `-1024`, expected a non-negative number",
        );
    }

    #[test]
    #[cfg(feature = "config_parsing")]
    fn test_str_deserialize() {
        // Test no unit (i.e. the value is in bytes)
        let trigger = SizeTriggerConfig { limit: BYTE_MULTIPLIER };
        assert_de_tokens(
            &trigger,
            &[
                Token::Struct { name: "SizeTriggerConfig", len: 1 },
                Token::Str("limit"),
                Token::Str("1024"),
                Token::StructEnd,
            ],
        );

        // Test a value that is not a valid unsigned number
        assert_de_tokens_error::<SizeTriggerConfig>(
            &[
                Token::Struct { name: "SizeTriggerConfig", len: 1 },
                Token::Str("limit"),
                Token::Str("-1024"),
                Token::StructEnd,
            ],
            "invalid value: string \"\", expected a number",
        );
    }

    #[test]
    #[cfg(feature = "config_parsing")]
    fn byte_deserialize() {
        let trigger = SizeTriggerConfig { limit: BYTE_MULTIPLIER };

        // Test spacing & b vs B
        assert_de_tokens(
            &trigger,
            &[
                Token::Struct { name: "SizeTriggerConfig", len: 1 },
                Token::Str("limit"),
                Token::Str("1024b"),
                Token::StructEnd,
            ],
        );
        assert_de_tokens(
            &trigger,
            &[
                Token::Struct { name: "SizeTriggerConfig", len: 1 },
                Token::Str("limit"),
                Token::Str("1024 B"),
                Token::StructEnd,
            ],
        );
    }

    #[test]
    #[cfg(feature = "config_parsing")]
    fn kilobyte_deserialize() {
        let trigger = SizeTriggerConfig { limit: BYTE_MULTIPLIER };

        // Test kb unit
        assert_de_tokens(
            &trigger,
            &[
                Token::Struct { name: "SizeTriggerConfig", len: 1 },
                Token::Str("limit"),
                Token::Str("1 kb"),
                Token::StructEnd,
            ],
        );
        assert_de_tokens(
            &trigger,
            &[
                Token::Struct { name: "SizeTriggerConfig", len: 1 },
                Token::Str("limit"),
                Token::Str("1 KB"),
                Token::StructEnd,
            ],
        );
        assert_de_tokens(
            &trigger,
            &[
                Token::Struct { name: "SizeTriggerConfig", len: 1 },
                Token::Str("limit"),
                Token::Str("1 kB"),
                Token::StructEnd,
            ],
        );
        assert_de_tokens(
            &trigger,
            &[
                Token::Struct { name: "SizeTriggerConfig", len: 1 },
                Token::Str("limit"),
                Token::Str("1 Kb"),
                Token::StructEnd,
            ],
        );
    }

    #[test]
    #[cfg(feature = "config_parsing")]
    fn megabyte_deserialize() {
        // Test mb unit
        let trigger = SizeTriggerConfig { limit: BYTE_MULTIPLIER.pow(2) };
        assert_de_tokens(
            &trigger,
            &[
                Token::Struct { name: "SizeTriggerConfig", len: 1 },
                Token::Str("limit"),
                Token::Str("1 mb"),
                Token::StructEnd,
            ],
        );
        assert_de_tokens(
            &trigger,
            &[
                Token::Struct { name: "SizeTriggerConfig", len: 1 },
                Token::Str("limit"),
                Token::Str("1 MB"),
                Token::StructEnd,
            ],
        );
        assert_de_tokens(
            &trigger,
            &[
                Token::Struct { name: "SizeTriggerConfig", len: 1 },
                Token::Str("limit"),
                Token::Str("1 mB"),
                Token::StructEnd,
            ],
        );
        assert_de_tokens(
            &trigger,
            &[
                Token::Struct { name: "SizeTriggerConfig", len: 1 },
                Token::Str("limit"),
                Token::Str("1 Mb"),
                Token::StructEnd,
            ],
        );
    }

    #[test]
    #[cfg(feature = "config_parsing")]
    fn gigabyte_deserialize() {
        // Test gb unit
        let trigger = SizeTriggerConfig { limit: BYTE_MULTIPLIER.pow(3) };
        assert_de_tokens(
            &trigger,
            &[
                Token::Struct { name: "SizeTriggerConfig", len: 1 },
                Token::Str("limit"),
                Token::Str("1 gb"),
                Token::StructEnd,
            ],
        );
        assert_de_tokens(
            &trigger,
            &[
                Token::Struct { name: "SizeTriggerConfig", len: 1 },
                Token::Str("limit"),
                Token::Str("1 GB"),
                Token::StructEnd,
            ],
        );
        assert_de_tokens(
            &trigger,
            &[
                Token::Struct { name: "SizeTriggerConfig", len: 1 },
                Token::Str("limit"),
                Token::Str("1 gB"),
                Token::StructEnd,
            ],
        );
        assert_de_tokens(
            &trigger,
            &[
                Token::Struct { name: "SizeTriggerConfig", len: 1 },
                Token::Str("limit"),
                Token::Str("1 Gb"),
                Token::StructEnd,
            ],
        );
    }

    #[test]
    #[cfg(feature = "config_parsing")]
    fn terabyte_deserialize() {
        // Test tb unit
        let trigger = SizeTriggerConfig { limit: BYTE_MULTIPLIER.pow(4) };
        assert_de_tokens(
            &trigger,
            &[
                Token::Struct { name: "SizeTriggerConfig", len: 1 },
                Token::Str("limit"),
                Token::Str("1 tb"),
                Token::StructEnd,
            ],
        );
        assert_de_tokens(
            &trigger,
            &[
                Token::Struct { name: "SizeTriggerConfig", len: 1 },
                Token::Str("limit"),
                Token::Str("1 TB"),
                Token::StructEnd,
            ],
        );
        assert_de_tokens(
            &trigger,
            &[
                Token::Struct { name: "SizeTriggerConfig", len: 1 },
                Token::Str("limit"),
                Token::Str("1 tB"),
                Token::StructEnd,
            ],
        );
        assert_de_tokens(
            &trigger,
            &[
                Token::Struct { name: "SizeTriggerConfig", len: 1 },
                Token::Str("limit"),
                Token::Str("1 Tb"),
                Token::StructEnd,
            ],
        );
    }
}
