Skip to content

Commit 33e5c03

Browse files
committed
add more testing
1 parent 22cd203 commit 33e5c03

2 files changed

Lines changed: 276 additions & 5 deletions

File tree

bottlecap/src/logger.rs

Lines changed: 109 additions & 4 deletions
Original file line number | Diff line number | Diff line change
@@ -49,10 +49,9 @@ where
4949
for span in scope.from_root() {
5050
span_prefix.push_str(span.name());
5151
let ext = span.extensions();
52-
let fields = &ext
53-
.get::<FormattedFields<N>>()
54-
.expect("will never be `None`");
55-
if !fields.is_empty() {
52+
if let Some(fields) = ext.get::<FormattedFields<N>>()
53+
&& !fields.is_empty()
54+
{
5655
span_prefix.push('{');
5756
span_prefix.push_str(fields);
5857
span_prefix.push('}');
@@ -72,3 +71,109 @@ where
7271
writeln!(writer, "{output}")
7372
}
7473
}
74+
75+
#[cfg(test)]
76+
mod tests {
77+
use super::*;
78+
use tracing::subscriber::with_default;
79+
use tracing_subscriber::fmt::Subscriber;
80+
81+
/// Captures all output from a tracing subscriber using our Formatter.
82+
fn capture_log<F: FnOnce()>(f: F) -> String {
83+
let buf = std::sync::Arc::new(std::sync::Mutex::new(Vec::new()));
84+
let buf_clone = buf.clone();
85+
86+
let subscriber = Subscriber::builder()
87+
.with_writer(move || -> Box<dyn std::io::Write + Send> {
88+
Box::new(WriterGuard(buf_clone.clone()))
89+
})
90+
.with_max_level(tracing::Level::TRACE)
91+
.with_level(true)
92+
.with_target(false)
93+
.without_time()
94+
.event_format(Formatter)
95+
.finish();
96+
97+
with_default(subscriber, f);
98+
99+
let lock = buf.lock().expect("test lock poisoned");
100+
String::from_utf8(lock.clone()).expect("invalid UTF-8 in log output")
101+
}
102+
103+
/// A wrapper so Arc<Mutex<Vec<u8>>> implements std::io::Write.
104+
struct WriterGuard(std::sync::Arc<std::sync::Mutex<Vec<u8>>>);
105+
106+
impl std::io::Write for WriterGuard {
107+
fn write(&mut self, buf: &[u8]) -> std::io::Result<usize> {
108+
self.0.lock().unwrap().extend_from_slice(buf);
109+
Ok(buf.len())
110+
}
111+
fn flush(&mut self) -> std::io::Result<()> {
112+
Ok(())
113+
}
114+
}
115+
116+
#[test]
117+
fn test_formatter_outputs_valid_json_with_level() {
118+
let output = capture_log(|| {
119+
tracing::info!("hello world");
120+
});
121+
122+
let parsed: serde_json::Value =
123+
serde_json::from_str(output.trim()).expect("output should be valid JSON");
124+
125+
assert_eq!(parsed["level"], "INFO");
126+
assert!(
127+
parsed["message"]
128+
.as_str()
129+
.unwrap()
130+
.contains("DD_EXTENSION | INFO | hello world")
131+
);
132+
}
133+
134+
#[test]
135+
fn test_formatter_error_level() {
136+
let output = capture_log(|| {
137+
tracing::error!("something broke");
138+
});
139+
140+
let parsed: serde_json::Value = serde_json::from_str(output.trim()).unwrap();
141+
assert_eq!(parsed["level"], "ERROR");
142+
assert!(
143+
parsed["message"]
144+
.as_str()
145+
.unwrap()
146+
.contains("DD_EXTENSION | ERROR | something broke")
147+
);
148+
}
149+
150+
#[test]
151+
fn test_formatter_debug_level() {
152+
let output = capture_log(|| {
153+
tracing::debug!("debug details");
154+
});
155+
156+
let parsed: serde_json::Value = serde_json::from_str(output.trim()).unwrap();
157+
assert_eq!(parsed["level"], "DEBUG");
158+
assert!(
159+
parsed["message"]
160+
.as_str()
161+
.unwrap()
162+
.contains("DD_EXTENSION | DEBUG | debug details")
163+
);
164+
}
165+
166+
#[test]
167+
fn test_formatter_escapes_special_characters() {
168+
let output = capture_log(|| {
169+
tracing::info!("message with \"quotes\" and a\nnewline");
170+
});
171+
172+
// The output should be valid JSON despite special characters
173+
let parsed: serde_json::Value =
174+
serde_json::from_str(output.trim()).expect("special chars should be escaped");
175+
let msg = parsed["message"].as_str().unwrap();
176+
assert!(msg.contains("quotes"));
177+
assert!(msg.contains("newline"));
178+
}
179+
}

bottlecap/src/logs/lambda/processor.rs

Lines changed: 167 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -381,9 +381,11 @@ impl LambdaProcessor {
381381
lambda_message.message.clone(),
382382
);
383383

384-
// Extract log level from JSON (AWS JSON log format / Powertools)
384+
// Extract log level from JSON (AWS JSON log format / Powertools).
385+
// Try "level" first (standard), then fall back to "status" (Datadog convention).
385386
let status = json_obj
386387
.get("level")
388+
.or_else(|| json_obj.get("status"))
387389
.and_then(|v| v.as_str())
388390
.and_then(map_log_level_to_status)
389391
.map_or(
@@ -1956,4 +1958,168 @@ mod tests {
19561958
let intake_log = processor.get_intake_log(lambda_message).unwrap();
19571959
assert_eq!(intake_log.message.status, "debug");
19581960
}
1961+
1962+
#[tokio::test]
1963+
async fn test_get_intake_log_non_string_level_defaults_to_info() {
1964+
let config = Arc::new(config::Config {
1965+
service: Some("test-service".to_string()),
1966+
tags: HashMap::from([("test".to_string(), "tags".to_string())]),
1967+
..config::Config::default()
1968+
});
1969+
1970+
let tags_provider = Arc::new(provider::Provider::new(
1971+
Arc::clone(&config),
1972+
LAMBDA_RUNTIME_SLUG.to_string(),
1973+
&HashMap::from([("function_arn".to_string(), "test-arn".to_string())]),
1974+
));
1975+
1976+
let (tx, _rx) = tokio::sync::mpsc::channel(2);
1977+
let mut processor =
1978+
LambdaProcessor::new(tags_provider, Arc::clone(&config), tx.clone(), false);
1979+
1980+
// Set request_id
1981+
let start_event = TelemetryEvent {
1982+
time: Utc.with_ymd_and_hms(2023, 1, 7, 3, 23, 47).unwrap(),
1983+
record: TelemetryRecord::PlatformStart {
1984+
request_id: "test-request-id".to_string(),
1985+
version: Some("test".to_string()),
1986+
},
1987+
};
1988+
let start_msg = processor.get_message(start_event).await.unwrap();
1989+
processor.get_intake_log(start_msg).unwrap();
1990+
1991+
// level as a number
1992+
let event = TelemetryEvent {
1993+
time: Utc.with_ymd_and_hms(2023, 1, 7, 3, 23, 48).unwrap(),
1994+
record: TelemetryRecord::Function(Value::String(
1995+
r#"{"level":42,"message":"numeric level"}"#.to_string(),
1996+
)),
1997+
};
1998+
let lambda_message = processor.get_message(event).await.unwrap();
1999+
let intake_log = processor.get_intake_log(lambda_message).unwrap();
2000+
assert_eq!(intake_log.message.status, "info");
2001+
2002+
// level as null
2003+
let event = TelemetryEvent {
2004+
time: Utc.with_ymd_and_hms(2023, 1, 7, 3, 23, 49).unwrap(),
2005+
record: TelemetryRecord::Function(Value::String(
2006+
r#"{"level":null,"message":"null level"}"#.to_string(),
2007+
)),
2008+
};
2009+
let lambda_message = processor.get_message(event).await.unwrap();
2010+
let intake_log = processor.get_intake_log(lambda_message).unwrap();
2011+
assert_eq!(intake_log.message.status, "info");
2012+
2013+
// level as a boolean
2014+
let event = TelemetryEvent {
2015+
time: Utc.with_ymd_and_hms(2023, 1, 7, 3, 23, 50).unwrap(),
2016+
record: TelemetryRecord::Function(Value::String(
2017+
r#"{"level":true,"message":"bool level"}"#.to_string(),
2018+
)),
2019+
};
2020+
let lambda_message = processor.get_message(event).await.unwrap();
2021+
let intake_log = processor.get_intake_log(lambda_message).unwrap();
2022+
assert_eq!(intake_log.message.status, "info");
2023+
}
2024+
2025+
#[tokio::test]
2026+
async fn test_get_intake_log_ddtags_and_level_combined() {
2027+
let config = Arc::new(config::Config {
2028+
service: Some("test-service".to_string()),
2029+
tags: HashMap::from([("test".to_string(), "tags".to_string())]),
2030+
..config::Config::default()
2031+
});
2032+
2033+
let tags_provider = Arc::new(provider::Provider::new(
2034+
Arc::clone(&config),
2035+
LAMBDA_RUNTIME_SLUG.to_string(),
2036+
&HashMap::from([("function_arn".to_string(), "test-arn".to_string())]),
2037+
));
2038+
2039+
let (tx, _rx) = tokio::sync::mpsc::channel(2);
2040+
let mut processor =
2041+
LambdaProcessor::new(tags_provider, Arc::clone(&config), tx.clone(), false);
2042+
2043+
// Set request_id
2044+
let start_event = TelemetryEvent {
2045+
time: Utc.with_ymd_and_hms(2023, 1, 7, 3, 23, 47).unwrap(),
2046+
record: TelemetryRecord::PlatformStart {
2047+
request_id: "test-request-id".to_string(),
2048+
version: Some("test".to_string()),
2049+
},
2050+
};
2051+
let start_msg = processor.get_message(start_event).await.unwrap();
2052+
processor.get_intake_log(start_msg).unwrap();
2053+
2054+
// JSON log with both ddtags and level
2055+
let event = TelemetryEvent {
2056+
time: Utc.with_ymd_and_hms(2023, 1, 7, 3, 23, 48).unwrap(),
2057+
record: TelemetryRecord::Function(Value::String(
2058+
r#"{"level":"WARN","message":"warning with tags","ddtags":"env:staging,team:backend"}"#
2059+
.to_string(),
2060+
)),
2061+
};
2062+
let lambda_message = processor.get_message(event).await.unwrap();
2063+
let intake_log = processor.get_intake_log(lambda_message).unwrap();
2064+
2065+
// Level should be extracted
2066+
assert_eq!(intake_log.message.status, "warn");
2067+
// Tags should be extracted and appended
2068+
assert!(intake_log.tags.contains("env:staging"));
2069+
assert!(intake_log.tags.contains("team:backend"));
2070+
// ddtags should be removed from the message
2071+
assert!(!intake_log.message.message.contains("ddtags"));
2072+
}
2073+
2074+
#[tokio::test]
2075+
async fn test_get_intake_log_status_field_fallback() {
2076+
let config = Arc::new(config::Config {
2077+
service: Some("test-service".to_string()),
2078+
tags: HashMap::from([("test".to_string(), "tags".to_string())]),
2079+
..config::Config::default()
2080+
});
2081+
2082+
let tags_provider = Arc::new(provider::Provider::new(
2083+
Arc::clone(&config),
2084+
LAMBDA_RUNTIME_SLUG.to_string(),
2085+
&HashMap::from([("function_arn".to_string(), "test-arn".to_string())]),
2086+
));
2087+
2088+
let (tx, _rx) = tokio::sync::mpsc::channel(2);
2089+
let mut processor =
2090+
LambdaProcessor::new(tags_provider, Arc::clone(&config), tx.clone(), false);
2091+
2092+
// Set request_id
2093+
let start_event = TelemetryEvent {
2094+
time: Utc.with_ymd_and_hms(2023, 1, 7, 3, 23, 47).unwrap(),
2095+
record: TelemetryRecord::PlatformStart {
2096+
request_id: "test-request-id".to_string(),
2097+
version: Some("test".to_string()),
2098+
},
2099+
};
2100+
let start_msg = processor.get_message(start_event).await.unwrap();
2101+
processor.get_intake_log(start_msg).unwrap();
2102+
2103+
// JSON log with "status" field instead of "level" (Datadog convention)
2104+
let event = TelemetryEvent {
2105+
time: Utc.with_ymd_and_hms(2023, 1, 7, 3, 23, 48).unwrap(),
2106+
record: TelemetryRecord::Function(Value::String(
2107+
r#"{"status":"error","message":"something failed"}"#.to_string(),
2108+
)),
2109+
};
2110+
let lambda_message = processor.get_message(event).await.unwrap();
2111+
let intake_log = processor.get_intake_log(lambda_message).unwrap();
2112+
assert_eq!(intake_log.message.status, "error");
2113+
2114+
// "level" takes priority over "status" when both are present
2115+
let event = TelemetryEvent {
2116+
time: Utc.with_ymd_and_hms(2023, 1, 7, 3, 23, 49).unwrap(),
2117+
record: TelemetryRecord::Function(Value::String(
2118+
r#"{"level":"WARN","status":"error","message":"both fields"}"#.to_string(),
2119+
)),
2120+
};
2121+
let lambda_message = processor.get_message(event).await.unwrap();
2122+
let intake_log = processor.get_intake_log(lambda_message).unwrap();
2123+
assert_eq!(intake_log.message.status, "warn");
2124+
}
19592125
}

0 commit comments

Comments (0)