crates/common/src/auction/orchestrator.rs (6 changes: 5 additions & 1 deletion)
@@ -724,7 +724,11 @@ mod tests {
             "Slot-1 should still be present"
         );
         assert!(
-            filtered.get("slot-1").unwrap().price.is_none(),
+            filtered
+                .get("slot-1")
+                .expect("slot-1 should be present")
+                .price
+                .is_none(),
             "Price should still be None (not decoded yet)"
         );
     }
crates/common/src/auth.rs (4 changes: 3 additions & 1 deletion)
@@ -82,7 +82,9 @@ mod tests {
 
         let response = enforce_basic_auth(&settings, &req).expect("should challenge");
         assert_eq!(response.get_status(), StatusCode::UNAUTHORIZED);
-        let realm = response.get_header(header::WWW_AUTHENTICATE).unwrap();
+        let realm = response
+            .get_header(header::WWW_AUTHENTICATE)
+            .expect("should have WWW-Authenticate header");
         assert_eq!(realm, BASIC_AUTH_REALM);
     }
 
crates/common/src/backend.rs (17 changes: 11 additions & 6 deletions)
@@ -108,35 +108,40 @@ mod tests {
 
     #[test]
     fn returns_name_for_https_no_port() {
-        let name = ensure_origin_backend("https", "origin.example.com", None).unwrap();
+        let name = ensure_origin_backend("https", "origin.example.com", None)
+            .expect("should create backend for https without port");
         assert_eq!(name, "backend_https_origin_example_com_443");
     }
 
     #[test]
     fn returns_name_for_http_with_port_and_sanitizes() {
-        let name = ensure_origin_backend("http", "api.test-site.org", Some(8080)).unwrap();
+        let name = ensure_origin_backend("http", "api.test-site.org", Some(8080))
+            .expect("should create backend for http with custom port");
         assert_eq!(name, "backend_http_api_test-site_org_8080");
         // Explicitly check that ':' was replaced with '_'
         assert!(name.ends_with("_8080"));
     }
 
     #[test]
     fn returns_name_for_http_without_port_defaults_to_80() {
-        let name = ensure_origin_backend("http", "example.org", None).unwrap();
+        let name = ensure_origin_backend("http", "example.org", None)
+            .expect("should create backend for http defaulting to port 80");
         assert_eq!(name, "backend_http_example_org_80");
     }
 
     #[test]
     fn error_on_missing_host() {
-        let err = ensure_origin_backend("https", "", None).err().unwrap();
+        let err = ensure_origin_backend("https", "", None).expect_err("should error on empty host");
         let msg = err.to_string();
         assert!(msg.contains("missing host"));
     }
 
     #[test]
     fn second_call_reuses_existing_backend() {
-        let first = ensure_origin_backend("https", "reuse.example.com", None).unwrap();
-        let second = ensure_origin_backend("https", "reuse.example.com", None).unwrap();
+        let first = ensure_origin_backend("https", "reuse.example.com", None)
+            .expect("should create backend first time");
+        let second = ensure_origin_backend("https", "reuse.example.com", None)
+            .expect("should reuse existing backend");
         assert_eq!(first, second);
     }
 }
crates/common/src/cookies.rs (10 changes: 5 additions & 5 deletions)
@@ -83,8 +83,8 @@ mod tests {
         let jar = parse_cookies_to_jar(header_value);
 
         assert!(jar.iter().count() == 2);
-        assert_eq!(jar.get("c1").unwrap().value(), "v1");
-        assert_eq!(jar.get("c2").unwrap().value(), "v2");
+        assert_eq!(jar.get("c1").expect("should have cookie c1").value(), "v1");
+        assert_eq!(jar.get("c2").expect("should have cookie c2").value(), "v2");
     }
 
     #[test]
@@ -93,7 +93,7 @@ mod tests {
         let jar = parse_cookies_to_jar(cookie_str);
 
         assert!(jar.iter().count() == 1);
-        assert_eq!(jar.get("c1").unwrap().value(), "v2");
+        assert_eq!(jar.get("c1").expect("should have cookie c1").value(), "v2");
     }
 
     #[test]
@@ -120,8 +120,8 @@ mod tests {
             .expect("should have cookie jar");
 
         assert!(jar.iter().count() == 2);
-        assert_eq!(jar.get("c1").unwrap().value(), "v1");
-        assert_eq!(jar.get("c2").unwrap().value(), "v2");
+        assert_eq!(jar.get("c1").expect("should have cookie c1").value(), "v1");
+        assert_eq!(jar.get("c2").expect("should have cookie c2").value(), "v2");
     }
 
     #[test]
crates/common/src/html_processor.rs (24 changes: 14 additions & 10 deletions)
@@ -521,8 +521,8 @@ mod tests {
         let mut output = Vec::new();
         pipeline
             .process(Cursor::new(html.as_bytes()), &mut output)
-            .unwrap();
-        let processed = String::from_utf8(output).unwrap();
+            .expect("pipeline should process HTML");
+        let processed = String::from_utf8(output).expect("output should be valid UTF-8");
 
         assert!(processed.contains("keep-me"));
         assert!(!processed.contains("remove-me"));
@@ -554,9 +554,9 @@ mod tests {
         let mut output = Vec::new();
         pipeline
             .process(Cursor::new(html.as_bytes()), &mut output)
-            .unwrap();
+            .expect("pipeline should process HTML");
 
-        let result = String::from_utf8(output).unwrap();
+        let result = String::from_utf8(output).expect("output should be valid UTF-8");
         assert!(result.contains(r#"href="https://test.example.com/page""#));
         assert!(result.contains(r#"href="//test.example.com/proto""#));
         assert!(result.contains(r#"href="test.example.com/bare""#));
@@ -615,8 +615,8 @@ mod tests {
         let mut output = Vec::new();
         pipeline
             .process(Cursor::new(html.as_bytes()), &mut output)
-            .unwrap();
-        let result = String::from_utf8(output).unwrap();
+            .expect("pipeline should process HTML");
+        let result = String::from_utf8(output).expect("output should be valid UTF-8");
 
         // Assertions - only URL attribute replacements are expected
         // Check URL replacements (not all occurrences will be replaced since
@@ -717,8 +717,10 @@ mod tests {
 
         // Compress
         let mut encoder = GzEncoder::new(Vec::new(), GzCompression::default());
-        encoder.write_all(html.as_bytes()).unwrap();
-        let compressed_input = encoder.finish().unwrap();
+        encoder
+            .write_all(html.as_bytes())
+            .expect("should write to gzip encoder");
+        let compressed_input = encoder.finish().expect("should finish gzip encoding");
 
         println!("Compressed input size: {} bytes", compressed_input.len());
 
@@ -738,7 +740,7 @@ mod tests {
         let mut compressed_output = Vec::new();
         pipeline
             .process(Cursor::new(&compressed_input), &mut compressed_output)
-            .unwrap();
+            .expect("pipeline should process gzipped HTML");
 
         // Ensure we produced output
         assert!(
@@ -749,7 +751,9 @@ mod tests {
         // Decompress and verify
         let mut decoder = GzDecoder::new(&compressed_output[..]);
         let mut decompressed = String::new();
-        decoder.read_to_string(&mut decompressed).unwrap();
+        decoder
+            .read_to_string(&mut decompressed)
+            .expect("should decompress gzip output");
 
         let remaining_urls = decompressed.matches("www.test-publisher.com").count();
         let replaced_urls = decompressed
crates/common/src/http_util.rs (9 changes: 8 additions & 1 deletion)
@@ -277,12 +277,19 @@ pub fn verify_clear_url_signature(settings: &Settings, clear_url: &str, token: &
 /// 2) Base64-decode the `x1||nonce||ciphertext+tag` bytes
 /// 3) Compute SHA-256 over those bytes
 /// 4) Return Base64 URL-safe (no padding) digest as `tstoken`
+///
+/// # Panics
+///
+/// This function will not panic under normal circumstances. The internal base64 decode
+/// cannot fail because it operates on data that was just encoded by `encode_url`.
 #[must_use]
 pub fn compute_encrypted_sha256_token(settings: &Settings, full_url: &str) -> String {
     // Encrypt deterministically using existing helper
     let enc = encode_url(settings, full_url);
     // Decode to raw bytes (x1 + nonce + ciphertext+tag)
-    let raw = URL_SAFE_NO_PAD.decode(enc.as_bytes()).unwrap_or_default();
+    let raw = URL_SAFE_NO_PAD
+        .decode(enc.as_bytes())
+        .expect("decode must succeed for just-encoded data");
     let digest = Sha256::digest(&raw);
     URL_SAFE_NO_PAD.encode(digest)
 }
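Editor's note on the doc comment above: the token derivation is encrypt, Base64-decode, SHA-256, then URL-safe Base64. Below is a minimal sketch of only the decode/hash/encode tail, assuming the stock `base64` and `sha2` crates; `token_from_encrypted` is a hypothetical helper introduced for illustration, and the project's `encode_url` and `Settings` are deliberately left out.

use base64::engine::general_purpose::URL_SAFE_NO_PAD;
use base64::Engine as _;
use sha2::{Digest, Sha256};

// Hypothetical helper covering steps 2-4 of the doc comment; step 1 (encryption)
// is assumed to have already produced `encrypted`.
fn token_from_encrypted(encrypted: &str) -> Option<String> {
    // Step 2: recover the raw `x1 || nonce || ciphertext+tag` bytes.
    let raw = URL_SAFE_NO_PAD.decode(encrypted.as_bytes()).ok()?;
    // Step 3: hash the raw encrypted bytes.
    let digest = Sha256::digest(&raw);
    // Step 4: return the digest as URL-safe Base64 without padding.
    Some(URL_SAFE_NO_PAD.encode(digest))
}

The sketch returns `Option` only because it accepts arbitrary input; the PR's `compute_encrypted_sha256_token` can use `expect` because its input was just produced by `encode_url` and therefore always decodes.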
crates/common/src/integrations/adserver_mock.rs (14 changes: 10 additions & 4 deletions)
@@ -470,20 +470,26 @@ mod tests {
 
         auction_request.context.insert(
             "provider_responses".to_string(),
-            serde_json::to_value(&bidder_responses).unwrap(),
+            serde_json::to_value(&bidder_responses).expect("should serialize bidder responses"),
         );
 
         let mediation_req = provider
             .build_mediation_request(&auction_request, &bidder_responses)
-            .unwrap();
+            .expect("should build mediation request");
 
         // Verify structure
         assert_eq!(mediation_req["id"], "test-auction-123");
-        assert_eq!(mediation_req["imp"].as_array().unwrap().len(), 1);
+        assert_eq!(
+            mediation_req["imp"]
+                .as_array()
+                .expect("imp should be array")
+                .len(),
+            1
+        );
         assert_eq!(
             mediation_req["ext"]["bidder_responses"]
                 .as_array()
-                .unwrap()
+                .expect("bidder_responses should be array")
                 .len(),
             2
         );
crates/common/src/integrations/didomi.rs (2 changes: 1 addition & 1 deletion)
@@ -261,7 +261,7 @@ mod tests {
         let integration = DidomiIntegration::new(Arc::new(config(true)));
         let url = integration
             .build_target_url("https://sdk.privacy-center.org", "/loader.js", Some("v=1"))
-            .unwrap();
+            .expect("should build target URL");
         assert_eq!(url, "https://sdk.privacy-center.org/loader.js?v=1");
     }
 
crates/common/src/integrations/lockr.rs (17 changes: 12 additions & 5 deletions)
@@ -502,7 +502,9 @@ mod tests {
     fn test_api_path_extraction_with_camel_case() {
         // Test that we properly extract paths with correct casing
         let path = "/integrations/lockr/api/publisher/app/v1/identityLockr/settings";
-        let extracted = path.strip_prefix("/integrations/lockr/api").unwrap();
+        let extracted = path
+            .strip_prefix("/integrations/lockr/api")
+            .expect("should strip prefix");
         assert_eq!(extracted, "/publisher/app/v1/identityLockr/settings");
     }
 
@@ -525,7 +527,9 @@ mod tests {
         ];
 
         for (input, expected) in test_cases {
-            let result = input.strip_prefix("/integrations/lockr/api").unwrap();
+            let result = input
+                .strip_prefix("/integrations/lockr/api")
+                .expect("should strip prefix");
             assert_eq!(result, expected, "Failed for input: {}", input);
         }
     }
@@ -589,7 +593,8 @@ const identityLockr = {
         let result = integration.rewrite_sdk_host(mock_sdk_old.as_bytes().to_vec());
         assert!(result.is_ok());
 
-        let rewritten = String::from_utf8(result.unwrap()).unwrap();
+        let rewritten = String::from_utf8(result.expect("should rewrite SDK host"))
+            .expect("should be valid UTF-8");
 
         // Verify the host was rewritten to the proxy endpoint
         assert!(rewritten.contains("'host': '/integrations/lockr/api'"));
@@ -630,7 +635,8 @@ const identityLockr = {
         let result = integration.rewrite_sdk_host(mock_sdk_real.as_bytes().to_vec());
         assert!(result.is_ok());
 
-        let rewritten = String::from_utf8(result.unwrap()).unwrap();
+        let rewritten = String::from_utf8(result.expect("should rewrite SDK host"))
+            .expect("should be valid UTF-8");
 
         // Verify the host was rewritten to the proxy endpoint
         assert!(rewritten.contains("'host': '/integrations/lockr/api'"));
@@ -688,7 +694,8 @@ const identityLockr = {
         let result = integration.rewrite_sdk_host(mock_sdk.as_bytes().to_vec());
         assert!(result.is_ok());
 
-        let rewritten = String::from_utf8(result.unwrap()).unwrap();
+        let rewritten = String::from_utf8(result.expect("should rewrite SDK host"))
+            .expect("should be valid UTF-8");
 
         // When pattern doesn't match, content should be unchanged
         assert!(rewritten.contains("'host': 'https://example.com'"));
crates/common/src/integrations/nextjs/mod.rs (12 changes: 6 additions & 6 deletions)
@@ -156,7 +156,7 @@ mod tests {
         let mut output = Vec::new();
         pipeline
             .process(Cursor::new(html.as_bytes()), &mut output)
-            .unwrap();
+            .expect("pipeline should process HTML");
         let processed = String::from_utf8_lossy(&output);
 
         // Note: URLs may have padding characters for length preservation
@@ -219,7 +219,7 @@ mod tests {
         let mut output = Vec::new();
         pipeline
             .process(Cursor::new(html.as_bytes()), &mut output)
-            .unwrap();
+            .expect("pipeline should process HTML");
 
         let final_html = String::from_utf8_lossy(&output);
 
@@ -267,7 +267,7 @@ mod tests {
         let mut output = Vec::new();
         pipeline
             .process(Cursor::new(html.as_bytes()), &mut output)
-            .unwrap();
+            .expect("pipeline should process HTML");
 
         let final_html = String::from_utf8_lossy(&output);
 
@@ -318,7 +318,7 @@ mod tests {
         let mut output = Vec::new();
         pipeline
             .process(Cursor::new(html.as_bytes()), &mut output)
-            .unwrap();
+            .expect("pipeline should process HTML");
 
         let final_html = String::from_utf8_lossy(&output);
 
@@ -384,7 +384,7 @@ mod tests {
         let mut output = Vec::new();
         pipeline
             .process(Cursor::new(html.as_bytes()), &mut output)
-            .unwrap();
+            .expect("pipeline should process HTML");
         let final_html = String::from_utf8_lossy(&output);
 
         // RSC payloads should be rewritten via post-processing
@@ -457,7 +457,7 @@ mod tests {
         let mut output = Vec::new();
         pipeline
             .process(Cursor::new(html.as_bytes()), &mut output)
-            .unwrap();
+            .expect("pipeline should process HTML");
         let final_html = String::from_utf8_lossy(&output);
 
         // Non-RSC scripts should be preserved
crates/common/src/integrations/prebid.rs (12 changes: 9 additions & 3 deletions)
@@ -952,7 +952,7 @@ mod tests {
         for url_field in ["nurl", "burl"] {
             let value = response["seatbid"][0]["bid"][0][url_field]
                 .as_str()
-                .unwrap();
+                .expect("should get tracking URL");
             assert!(
                 value.contains("/ad-proxy/track/"),
                 "tracking URLs should be proxied"
@@ -969,11 +969,17 @@ mod tests {
             "proxy prefix should be applied"
         );
 
-        let encoded = rewritten.split("/ad-proxy/track/").nth(1).unwrap();
+        let encoded = rewritten
+            .split("/ad-proxy/track/")
+            .nth(1)
+            .expect("should have encoded payload after proxy prefix");
         let decoded = BASE64
             .decode(encoded.as_bytes())
             .expect("should decode base64 proxy payload");
-        assert_eq!(String::from_utf8(decoded).unwrap(), url);
+        assert_eq!(
+            String::from_utf8(decoded).expect("should be valid UTF-8"),
+            url
+        );
     }
 
     #[test]