diff --git a/crates/common/src/auction/orchestrator.rs b/crates/common/src/auction/orchestrator.rs index c53cc9b9..54aac10d 100644 --- a/crates/common/src/auction/orchestrator.rs +++ b/crates/common/src/auction/orchestrator.rs @@ -724,7 +724,11 @@ mod tests { "Slot-1 should still be present" ); assert!( - filtered.get("slot-1").unwrap().price.is_none(), + filtered + .get("slot-1") + .expect("slot-1 should be present") + .price + .is_none(), "Price should still be None (not decoded yet)" ); } diff --git a/crates/common/src/auth.rs b/crates/common/src/auth.rs index a85abf37..ba669e6a 100644 --- a/crates/common/src/auth.rs +++ b/crates/common/src/auth.rs @@ -82,7 +82,9 @@ mod tests { let response = enforce_basic_auth(&settings, &req).expect("should challenge"); assert_eq!(response.get_status(), StatusCode::UNAUTHORIZED); - let realm = response.get_header(header::WWW_AUTHENTICATE).unwrap(); + let realm = response + .get_header(header::WWW_AUTHENTICATE) + .expect("should have WWW-Authenticate header"); assert_eq!(realm, BASIC_AUTH_REALM); } diff --git a/crates/common/src/backend.rs b/crates/common/src/backend.rs index 798131f4..6d33088e 100644 --- a/crates/common/src/backend.rs +++ b/crates/common/src/backend.rs @@ -108,13 +108,15 @@ mod tests { #[test] fn returns_name_for_https_no_port() { - let name = ensure_origin_backend("https", "origin.example.com", None).unwrap(); + let name = ensure_origin_backend("https", "origin.example.com", None) + .expect("should create backend for https without port"); assert_eq!(name, "backend_https_origin_example_com_443"); } #[test] fn returns_name_for_http_with_port_and_sanitizes() { - let name = ensure_origin_backend("http", "api.test-site.org", Some(8080)).unwrap(); + let name = ensure_origin_backend("http", "api.test-site.org", Some(8080)) + .expect("should create backend for http with custom port"); assert_eq!(name, "backend_http_api_test-site_org_8080"); // Explicitly check that ':' was replaced with '_' assert!(name.ends_with("_8080")); @@ -122,21 +124,24 @@ mod tests { #[test] fn returns_name_for_http_without_port_defaults_to_80() { - let name = ensure_origin_backend("http", "example.org", None).unwrap(); + let name = ensure_origin_backend("http", "example.org", None) + .expect("should create backend for http defaulting to port 80"); assert_eq!(name, "backend_http_example_org_80"); } #[test] fn error_on_missing_host() { - let err = ensure_origin_backend("https", "", None).err().unwrap(); + let err = ensure_origin_backend("https", "", None).expect_err("should error on empty host"); let msg = err.to_string(); assert!(msg.contains("missing host")); } #[test] fn second_call_reuses_existing_backend() { - let first = ensure_origin_backend("https", "reuse.example.com", None).unwrap(); - let second = ensure_origin_backend("https", "reuse.example.com", None).unwrap(); + let first = ensure_origin_backend("https", "reuse.example.com", None) + .expect("should create backend first time"); + let second = ensure_origin_backend("https", "reuse.example.com", None) + .expect("should reuse existing backend"); assert_eq!(first, second); } } diff --git a/crates/common/src/cookies.rs b/crates/common/src/cookies.rs index 7511ed2b..fbcce2be 100644 --- a/crates/common/src/cookies.rs +++ b/crates/common/src/cookies.rs @@ -83,8 +83,8 @@ mod tests { let jar = parse_cookies_to_jar(header_value); assert!(jar.iter().count() == 2); - assert_eq!(jar.get("c1").unwrap().value(), "v1"); - assert_eq!(jar.get("c2").unwrap().value(), "v2"); + assert_eq!(jar.get("c1").expect("should have 
cookie c1").value(), "v1"); + assert_eq!(jar.get("c2").expect("should have cookie c2").value(), "v2"); } #[test] @@ -93,7 +93,7 @@ mod tests { let jar = parse_cookies_to_jar(cookie_str); assert!(jar.iter().count() == 1); - assert_eq!(jar.get("c1").unwrap().value(), "v2"); + assert_eq!(jar.get("c1").expect("should have cookie c1").value(), "v2"); } #[test] @@ -120,8 +120,8 @@ mod tests { .expect("should have cookie jar"); assert!(jar.iter().count() == 2); - assert_eq!(jar.get("c1").unwrap().value(), "v1"); - assert_eq!(jar.get("c2").unwrap().value(), "v2"); + assert_eq!(jar.get("c1").expect("should have cookie c1").value(), "v1"); + assert_eq!(jar.get("c2").expect("should have cookie c2").value(), "v2"); } #[test] diff --git a/crates/common/src/html_processor.rs b/crates/common/src/html_processor.rs index e3d827fb..f4909aa7 100644 --- a/crates/common/src/html_processor.rs +++ b/crates/common/src/html_processor.rs @@ -521,8 +521,8 @@ mod tests { let mut output = Vec::new(); pipeline .process(Cursor::new(html.as_bytes()), &mut output) - .unwrap(); - let processed = String::from_utf8(output).unwrap(); + .expect("pipeline should process HTML"); + let processed = String::from_utf8(output).expect("output should be valid UTF-8"); assert!(processed.contains("keep-me")); assert!(!processed.contains("remove-me")); @@ -554,9 +554,9 @@ mod tests { let mut output = Vec::new(); pipeline .process(Cursor::new(html.as_bytes()), &mut output) - .unwrap(); + .expect("pipeline should process HTML"); - let result = String::from_utf8(output).unwrap(); + let result = String::from_utf8(output).expect("output should be valid UTF-8"); assert!(result.contains(r#"href="https://test.example.com/page""#)); assert!(result.contains(r#"href="//test.example.com/proto""#)); assert!(result.contains(r#"href="test.example.com/bare""#)); @@ -615,8 +615,8 @@ mod tests { let mut output = Vec::new(); pipeline .process(Cursor::new(html.as_bytes()), &mut output) - .unwrap(); - let result = String::from_utf8(output).unwrap(); + .expect("pipeline should process HTML"); + let result = String::from_utf8(output).expect("output should be valid UTF-8"); // Assertions - only URL attribute replacements are expected // Check URL replacements (not all occurrences will be replaced since @@ -717,8 +717,10 @@ mod tests { // Compress let mut encoder = GzEncoder::new(Vec::new(), GzCompression::default()); - encoder.write_all(html.as_bytes()).unwrap(); - let compressed_input = encoder.finish().unwrap(); + encoder + .write_all(html.as_bytes()) + .expect("should write to gzip encoder"); + let compressed_input = encoder.finish().expect("should finish gzip encoding"); println!("Compressed input size: {} bytes", compressed_input.len()); @@ -738,7 +740,7 @@ mod tests { let mut compressed_output = Vec::new(); pipeline .process(Cursor::new(&compressed_input), &mut compressed_output) - .unwrap(); + .expect("pipeline should process gzipped HTML"); // Ensure we produced output assert!( @@ -749,7 +751,9 @@ mod tests { // Decompress and verify let mut decoder = GzDecoder::new(&compressed_output[..]); let mut decompressed = String::new(); - decoder.read_to_string(&mut decompressed).unwrap(); + decoder + .read_to_string(&mut decompressed) + .expect("should decompress gzip output"); let remaining_urls = decompressed.matches("www.test-publisher.com").count(); let replaced_urls = decompressed diff --git a/crates/common/src/http_util.rs b/crates/common/src/http_util.rs index 30f0e438..956047aa 100644 --- a/crates/common/src/http_util.rs +++ 
b/crates/common/src/http_util.rs @@ -277,12 +277,19 @@ pub fn verify_clear_url_signature(settings: &Settings, clear_url: &str, token: & /// 2) Base64-decode the `x1||nonce||ciphertext+tag` bytes /// 3) Compute SHA-256 over those bytes /// 4) Return Base64 URL-safe (no padding) digest as `tstoken` +/// +/// # Panics +/// +/// This function will not panic under normal circumstances. The internal base64 decode +/// cannot fail because it operates on data that was just encoded by `encode_url`. #[must_use] pub fn compute_encrypted_sha256_token(settings: &Settings, full_url: &str) -> String { // Encrypt deterministically using existing helper let enc = encode_url(settings, full_url); // Decode to raw bytes (x1 + nonce + ciphertext+tag) - let raw = URL_SAFE_NO_PAD.decode(enc.as_bytes()).unwrap_or_default(); + let raw = URL_SAFE_NO_PAD + .decode(enc.as_bytes()) + .expect("decode must succeed for just-encoded data"); let digest = Sha256::digest(&raw); URL_SAFE_NO_PAD.encode(digest) } diff --git a/crates/common/src/integrations/adserver_mock.rs b/crates/common/src/integrations/adserver_mock.rs index 253a7a88..b84625e5 100644 --- a/crates/common/src/integrations/adserver_mock.rs +++ b/crates/common/src/integrations/adserver_mock.rs @@ -470,20 +470,26 @@ mod tests { auction_request.context.insert( "provider_responses".to_string(), - serde_json::to_value(&bidder_responses).unwrap(), + serde_json::to_value(&bidder_responses).expect("should serialize bidder responses"), ); let mediation_req = provider .build_mediation_request(&auction_request, &bidder_responses) - .unwrap(); + .expect("should build mediation request"); // Verify structure assert_eq!(mediation_req["id"], "test-auction-123"); - assert_eq!(mediation_req["imp"].as_array().unwrap().len(), 1); + assert_eq!( + mediation_req["imp"] + .as_array() + .expect("imp should be array") + .len(), + 1 + ); assert_eq!( mediation_req["ext"]["bidder_responses"] .as_array() - .unwrap() + .expect("bidder_responses should be array") .len(), 2 ); diff --git a/crates/common/src/integrations/didomi.rs b/crates/common/src/integrations/didomi.rs index 457a39bf..b279f7e3 100644 --- a/crates/common/src/integrations/didomi.rs +++ b/crates/common/src/integrations/didomi.rs @@ -261,7 +261,7 @@ mod tests { let integration = DidomiIntegration::new(Arc::new(config(true))); let url = integration .build_target_url("https://sdk.privacy-center.org", "/loader.js", Some("v=1")) - .unwrap(); + .expect("should build target URL"); assert_eq!(url, "https://sdk.privacy-center.org/loader.js?v=1"); } diff --git a/crates/common/src/integrations/lockr.rs b/crates/common/src/integrations/lockr.rs index 83c10dfa..6a5dd30f 100644 --- a/crates/common/src/integrations/lockr.rs +++ b/crates/common/src/integrations/lockr.rs @@ -502,7 +502,9 @@ mod tests { fn test_api_path_extraction_with_camel_case() { // Test that we properly extract paths with correct casing let path = "/integrations/lockr/api/publisher/app/v1/identityLockr/settings"; - let extracted = path.strip_prefix("/integrations/lockr/api").unwrap(); + let extracted = path + .strip_prefix("/integrations/lockr/api") + .expect("should strip prefix"); assert_eq!(extracted, "/publisher/app/v1/identityLockr/settings"); } @@ -525,7 +527,9 @@ mod tests { ]; for (input, expected) in test_cases { - let result = input.strip_prefix("/integrations/lockr/api").unwrap(); + let result = input + .strip_prefix("/integrations/lockr/api") + .expect("should strip prefix"); assert_eq!(result, expected, "Failed for input: {}", input); } } @@ -589,7 +593,8 @@ 
const identityLockr = { let result = integration.rewrite_sdk_host(mock_sdk_old.as_bytes().to_vec()); assert!(result.is_ok()); - let rewritten = String::from_utf8(result.unwrap()).unwrap(); + let rewritten = String::from_utf8(result.expect("should rewrite SDK host")) + .expect("should be valid UTF-8"); // Verify the host was rewritten to the proxy endpoint assert!(rewritten.contains("'host': '/integrations/lockr/api'")); @@ -630,7 +635,8 @@ const identityLockr = { let result = integration.rewrite_sdk_host(mock_sdk_real.as_bytes().to_vec()); assert!(result.is_ok()); - let rewritten = String::from_utf8(result.unwrap()).unwrap(); + let rewritten = String::from_utf8(result.expect("should rewrite SDK host")) + .expect("should be valid UTF-8"); // Verify the host was rewritten to the proxy endpoint assert!(rewritten.contains("'host': '/integrations/lockr/api'")); @@ -688,7 +694,8 @@ const identityLockr = { let result = integration.rewrite_sdk_host(mock_sdk.as_bytes().to_vec()); assert!(result.is_ok()); - let rewritten = String::from_utf8(result.unwrap()).unwrap(); + let rewritten = String::from_utf8(result.expect("should rewrite SDK host")) + .expect("should be valid UTF-8"); // When pattern doesn't match, content should be unchanged assert!(rewritten.contains("'host': 'https://example.com'")); diff --git a/crates/common/src/integrations/nextjs/mod.rs b/crates/common/src/integrations/nextjs/mod.rs index 522dae01..549e420d 100644 --- a/crates/common/src/integrations/nextjs/mod.rs +++ b/crates/common/src/integrations/nextjs/mod.rs @@ -156,7 +156,7 @@ mod tests { let mut output = Vec::new(); pipeline .process(Cursor::new(html.as_bytes()), &mut output) - .unwrap(); + .expect("pipeline should process HTML"); let processed = String::from_utf8_lossy(&output); // Note: URLs may have padding characters for length preservation @@ -219,7 +219,7 @@ mod tests { let mut output = Vec::new(); pipeline .process(Cursor::new(html.as_bytes()), &mut output) - .unwrap(); + .expect("pipeline should process HTML"); let final_html = String::from_utf8_lossy(&output); @@ -267,7 +267,7 @@ mod tests { let mut output = Vec::new(); pipeline .process(Cursor::new(html.as_bytes()), &mut output) - .unwrap(); + .expect("pipeline should process HTML"); let final_html = String::from_utf8_lossy(&output); @@ -318,7 +318,7 @@ mod tests { let mut output = Vec::new(); pipeline .process(Cursor::new(html.as_bytes()), &mut output) - .unwrap(); + .expect("pipeline should process HTML"); let final_html = String::from_utf8_lossy(&output); @@ -384,7 +384,7 @@ mod tests { let mut output = Vec::new(); pipeline .process(Cursor::new(html.as_bytes()), &mut output) - .unwrap(); + .expect("pipeline should process HTML"); let final_html = String::from_utf8_lossy(&output); // RSC payloads should be rewritten via post-processing @@ -457,7 +457,7 @@ mod tests { let mut output = Vec::new(); pipeline .process(Cursor::new(html.as_bytes()), &mut output) - .unwrap(); + .expect("pipeline should process HTML"); let final_html = String::from_utf8_lossy(&output); // Non-RSC scripts should be preserved diff --git a/crates/common/src/integrations/prebid.rs b/crates/common/src/integrations/prebid.rs index 8432fdec..2a5ddba6 100644 --- a/crates/common/src/integrations/prebid.rs +++ b/crates/common/src/integrations/prebid.rs @@ -952,7 +952,7 @@ mod tests { for url_field in ["nurl", "burl"] { let value = response["seatbid"][0]["bid"][0][url_field] .as_str() - .unwrap(); + .expect("should get tracking URL"); assert!( value.contains("/ad-proxy/track/"), "tracking URLs 
should be proxied" @@ -969,11 +969,17 @@ mod tests { "proxy prefix should be applied" ); - let encoded = rewritten.split("/ad-proxy/track/").nth(1).unwrap(); + let encoded = rewritten + .split("/ad-proxy/track/") + .nth(1) + .expect("should have encoded payload after proxy prefix"); let decoded = BASE64 .decode(encoded.as_bytes()) .expect("should decode base64 proxy payload"); - assert_eq!(String::from_utf8(decoded).unwrap(), url); + assert_eq!( + String::from_utf8(decoded).expect("should be valid UTF-8"), + url + ); } #[test] diff --git a/crates/common/src/models.rs b/crates/common/src/models.rs index 7789b166..08a32cec 100644 --- a/crates/common/src/models.rs +++ b/crates/common/src/models.rs @@ -61,7 +61,8 @@ mod tests { "url": "https://example.com/track/impression" }); - let callback: Callback = serde_json::from_value(json_data).unwrap(); + let callback: Callback = + serde_json::from_value(json_data).expect("should deserialize callback"); assert_eq!(callback.callback_type, "impression"); assert_eq!(callback.url, "https://example.com/track/impression"); } @@ -74,7 +75,8 @@ mod tests { "url": "https://example.com/track/click" }"#; - let callback: Callback = serde_json::from_str(json_str).unwrap(); + let callback: Callback = + serde_json::from_str(json_str).expect("should deserialize callback from str"); assert_eq!(callback.callback_type, "click"); assert_eq!(callback.url, "https://example.com/track/click"); } @@ -107,7 +109,8 @@ mod tests { ] }); - let ad_response: AdResponse = serde_json::from_value(json_data).unwrap(); + let ad_response: AdResponse = + serde_json::from_value(json_data).expect("should deserialize ad response"); assert_eq!(ad_response.network_id, "12345"); assert_eq!(ad_response.site_id, "67890"); @@ -147,7 +150,8 @@ mod tests { "callbacks": [] }); - let ad_response: AdResponse = serde_json::from_value(json_data).unwrap(); + let ad_response: AdResponse = serde_json::from_value(json_data) + .expect("should deserialize ad response with empty callbacks"); assert_eq!(ad_response.callbacks.len(), 0); } @@ -187,7 +191,8 @@ mod tests { "callbacks": [] }"#; - let ad_response: AdResponse = serde_json::from_str(json_str).unwrap(); + let ad_response: AdResponse = + serde_json::from_str(json_str).expect("should deserialize ad response from str"); assert_eq!(ad_response.network_id, "net123"); assert_eq!(ad_response.site_id, "site456"); assert_eq!(ad_response.page_id, "page789"); @@ -215,7 +220,8 @@ mod tests { "another": 123 }); - let callback: Callback = serde_json::from_value(json_data).unwrap(); + let callback: Callback = serde_json::from_value(json_data) + .expect("should deserialize callback with extra fields"); assert_eq!(callback.callback_type, "conversion"); assert_eq!(callback.url, "https://example.com/track/conversion"); } @@ -281,7 +287,8 @@ mod tests { "url": format!("https://example.com/track/{}", cb_type) }); - let callback: Callback = serde_json::from_value(json_data).unwrap(); + let callback: Callback = + serde_json::from_value(json_data).expect("should deserialize callback type"); assert_eq!(callback.callback_type, cb_type); assert_eq!( callback.url, diff --git a/crates/common/src/proxy.rs b/crates/common/src/proxy.rs index 6ae49856..baa91697 100644 --- a/crates/common/src/proxy.rs +++ b/crates/common/src/proxy.rs @@ -1288,8 +1288,8 @@ mod tests { let loc = resp .get_header(header::LOCATION) .and_then(|h| h.to_str().ok()) - .unwrap(); - let parsed = url::Url::parse(loc).expect("should parse location"); + .expect("Location header should be present and valid"); + let 
parsed = url::Url::parse(loc).expect("Location should be a valid URL"); let mut pairs: std::collections::HashMap = parsed .query_pairs() .map(|(k, v)| (k.into_owned(), v.into_owned())) @@ -1316,7 +1316,7 @@ mod tests { Method::POST, "https://edge.example/first-party/proxy-rebuild", ); - req.set_body(serde_json::to_string(&body).unwrap()); + req.set_body(serde_json::to_string(&body).expect("test JSON should serialize")); let mut resp = handle_first_party_proxy_rebuild(&settings, req) .await .expect("rebuild ok"); @@ -1435,37 +1435,46 @@ mod tests { copy_proxy_forward_headers(&src, &mut dst); assert_eq!( - dst.get_header(HEADER_USER_AGENT).unwrap().to_str().unwrap(), + dst.get_header(HEADER_USER_AGENT) + .expect("User-Agent header should be copied") + .to_str() + .expect("User-Agent should be valid UTF-8"), "UA/1.0" ); assert_eq!( - dst.get_header(HEADER_ACCEPT).unwrap().to_str().unwrap(), + dst.get_header(HEADER_ACCEPT) + .expect("Accept header should be copied") + .to_str() + .expect("Accept should be valid UTF-8"), "image/*" ); assert_eq!( dst.get_header(HEADER_ACCEPT_LANGUAGE) - .unwrap() + .expect("Accept-Language header should be copied") .to_str() - .unwrap(), + .expect("Accept-Language should be valid UTF-8"), "en-US" ); // Accept-Encoding is overridden to only include supported encodings assert_eq!( dst.get_header(HEADER_ACCEPT_ENCODING) - .unwrap() + .expect("Accept-Encoding header should be set") .to_str() - .unwrap(), + .expect("Accept-Encoding should be valid UTF-8"), SUPPORTED_ENCODINGS ); assert_eq!( - dst.get_header(HEADER_REFERER).unwrap().to_str().unwrap(), + dst.get_header(HEADER_REFERER) + .expect("Referer header should be copied") + .to_str() + .expect("Referer should be valid UTF-8"), "https://pub.example/page" ); assert_eq!( dst.get_header(HEADER_X_FORWARDED_FOR) - .unwrap() + .expect("X-Forwarded-For header should be copied") .to_str() - .unwrap(), + .expect("X-Forwarded-For should be valid UTF-8"), "203.0.113.1" ); } @@ -1517,9 +1526,9 @@ mod tests { .expect("finalize should succeed"); let ct = out .get_header(header::CONTENT_TYPE) - .unwrap() + .expect("Content-Type header should be present") .to_str() - .unwrap(); + .expect("Content-Type should be valid UTF-8"); assert_eq!(ct, "text/html; charset=utf-8"); let cc = out .get_header(header::CACHE_CONTROL) @@ -1547,9 +1556,9 @@ mod tests { assert!(body.contains("/first-party/proxy?tsurl="), "{}", body); let ct = out .get_header(header::CONTENT_TYPE) - .unwrap() + .expect("Content-Type header should be present") .to_str() - .unwrap(); + .expect("Content-Type should be valid UTF-8"); assert_eq!(ct, "text/css; charset=utf-8"); } @@ -1564,9 +1573,9 @@ mod tests { // Since CT was missing and Accept indicates image, it should set generic image/* let ct = out .get_header(header::CONTENT_TYPE) - .unwrap() + .expect("Content-Type header should be present") .to_str() - .unwrap(); + .expect("Content-Type should be valid UTF-8"); assert_eq!(ct, "image/*"); } @@ -1583,9 +1592,9 @@ mod tests { assert_eq!(out.get_status(), StatusCode::ACCEPTED); let ct = out .get_header(header::CONTENT_TYPE) - .unwrap() + .expect("Content-Type header should be present") .to_str() - .unwrap(); + .expect("Content-Type should be valid UTF-8"); assert_eq!(ct, "application/json"); let body = out.take_body_str(); assert_eq!(body, "{\"ok\":true}"); @@ -1603,8 +1612,10 @@ mod tests { // Gzip compress the HTML let mut encoder = GzEncoder::new(Vec::new(), Compression::default()); - encoder.write_all(html.as_bytes()).unwrap(); - let compressed = 
encoder.finish().unwrap(); + encoder + .write_all(html.as_bytes()) + .expect("gzip write should succeed"); + let compressed = encoder.finish().expect("gzip finish should succeed"); let beresp = Response::from_status(StatusCode::OK) .with_header(header::CONTENT_TYPE, "text/html; charset=utf-8") @@ -1620,14 +1631,14 @@ mod tests { .get_header(header::CONTENT_ENCODING) .expect("Content-Encoding should be preserved") .to_str() - .unwrap(); + .expect("Content-Encoding should be valid UTF-8"); assert_eq!(ce, "gzip"); let ct = out .get_header(header::CONTENT_TYPE) - .unwrap() + .expect("Content-Type header should be present") .to_str() - .unwrap(); + .expect("Content-Type should be valid UTF-8"); assert_eq!(ct, "text/html; charset=utf-8"); // Decompress output to verify content was rewritten @@ -1658,7 +1669,9 @@ mod tests { let mut compressed = Vec::new(); { let mut encoder = CompressorWriter::new(&mut compressed, 4096, 4, 22); - encoder.write_all(css.as_bytes()).unwrap(); + encoder + .write_all(css.as_bytes()) + .expect("brotli write should succeed"); } let beresp = Response::from_status(StatusCode::OK) @@ -1675,14 +1688,14 @@ mod tests { .get_header(header::CONTENT_ENCODING) .expect("Content-Encoding should be preserved") .to_str() - .unwrap(); + .expect("Content-Encoding should be valid UTF-8"); assert_eq!(ce, "br"); let ct = out .get_header(header::CONTENT_TYPE) - .unwrap() + .expect("Content-Type header should be present") .to_str() - .unwrap(); + .expect("Content-Type should be valid UTF-8"); assert_eq!(ct, "text/css; charset=utf-8"); // Decompress output to verify content was rewritten @@ -1721,9 +1734,9 @@ mod tests { let ct = out .get_header(header::CONTENT_TYPE) - .unwrap() + .expect("Content-Type header should be present") .to_str() - .unwrap(); + .expect("Content-Type should be valid UTF-8"); assert_eq!(ct, "text/html; charset=utf-8"); let body = out.take_body_str(); diff --git a/crates/common/src/request_signing/discovery.rs b/crates/common/src/request_signing/discovery.rs index 964027e3..4dffa339 100644 --- a/crates/common/src/request_signing/discovery.rs +++ b/crates/common/src/request_signing/discovery.rs @@ -58,10 +58,12 @@ mod tests { }); let discovery = TrustedServerDiscovery::new(jwks); - let serialized = serde_json::to_string(&discovery).unwrap(); + let serialized = + serde_json::to_string(&discovery).expect("should serialize discovery document"); // Verify it's valid JSON - let parsed: serde_json::Value = serde_json::from_str(&serialized).unwrap(); + let parsed: serde_json::Value = + serde_json::from_str(&serialized).expect("should parse serialized JSON"); assert_eq!(parsed["version"], "1.0"); assert!(parsed.get("jwks").is_some()); @@ -80,8 +82,10 @@ mod tests { }); let discovery = TrustedServerDiscovery::new(jwks); - let serialized = serde_json::to_string(&discovery).unwrap(); - let parsed: serde_json::Value = serde_json::from_str(&serialized).unwrap(); + let serialized = + serde_json::to_string(&discovery).expect("should serialize discovery document"); + let parsed: serde_json::Value = + serde_json::from_str(&serialized).expect("should parse serialized JSON"); assert!(parsed["jwks"]["keys"].is_array()); assert_eq!(parsed["jwks"]["keys"][0]["kid"], "test-key"); diff --git a/crates/common/src/request_signing/endpoints.rs b/crates/common/src/request_signing/endpoints.rs index 2c3d5f4c..762b2692 100644 --- a/crates/common/src/request_signing/endpoints.rs +++ b/crates/common/src/request_signing/endpoints.rs @@ -345,8 +345,11 @@ mod tests { // First, create a valid signature 
let payload = "test message"; - let signer = crate::request_signing::RequestSigner::from_config().unwrap(); - let signature = signer.sign(payload.as_bytes()).unwrap(); + let signer = crate::request_signing::RequestSigner::from_config() + .expect("should create signer from config"); + let signature = signer + .sign(payload.as_bytes()) + .expect("should sign payload"); // Create verification request let verify_req = VerifySignatureRequest { @@ -355,17 +358,19 @@ mod tests { kid: signer.kid.clone(), }; - let body = serde_json::to_string(&verify_req).unwrap(); + let body = serde_json::to_string(&verify_req).expect("should serialize verify request"); let mut req = Request::new(Method::POST, "https://test.com/verify-signature"); req.set_body(body); // Handle the request - let mut resp = handle_verify_signature(&settings, req).unwrap(); + let mut resp = + handle_verify_signature(&settings, req).expect("should handle verification request"); assert_eq!(resp.get_status(), StatusCode::OK); // Parse response let resp_body = resp.take_body_str(); - let verify_resp: VerifySignatureResponse = serde_json::from_str(&resp_body).unwrap(); + let verify_resp: VerifySignatureResponse = + serde_json::from_str(&resp_body).expect("should deserialize verify response"); assert!(verify_resp.verified, "Signature should be verified"); assert_eq!(verify_resp.kid, signer.kid); @@ -375,10 +380,13 @@ mod tests { #[test] fn test_handle_verify_signature_invalid() { let settings = crate::test_support::tests::create_test_settings(); - let signer = crate::request_signing::RequestSigner::from_config().unwrap(); + let signer = crate::request_signing::RequestSigner::from_config() + .expect("should create signer from config"); // Create a signature for a different payload - let wrong_signature = signer.sign(b"different payload").unwrap(); + let wrong_signature = signer + .sign(b"different payload") + .expect("should sign different payload"); // Create request with signature that does not match the payload let verify_req = VerifySignatureRequest { @@ -387,17 +395,19 @@ mod tests { kid: signer.kid.clone(), }; - let body = serde_json::to_string(&verify_req).unwrap(); + let body = serde_json::to_string(&verify_req).expect("should serialize verify request"); let mut req = Request::new(Method::POST, "https://test.com/verify-signature"); req.set_body(body); // Handle the request - let mut resp = handle_verify_signature(&settings, req).unwrap(); + let mut resp = + handle_verify_signature(&settings, req).expect("should handle verification request"); assert_eq!(resp.get_status(), StatusCode::OK); // Parse response let resp_body = resp.take_body_str(); - let verify_resp: VerifySignatureResponse = serde_json::from_str(&resp_body).unwrap(); + let verify_resp: VerifySignatureResponse = + serde_json::from_str(&resp_body).expect("should deserialize verify response"); assert!(!verify_resp.verified, "Invalid signature should not verify"); assert_eq!(verify_resp.kid, signer.kid); @@ -425,7 +435,8 @@ mod tests { match result { Ok(mut resp) => { let body = resp.take_body_str(); - let response: RotateKeyResponse = serde_json::from_str(&body).unwrap(); + let response: RotateKeyResponse = + serde_json::from_str(&body).expect("should deserialize rotate response"); println!( "Rotation response: success={}, message={}", response.success, response.message @@ -443,7 +454,7 @@ mod tests { kid: Some("test-custom-key".to_string()), }; - let body_json = serde_json::to_string(&req_body).unwrap(); + let body_json = serde_json::to_string(&req_body).expect("should 
serialize rotate request"); let mut req = Request::new(Method::POST, "https://test.com/admin/keys/rotate"); req.set_body(body_json); @@ -451,7 +462,8 @@ mod tests { match result { Ok(mut resp) => { let body = resp.take_body_str(); - let response: RotateKeyResponse = serde_json::from_str(&body).unwrap(); + let response: RotateKeyResponse = + serde_json::from_str(&body).expect("should deserialize rotate response"); println!( "Custom KID rotation: success={}, new_kid={}", response.success, response.new_kid @@ -480,7 +492,8 @@ mod tests { delete: false, }; - let body_json = serde_json::to_string(&req_body).unwrap(); + let body_json = + serde_json::to_string(&req_body).expect("should serialize deactivate request"); let mut req = Request::new(Method::POST, "https://test.com/admin/keys/deactivate"); req.set_body(body_json); @@ -488,7 +501,8 @@ mod tests { match result { Ok(mut resp) => { let body = resp.take_body_str(); - let response: DeactivateKeyResponse = serde_json::from_str(&body).unwrap(); + let response: DeactivateKeyResponse = + serde_json::from_str(&body).expect("should deserialize deactivate response"); println!( "Deactivate response: success={}, message={}", response.success, response.message @@ -507,7 +521,8 @@ mod tests { delete: true, }; - let body_json = serde_json::to_string(&req_body).unwrap(); + let body_json = + serde_json::to_string(&req_body).expect("should serialize deactivate request"); let mut req = Request::new(Method::POST, "https://test.com/admin/keys/deactivate"); req.set_body(body_json); @@ -515,7 +530,8 @@ mod tests { match result { Ok(mut resp) => { let body = resp.take_body_str(); - let response: DeactivateKeyResponse = serde_json::from_str(&body).unwrap(); + let response: DeactivateKeyResponse = + serde_json::from_str(&body).expect("should deserialize deactivate response"); println!( "Delete response: success={}, deleted={}", response.success, response.deleted @@ -538,14 +554,16 @@ mod tests { #[test] fn test_rotate_key_request_deserialization() { let json = r#"{"kid":"custom-key"}"#; - let req: RotateKeyRequest = serde_json::from_str(json).unwrap(); + let req: RotateKeyRequest = + serde_json::from_str(json).expect("should deserialize rotate key request"); assert_eq!(req.kid, Some("custom-key".to_string())); } #[test] fn test_deactivate_key_request_deserialization() { let json = r#"{"kid":"old-key","delete":true}"#; - let req: DeactivateKeyRequest = serde_json::from_str(json).unwrap(); + let req: DeactivateKeyRequest = + serde_json::from_str(json).expect("should deserialize deactivate key request"); assert_eq!(req.kid, "old-key"); assert!(req.delete); } @@ -565,7 +583,8 @@ mod tests { let body = resp.take_body_str(); // Parse the discovery document - let discovery: serde_json::Value = serde_json::from_str(&body).unwrap(); + let discovery: serde_json::Value = + serde_json::from_str(&body).expect("should parse discovery document"); // Verify structure - only version and jwks assert_eq!(discovery["version"], "1.0"); diff --git a/crates/common/src/request_signing/signing.rs b/crates/common/src/request_signing/signing.rs index 6013ff05..1961c780 100644 --- a/crates/common/src/request_signing/signing.rs +++ b/crates/common/src/request_signing/signing.rs @@ -161,57 +161,64 @@ mod tests { fn test_request_signer_sign() { // Report unwraps print full error chain on test failure // Note: unwrapping a Report prints it nicely if test fails. 
- let signer = RequestSigner::from_config().unwrap(); + let signer = RequestSigner::from_config().expect("should create signer from config"); let signature = signer .sign(b"these pretzels are making me thirsty") - .unwrap(); + .expect("should sign payload"); assert!(!signature.is_empty()); assert!(signature.len() > 32); } #[test] fn test_request_signer_from_config() { - let signer = RequestSigner::from_config().unwrap(); + let signer = RequestSigner::from_config().expect("should create signer from config"); assert!(!signer.kid.is_empty()); } #[test] fn test_sign_and_verify() { let payload = b"test payload for verification"; - let signer = RequestSigner::from_config().unwrap(); - let signature = signer.sign(payload).unwrap(); + let signer = RequestSigner::from_config().expect("should create signer from config"); + let signature = signer.sign(payload).expect("should sign payload"); - let result = verify_signature(payload, &signature, &signer.kid).unwrap(); + let result = + verify_signature(payload, &signature, &signer.kid).expect("should verify signature"); assert!(result, "Signature should be valid"); } #[test] fn test_verify_invalid_signature() { let payload = b"test payload"; - let signer = RequestSigner::from_config().unwrap(); + let signer = RequestSigner::from_config().expect("should create signer from config"); - let wrong_signature = signer.sign(b"different payload").unwrap(); + let wrong_signature = signer + .sign(b"different payload") + .expect("should sign different payload"); - let result = verify_signature(payload, &wrong_signature, &signer.kid).unwrap(); + let result = verify_signature(payload, &wrong_signature, &signer.kid) + .expect("should attempt verification"); assert!(!result, "Invalid signature should not verify"); } #[test] fn test_verify_wrong_payload() { let original_payload = b"original payload"; - let signer = RequestSigner::from_config().unwrap(); - let signature = signer.sign(original_payload).unwrap(); + let signer = RequestSigner::from_config().expect("should create signer from config"); + let signature = signer + .sign(original_payload) + .expect("should sign original payload"); let wrong_payload = b"wrong payload"; - let result = verify_signature(wrong_payload, &signature, &signer.kid).unwrap(); + let result = verify_signature(wrong_payload, &signature, &signer.kid) + .expect("should attempt verification"); assert!(!result, "Signature should not verify with wrong payload"); } #[test] fn test_verify_missing_key() { let payload = b"test payload"; - let signer = RequestSigner::from_config().unwrap(); - let signature = signer.sign(payload).unwrap(); + let signer = RequestSigner::from_config().expect("should create signer from config"); + let signature = signer.sign(payload).expect("should sign payload"); let nonexistent_kid = "nonexistent-key-id"; let result = verify_signature(payload, &signature, nonexistent_kid); @@ -221,7 +228,7 @@ mod tests { #[test] fn test_verify_malformed_signature() { let payload = b"test payload"; - let signer = RequestSigner::from_config().unwrap(); + let signer = RequestSigner::from_config().expect("should create signer from config"); let malformed_signature = "not-valid-base64!!!"; let result = verify_signature(payload, malformed_signature, &signer.kid); diff --git a/crates/common/src/settings.rs b/crates/common/src/settings.rs index 84bbd9a3..a8510db0 100644 --- a/crates/common/src/settings.rs +++ b/crates/common/src/settings.rs @@ -465,7 +465,7 @@ mod tests { let settings = Settings::new(); assert!(settings.is_ok(), "Settings should 
load from embedded TOML"); - let settings = settings.unwrap(); + let settings = settings.expect("should load settings from embedded TOML"); assert!(!settings.publisher.domain.is_empty()); assert!(!settings.publisher.cookie_domain.is_empty()); @@ -578,7 +578,7 @@ mod tests { #[test] fn test_settings_missing_required_fields() { - let re = Regex::new(r"origin_url = .*").unwrap(); + let re = Regex::new(r"origin_url = .*").expect("regex should compile"); let toml_str = crate_test_settings_str(); let toml_str = re.replace(&toml_str, ""); @@ -599,7 +599,7 @@ mod tests { #[test] fn test_settings_invalid_toml_syntax() { - let re = Regex::new(r"\]").unwrap(); + let re = Regex::new(r"\]").expect("regex should compile"); let toml_str = crate_test_settings_str(); let toml_str = re.replace(&toml_str, ""); @@ -609,7 +609,7 @@ mod tests { #[test] fn test_settings_partial_config() { - let re = Regex::new(r"\[publisher\]").unwrap(); + let re = Regex::new(r"\[publisher\]").expect("regex should compile"); let toml_str = crate_test_settings_str(); let toml_str = re.replace(&toml_str, ""); @@ -788,7 +788,7 @@ mod tests { assert!(settings.is_ok(), "Settings should load from embedded TOML"); assert_eq!( - settings.unwrap().publisher.origin_url, + settings.expect("should load settings").publisher.origin_url, "https://change-publisher.com" ); }, @@ -812,7 +812,7 @@ mod tests { assert!(settings.is_ok(), "Settings should load from embedded TOML"); assert_eq!( - settings.unwrap().publisher.origin_url, + settings.expect("should load settings").publisher.origin_url, "https://change-publisher.com" ); }, diff --git a/crates/common/src/settings_data.rs b/crates/common/src/settings_data.rs index 6596f92a..01967add 100644 --- a/crates/common/src/settings_data.rs +++ b/crates/common/src/settings_data.rs @@ -45,7 +45,7 @@ mod tests { let settings = get_settings(); assert!(settings.is_ok(), "Settings should load from embedded TOML"); - let settings = settings.unwrap(); + let settings = settings.expect("should load settings from embedded TOML"); // Verify basic structure is loaded assert!(!settings.publisher.domain.is_empty()); assert!(!settings.publisher.cookie_domain.is_empty()); diff --git a/crates/common/src/streaming_processor.rs b/crates/common/src/streaming_processor.rs index 4a31b70f..8f9a809a 100644 --- a/crates/common/src/streaming_processor.rs +++ b/crates/common/src/streaming_processor.rs @@ -551,9 +551,14 @@ mod tests { let input = b"hello world"; let mut output = Vec::new(); - pipeline.process(&input[..], &mut output).unwrap(); + pipeline + .process(&input[..], &mut output) + .expect("pipeline should process uncompressed input"); - assert_eq!(String::from_utf8(output).unwrap(), "hi world"); + assert_eq!( + String::from_utf8(output).expect("output should be valid UTF-8"), + "hi world" + ); } #[test] @@ -598,22 +603,28 @@ mod tests { // Test that intermediate chunks return empty let chunk1 = b""; - let result1 = adapter.process_chunk(chunk1, false).unwrap(); + let result1 = adapter + .process_chunk(chunk1, false) + .expect("should process chunk1"); assert_eq!(result1.len(), 0, "Should return empty for non-last chunk"); let chunk2 = b"
original
"; - let result2 = adapter.process_chunk(chunk2, false).unwrap(); + let result2 = adapter + .process_chunk(chunk2, false) + .expect("should process chunk2"); assert_eq!(result2.len(), 0, "Should return empty for non-last chunk"); // Test that last chunk processes everything let chunk3 = b""; - let result3 = adapter.process_chunk(chunk3, true).unwrap(); + let result3 = adapter + .process_chunk(chunk3, true) + .expect("should process final chunk"); assert!( !result3.is_empty(), "Should return processed content for last chunk" ); - let output = String::from_utf8(result3).unwrap(); + let output = String::from_utf8(result3).expect("output should be valid UTF-8"); assert!(output.contains("replaced"), "Should have replaced content"); assert!(output.contains(""), "Should have complete HTML"); } @@ -639,16 +650,20 @@ mod tests { let mut last_chunk = chunks.next().unwrap_or(&[]); for chunk in chunks { - let result = adapter.process_chunk(last_chunk, false).unwrap(); + let result = adapter + .process_chunk(last_chunk, false) + .expect("should process intermediate chunk"); assert_eq!(result.len(), 0, "Intermediate chunks should return empty"); last_chunk = chunk; } // Process last chunk - let result = adapter.process_chunk(last_chunk, true).unwrap(); + let result = adapter + .process_chunk(last_chunk, true) + .expect("should process last chunk"); assert!(!result.is_empty(), "Last chunk should return content"); - let output = String::from_utf8(result).unwrap(); + let output = String::from_utf8(result).expect("output should be valid UTF-8"); assert!( output.contains("Paragraph 999"), "Should contain all content" @@ -663,15 +678,21 @@ mod tests { let mut adapter = HtmlRewriterAdapter::new(settings); // Process some content - adapter.process_chunk(b"", false).unwrap(); - adapter.process_chunk(b"test", false).unwrap(); + adapter + .process_chunk(b"", false) + .expect("should process html tag"); + adapter + .process_chunk(b"test", false) + .expect("should process body"); // Reset should clear accumulated input adapter.reset(); // After reset, adapter should be ready for new input - let result = adapter.process_chunk(b"
new
", true).unwrap(); - let output = String::from_utf8(result).unwrap(); + let result = adapter + .process_chunk(b"
new
", true) + .expect("should process new content after reset"); + let output = String::from_utf8(result).expect("output should be valid UTF-8"); assert_eq!( output, "
new
", "Should only contain new input after reset" @@ -701,9 +722,11 @@ mod tests { let input = b"Link"; let mut output = Vec::new(); - pipeline.process(&input[..], &mut output).unwrap(); + pipeline + .process(&input[..], &mut output) + .expect("pipeline should process HTML"); - let result = String::from_utf8(output).unwrap(); + let result = String::from_utf8(output).expect("output should be valid UTF-8"); assert!( result.contains("https://test.com"), "Should have replaced URL" diff --git a/crates/common/src/streaming_replacer.rs b/crates/common/src/streaming_replacer.rs index 1bfa370e..faf8f9a2 100644 --- a/crates/common/src/streaming_replacer.rs +++ b/crates/common/src/streaming_replacer.rs @@ -204,7 +204,7 @@ mod tests { let input = b"Visit https://origin.example.com for more info"; let processed = replacer.process_chunk(input, true); - let result = String::from_utf8(processed).unwrap(); + let result = String::from_utf8(processed).expect("output should be valid UTF-8"); assert_eq!(result, "Visit https://test.example.com for more info"); } @@ -224,7 +224,8 @@ mod tests { let processed1 = replacer.process_chunk(chunk1, false); let processed2 = replacer.process_chunk(chunk2, true); - let result = String::from_utf8([processed1, processed2].concat()).unwrap(); + let result = String::from_utf8([processed1, processed2].concat()) + .expect("output should be valid UTF-8"); assert_eq!(result, "Visit https://test.example.com for more info"); } @@ -246,7 +247,7 @@ mod tests { let input = b"Link and //origin.example.com/resource"; let processed = replacer.process_chunk(input, true); - let result = String::from_utf8(processed).unwrap(); + let result = String::from_utf8(processed).expect("output should be valid UTF-8"); assert!(result.contains("https://test.example.com")); assert!(result.contains("//test.example.com/resource")); @@ -281,7 +282,7 @@ mod tests { result.extend(processed); } - let result_str = String::from_utf8(result).unwrap(); + let result_str = String::from_utf8(result).expect("output should be valid UTF-8"); assert_eq!(result_str, "https://test.example.com"); } @@ -310,7 +311,7 @@ mod tests { "#; let processed = replacer.process_chunk(content.as_bytes(), true); - let result = String::from_utf8(processed).unwrap(); + let result = String::from_utf8(processed).expect("output should be valid UTF-8"); // Verify all patterns were replaced assert!(result.contains("https://test.example.com/page")); @@ -335,7 +336,7 @@ mod tests { let content = b"Visit https://origin.example.com:8080/api or //origin.example.com:8080/resource"; let processed = replacer.process_chunk(content, true); - let result = String::from_utf8(processed).unwrap(); + let result = String::from_utf8(processed).expect("output should be valid UTF-8"); assert_eq!( result, @@ -359,7 +360,7 @@ mod tests { "#; let processed = replacer.process_chunk(content.as_bytes(), true); - let result = String::from_utf8(processed).unwrap(); + let result = String::from_utf8(processed).expect("output should be valid UTF-8"); // When request is HTTP, all URLs should be replaced with HTTP assert!(result.contains("http://test.example.com")); @@ -384,7 +385,7 @@ mod tests { result.extend(replacer.process_chunk(chunk, is_last)); } - let result_str = String::from_utf8(result).unwrap(); + let result_str = String::from_utf8(result).expect("output should be valid UTF-8"); assert!(result_str.contains("https://test.com/test")); assert!(result_str.contains("https://test.com/more")); assert!(result_str.contains("思怙ᕏ测试")); @@ -406,7 +407,7 @@ mod tests { 
result.extend(replacer.process_chunk(chunk1, false)); result.extend(replacer.process_chunk(chunk2, true)); - let result_str = String::from_utf8(result).unwrap(); + let result_str = String::from_utf8(result).expect("output should be valid UTF-8"); assert!(result_str.contains("https://new.com/før/bår/test")); } @@ -420,7 +421,7 @@ mod tests { // Process the entire content at once to verify it works let all_at_once = replacer.process_chunk(content, true); - let expected = String::from_utf8(all_at_once).unwrap(); + let expected = String::from_utf8(all_at_once).expect("output should be valid UTF-8"); assert!(expected.contains("https://test.com/test")); assert!(expected.contains("https://test.com/more")); } @@ -444,7 +445,7 @@ mod tests { result.extend(replacer.process_chunk(chunk, is_last)); } - let result_str = String::from_utf8(result).unwrap(); + let result_str = String::from_utf8(result).expect("output should be valid UTF-8"); assert!(result_str.contains("https://test.com/page1")); assert!(result_str.contains("https://test.com/page2")); } @@ -466,7 +467,7 @@ mod tests { result.extend(replacer.process_chunk(chunk, is_last)); } - let result_str = String::from_utf8(result).unwrap(); + let result_str = String::from_utf8(result).expect("output should be valid UTF-8"); // Just verify the content is preserved correctly assert!(result_str.contains("思怙ᕏ测试")); assert!(result_str.contains("🎉")); @@ -490,7 +491,7 @@ mod tests { let input = b"The color is gray, not light gray."; let processed = replacer.process_chunk(input, true); - let result = String::from_utf8(processed).unwrap(); + let result = String::from_utf8(processed).expect("output should be valid UTF-8"); assert_eq!(result, "The colour is grey, not light grey."); } @@ -513,7 +514,7 @@ mod tests { let input = b"Say hello world and hello there!"; let processed = replacer.process_chunk(input, true); - let result = String::from_utf8(processed).unwrap(); + let result = String::from_utf8(processed).expect("output should be valid UTF-8"); // Note: Since we apply replacements in order, "hello world" gets replaced first assert_eq!(result, "Say greetings universe and hi there!"); @@ -537,7 +538,7 @@ mod tests { let input = b"abcdef"; let processed = replacer.process_chunk(input, true); - let result = String::from_utf8(processed).unwrap(); + let result = String::from_utf8(processed).expect("output should be valid UTF-8"); // "abc" gets replaced first, so "bcd" is no longer found assert_eq!(result, "xyzdef"); @@ -550,7 +551,7 @@ mod tests { let input = b"Keep this REMOVE_ME but not this"; let processed = replacer.process_chunk(input, true); - let result = String::from_utf8(processed).unwrap(); + let result = String::from_utf8(processed).expect("output should be valid UTF-8"); assert_eq!(result, "Keep this but not this"); } @@ -562,7 +563,7 @@ mod tests { let input = b"Hello world, hello there, HELLO!"; let processed = replacer.process_chunk(input, true); - let result = String::from_utf8(processed).unwrap(); + let result = String::from_utf8(processed).expect("output should be valid UTF-8"); assert_eq!(result, "Hi world, hello there, HELLO!"); } @@ -585,7 +586,7 @@ mod tests { let input = b"The cost: $10.99 [TAG] is final"; let processed = replacer.process_chunk(input, true); - let result = String::from_utf8(processed).unwrap(); + let result = String::from_utf8(processed).expect("output should be valid UTF-8"); assert_eq!(result, "The price: €9.99