diff --git a/intercept/apidump/apidump_test.go b/intercept/apidump/apidump_test.go index 01472963..2921f7fe 100644 --- a/intercept/apidump/apidump_test.go +++ b/intercept/apidump/apidump_test.go @@ -52,7 +52,7 @@ func TestBridgedMiddleware_RedactsSensitiveRequestHeaders(t *testing.T) { req.Header.Set("User-Agent", "test-client") // Call middleware with a mock next function - _, err = middleware(req, func(r *http.Request) (*http.Response, error) { + resp, err := middleware(req, func(r *http.Request) (*http.Response, error) { return &http.Response{ StatusCode: http.StatusOK, Status: "200 OK", @@ -62,6 +62,7 @@ func TestBridgedMiddleware_RedactsSensitiveRequestHeaders(t *testing.T) { }, nil }) require.NoError(t, err) + defer resp.Body.Close() // Read the request dump file modelDir := filepath.Join(tmpDir, "openai", "gpt-4") @@ -170,7 +171,7 @@ func TestBridgedMiddleware_PreservesRequestBody(t *testing.T) { require.NoError(t, err) var capturedBody []byte - _, err = middleware(req, func(r *http.Request) (*http.Response, error) { + resp2, err := middleware(req, func(r *http.Request) (*http.Response, error) { // Read the body in the next handler to verify it's still available capturedBody, _ = io.ReadAll(r.Body) return &http.Response{ @@ -182,6 +183,7 @@ func TestBridgedMiddleware_PreservesRequestBody(t *testing.T) { }, nil }) require.NoError(t, err) + defer resp2.Body.Close() // Verify the body was preserved for the next handler require.Equal(t, originalBody, string(capturedBody)) @@ -202,7 +204,7 @@ func TestBridgedMiddleware_ModelWithSlash(t *testing.T) { req, err := http.NewRequest(http.MethodPost, "https://api.google.com/v1/chat", bytes.NewReader([]byte(`{}`))) require.NoError(t, err) - _, err = middleware(req, func(r *http.Request) (*http.Response, error) { + resp3, err := middleware(req, func(r *http.Request) (*http.Response, error) { return &http.Response{ StatusCode: http.StatusOK, Status: "200 OK", @@ -212,6 +214,7 @@ func TestBridgedMiddleware_ModelWithSlash(t 
*testing.T) { }, nil }) require.NoError(t, err) + defer resp3.Body.Close() // Verify files are created with sanitized model name modelDir := filepath.Join(tmpDir, "google", "gemini-1.5-pro") @@ -290,7 +293,7 @@ func TestBridgedMiddleware_AllSensitiveRequestHeaders(t *testing.T) { req.Header.Set("Proxy-Authorization", "Basic proxy-creds") req.Header.Set("X-Amz-Security-Token", "aws-security-token") - _, err = middleware(req, func(r *http.Request) (*http.Response, error) { + resp4, err := middleware(req, func(r *http.Request) (*http.Response, error) { return &http.Response{ StatusCode: http.StatusOK, Status: "200 OK", @@ -300,6 +303,7 @@ func TestBridgedMiddleware_AllSensitiveRequestHeaders(t *testing.T) { }, nil }) require.NoError(t, err) + defer resp4.Body.Close() modelDir := filepath.Join(tmpDir, "openai", "gpt-4") reqDumpPath := findDumpFile(t, modelDir, SuffixRequest) @@ -358,7 +362,7 @@ func TestPassthroughMiddleware(t *testing.T) { req, err := http.NewRequest(http.MethodGet, "https://api.openai.com/v1/models", nil) require.NoError(t, err) - resp, err := rt.RoundTrip(req) + resp, err := rt.RoundTrip(req) //nolint:bodyclose // resp is nil on error require.ErrorIs(t, err, innerErr) require.Nil(t, resp) }) diff --git a/intercept/apidump/streaming_test.go b/intercept/apidump/streaming_test.go index 47c04926..9ab8e71e 100644 --- a/intercept/apidump/streaming_test.go +++ b/intercept/apidump/streaming_test.go @@ -120,6 +120,7 @@ func TestMiddleware_PreservesResponseBody(t *testing.T) { }, nil }) require.NoError(t, err) + defer resp.Body.Close() // Verify the response body is still readable after middleware capturedBody, err := io.ReadAll(resp.Body) diff --git a/internal/integrationtest/apidump_test.go b/internal/integrationtest/apidump_test.go index 41e23551..9fbbaf15 100644 --- a/internal/integrationtest/apidump_test.go +++ b/internal/integrationtest/apidump_test.go @@ -128,9 +128,11 @@ func TestAPIDump(t *testing.T) { withCustomProvider(tc.providerFunc(srv.URL, 
dumpDir)), ) - resp := bridgeServer.makeRequest(t, http.MethodPost, tc.path, fix.Request(), tc.headers) + resp, err := bridgeServer.makeRequest(t, http.MethodPost, tc.path, fix.Request(), tc.headers) + require.NoError(t, err) + defer resp.Body.Close() require.Equal(t, http.StatusOK, resp.StatusCode) - _, err := io.ReadAll(resp.Body) + _, err = io.ReadAll(resp.Body) require.NoError(t, err) // Verify dump files were created. @@ -187,6 +189,7 @@ func TestAPIDump(t *testing.T) { // Parse the dumped HTTP response. dumpResp, err := http.ReadResponse(bufio.NewReader(bytes.NewReader(respDumpData)), nil) require.NoError(t, err) + defer dumpResp.Body.Close() require.Equal(t, http.StatusOK, dumpResp.StatusCode) dumpRespBody, err := io.ReadAll(dumpResp.Body) require.NoError(t, err) @@ -256,12 +259,14 @@ func TestAPIDumpPassthrough(t *testing.T) { withCustomProvider(tc.providerFunc(upstream.URL, dumpDir)), ) - bridgeServer.makeRequest(t, http.MethodGet, tc.requestPath, nil) + resp, err := bridgeServer.makeRequest(t, http.MethodGet, tc.requestPath, nil) + require.NoError(t, err) + defer resp.Body.Close() // Find dump files in the passthrough directory. 
passthroughDir := filepath.Join(dumpDir, tc.name, "passthrough") var reqDumpFile, respDumpFile string - err := filepath.Walk(passthroughDir, func(path string, info os.FileInfo, err error) error { + err = filepath.Walk(passthroughDir, func(path string, info os.FileInfo, err error) error { if err != nil { return err } @@ -299,6 +304,7 @@ func TestAPIDumpPassthrough(t *testing.T) { require.NoError(t, err) dumpResp, err := http.ReadResponse(bufio.NewReader(bytes.NewReader(respDumpData)), nil) require.NoError(t, err) + defer dumpResp.Body.Close() require.Equal(t, http.StatusOK, dumpResp.StatusCode) dumpRespBody, err := io.ReadAll(dumpResp.Body) require.NoError(t, err) diff --git a/internal/integrationtest/bridge_test.go b/internal/integrationtest/bridge_test.go index 519c2e97..c305c9f8 100644 --- a/internal/integrationtest/bridge_test.go +++ b/internal/integrationtest/bridge_test.go @@ -89,7 +89,9 @@ func TestAnthropicMessages(t *testing.T) { // Make API call to aibridge for Anthropic /v1/messages reqBody, err := sjson.SetBytes(fix.Request(), "stream", tc.streaming) require.NoError(t, err) - resp := bridgeServer.makeRequest(t, http.MethodPost, pathAnthropicMessages, reqBody) + resp, err := bridgeServer.makeRequest(t, http.MethodPost, pathAnthropicMessages, reqBody) + require.NoError(t, err) + defer resp.Body.Close() require.Equal(t, http.StatusOK, resp.StatusCode) // Response-specific checks. 
@@ -220,7 +222,9 @@ func TestAnthropicMessagesModelThoughts(t *testing.T) { reqBody, err := sjson.SetBytes(fix.Request(), "stream", tc.streaming) require.NoError(t, err) - resp := bridgeServer.makeRequest(t, http.MethodPost, pathAnthropicMessages, reqBody) + resp, err := bridgeServer.makeRequest(t, http.MethodPost, pathAnthropicMessages, reqBody) + require.NoError(t, err) + defer resp.Body.Close() require.Equal(t, http.StatusOK, resp.StatusCode) if tc.streaming { @@ -258,7 +262,9 @@ func TestAWSBedrockIntegration(t *testing.T) { withCustomProvider(provider.NewAnthropic(anthropicCfg("http://unused", apiKey), bedrockCfg)), ) - resp := bridgeServer.makeRequest(t, http.MethodPost, pathAnthropicMessages, fixtures.Request(t, fixtures.AntSingleBuiltinTool)) + resp, err := bridgeServer.makeRequest(t, http.MethodPost, pathAnthropicMessages, fixtures.Request(t, fixtures.AntSingleBuiltinTool)) + require.NoError(t, err) + defer resp.Body.Close() require.Equal(t, http.StatusInternalServerError, resp.StatusCode) body, err := io.ReadAll(resp.Body) @@ -296,7 +302,9 @@ func TestAWSBedrockIntegration(t *testing.T) { // We override the AWS Bedrock client to route requests through our mock server. reqBody, err := sjson.SetBytes(fix.Request(), "stream", streaming) require.NoError(t, err) - resp := bridgeServer.makeRequest(t, http.MethodPost, pathAnthropicMessages, reqBody) + resp, err := bridgeServer.makeRequest(t, http.MethodPost, pathAnthropicMessages, reqBody) + require.NoError(t, err) + defer resp.Body.Close() // For streaming responses, consume the body to allow the stream to complete. if streaming { @@ -419,9 +427,11 @@ func TestAWSBedrockIntegration(t *testing.T) { require.NoError(t, err) // Send with Anthropic-Beta header containing flags that should be filtered. 
- resp := bridgeServer.makeRequest(t, http.MethodPost, pathAnthropicMessages, reqBody, http.Header{ + resp, err := bridgeServer.makeRequest(t, http.MethodPost, pathAnthropicMessages, reqBody, http.Header{ "Anthropic-Beta": {"interleaved-thinking-2025-05-14,effort-2025-11-24,context-management-2025-06-27,prompt-caching-scope-2026-01-05"}, }) + require.NoError(t, err) + defer resp.Body.Close() require.Equal(t, http.StatusOK, resp.StatusCode) _, err = io.ReadAll(resp.Body) require.NoError(t, err) @@ -502,7 +512,9 @@ func TestOpenAIChatCompletions(t *testing.T) { // Make API call to aibridge for OpenAI /v1/chat/completions reqBody, err := sjson.SetBytes(fix.Request(), "stream", tc.streaming) require.NoError(t, err) - resp := bridgeServer.makeRequest(t, http.MethodPost, pathOpenAIChatCompletions, reqBody) + resp, err := bridgeServer.makeRequest(t, http.MethodPost, pathOpenAIChatCompletions, reqBody) + require.NoError(t, err) + defer resp.Body.Close() require.Equal(t, http.StatusOK, resp.StatusCode) // Response-specific checks. @@ -583,7 +595,9 @@ func TestOpenAIChatCompletions(t *testing.T) { // Add the stream param to the request. reqBody, err := sjson.SetBytes(fix.Request(), "stream", true) require.NoError(t, err) - resp := bridgeServer.makeRequest(t, http.MethodPost, pathOpenAIChatCompletions, reqBody) + resp, err := bridgeServer.makeRequest(t, http.MethodPost, pathOpenAIChatCompletions, reqBody) + require.NoError(t, err) + defer resp.Body.Close() require.Equal(t, http.StatusOK, resp.StatusCode) // Verify SSE headers are sent correctly @@ -767,7 +781,9 @@ func TestSimple(t *testing.T) { // When: calling the "API server" with the fixture's request body. 
reqBody, err := sjson.SetBytes(fix.Request(), "stream", streaming) require.NoError(t, err) - resp := bridgeServer.makeRequest(t, http.MethodPost, tc.path, reqBody, http.Header{"User-Agent": {tc.userAgent}}) + resp, err := bridgeServer.makeRequest(t, http.MethodPost, tc.path, reqBody, http.Header{"User-Agent": {tc.userAgent}}) + require.NoError(t, err) + defer resp.Body.Close() require.Equal(t, http.StatusOK, resp.StatusCode) // Then: I expect the upstream request to have the correct path. @@ -875,11 +891,13 @@ func TestSessionIDTracking(t *testing.T) { require.NoError(t, err) } - resp := bridgeServer.makeRequest(t, http.MethodPost, pathAnthropicMessages, reqBody, tc.header) + resp, err := bridgeServer.makeRequest(t, http.MethodPost, pathAnthropicMessages, reqBody, tc.header) + require.NoError(t, err) + defer resp.Body.Close() require.Equal(t, http.StatusOK, resp.StatusCode) // Drain the body to let the stream complete. - _, err := io.ReadAll(resp.Body) + _, err = io.ReadAll(resp.Body) require.NoError(t, err) interceptions := bridgeServer.Recorder.RecordedInterceptions() @@ -951,7 +969,9 @@ func TestFallthrough(t *testing.T) { upstream := newMockUpstream(t.Context(), t, newFixtureResponse(fix)) bridgeServer := newBridgeTestServer(t.Context(), t, upstream.URL+tc.basePath) - resp := bridgeServer.makeRequest(t, http.MethodGet, tc.requestPath, nil) + resp, err := bridgeServer.makeRequest(t, http.MethodGet, tc.requestPath, nil) + require.NoError(t, err) + defer resp.Body.Close() require.Equal(t, http.StatusOK, resp.StatusCode) @@ -984,6 +1004,7 @@ func TestAnthropicInjectedTools(t *testing.T) { // Build the requirements & make the assertions which are common to all providers. bridgeServer, mockMCP, resp := setupInjectedToolTest(t, fixtures.AntSingleInjectedTool, streaming, defaultTracer, pathAnthropicMessages, anthropicToolResultValidator(t)) + defer resp.Body.Close() // Ensure expected tool was invoked with expected input. 
toolUsages := bridgeServer.Recorder.RecordedToolUsages() @@ -1067,6 +1088,7 @@ func TestOpenAIInjectedTools(t *testing.T) { // Build the requirements & make the assertions which are common to all providers. bridgeServer, mockMCP, resp := setupInjectedToolTest(t, fixtures.OaiChatSingleInjectedTool, streaming, defaultTracer, pathOpenAIChatCompletions, openaiChatToolResultValidator(t)) + defer resp.Body.Close() // Ensure expected tool was invoked with expected input. toolUsages := bridgeServer.Recorder.RecordedToolUsages() @@ -1290,7 +1312,9 @@ func TestErrorHandling(t *testing.T) { reqBody, err := sjson.SetBytes(fix.Request(), "stream", streaming) require.NoError(t, err) - resp := bridgeServer.makeRequest(t, http.MethodPost, tc.path, reqBody) + resp, err := bridgeServer.makeRequest(t, http.MethodPost, tc.path, reqBody) + require.NoError(t, err) + defer resp.Body.Close() tc.responseHandlerFn(resp) bridgeServer.Recorder.VerifyAllInterceptionsEnded(t) @@ -1357,7 +1381,9 @@ func TestErrorHandling(t *testing.T) { bridgeServer := newBridgeTestServer(ctx, t, upstream.URL) - resp := bridgeServer.makeRequest(t, http.MethodPost, tc.path, fix.Request()) + resp, err := bridgeServer.makeRequest(t, http.MethodPost, tc.path, fix.Request()) + require.NoError(t, err) + defer resp.Body.Close() tc.responseHandlerFn(resp) bridgeServer.Recorder.VerifyAllInterceptionsEnded(t) @@ -1416,7 +1442,9 @@ func TestStableRequestEncoding(t *testing.T) { // Make multiple requests and verify they all have identical payloads. 
for range count { - resp := bridgeServer.makeRequest(t, http.MethodPost, tc.path, fix.Request()) + resp, err := bridgeServer.makeRequest(t, http.MethodPost, tc.path, fix.Request()) + require.NoError(t, err) require.Equal(t, http.StatusOK, resp.StatusCode) + require.NoError(t, resp.Body.Close()) } @@ -1679,7 +1707,9 @@ func TestAnthropicToolChoiceParallelDisabled(t *testing.T) { reqBody, err := sjson.SetBytes(fix.Request(), "tool_choice", tc.toolChoice) require.NoError(t, err) - resp := bridgeServer.makeRequest(t, http.MethodPost, pathAnthropicMessages, reqBody) + resp, err := bridgeServer.makeRequest(t, http.MethodPost, pathAnthropicMessages, reqBody) + require.NoError(t, err) + defer resp.Body.Close() require.Equal(t, http.StatusOK, resp.StatusCode) // Verify tool_choice in the upstream request. @@ -1842,7 +1872,9 @@ func TestChatCompletionsParallelToolCallsDisabled(t *testing.T) { reqBody, err = sjson.SetBytes(reqBody, "stream", streaming) require.NoError(t, err) - resp := bridgeServer.makeRequest(t, http.MethodPost, pathOpenAIChatCompletions, reqBody) + resp, err := bridgeServer.makeRequest(t, http.MethodPost, pathOpenAIChatCompletions, reqBody) + require.NoError(t, err) + defer resp.Body.Close() _, err = io.ReadAll(resp.Body) require.NoError(t, err) @@ -1886,7 +1918,9 @@ func TestThinkingAdaptiveIsPreserved(t *testing.T) { reqBody, err = sjson.SetBytes(reqBody, "stream", streaming) require.NoError(t, err) - resp := bridgeServer.makeRequest(t, http.MethodPost, pathAnthropicMessages, reqBody) + resp, err := bridgeServer.makeRequest(t, http.MethodPost, pathAnthropicMessages, reqBody) + require.NoError(t, err) + defer resp.Body.Close() require.Equal(t, http.StatusOK, resp.StatusCode) _, err = io.ReadAll(resp.Body) require.NoError(t, err) @@ -1949,7 +1983,9 @@ func TestEnvironmentDoNotLeak(t *testing.T) { bridgeServer := newBridgeTestServer(ctx, t, upstream.URL) - resp := bridgeServer.makeRequest(t, http.MethodPost, tc.path, fix.Request()) + resp, err :=
bridgeServer.makeRequest(t, http.MethodPost, tc.path, fix.Request()) + require.NoError(t, err) + defer resp.Body.Close() require.Equal(t, http.StatusOK, resp.StatusCode) // Verify that environment values did not leak. @@ -2063,7 +2099,9 @@ func TestActorHeaders(t *testing.T) { reqBody, err := sjson.SetBytes(fix.Request(), "stream", tc.streaming) require.NoError(t, err) - resp := bridgeServer.makeRequest(t, http.MethodPost, tc.path, reqBody) + resp, err := bridgeServer.makeRequest(t, http.MethodPost, tc.path, reqBody) + require.NoError(t, err) + defer resp.Body.Close() // Drain the body so streaming responses complete without // a "connection reset" error in the mock upstream. _, err = io.ReadAll(resp.Body) diff --git a/internal/integrationtest/circuit_breaker_test.go b/internal/integrationtest/circuit_breaker_test.go index e1bb8cfd..3ad039be 100644 --- a/internal/integrationtest/circuit_breaker_test.go +++ b/internal/integrationtest/circuit_breaker_test.go @@ -136,26 +136,28 @@ func TestCircuitBreaker_FullRecoveryCycle(t *testing.T) { withActor("test-user-id", nil), ) - doRequest := func() *http.Response { - resp := bridgeServer.makeRequest(t, http.MethodPost, tc.path, []byte(tc.requestBody), tc.headers) - _, err := io.ReadAll(resp.Body) + doRequest := func() int { + resp, err := bridgeServer.makeRequest(t, http.MethodPost, tc.path, []byte(tc.requestBody), tc.headers) require.NoError(t, err) - return resp + _, err = io.ReadAll(resp.Body) + require.NoError(t, err) + require.NoError(t, resp.Body.Close()) + return resp.StatusCode } // Phase 1: Trip the circuit breaker // First FailureThreshold requests hit upstream, get 429 for i := uint32(0); i < cbConfig.FailureThreshold; i++ { - resp := doRequest() - assert.Equal(t, http.StatusTooManyRequests, resp.StatusCode) + status := doRequest() + assert.Equal(t, http.StatusTooManyRequests, status) } //nolint:gosec // G115: test constant, no overflow risk assert.Equal(t, int32(cbConfig.FailureThreshold), upstreamCalls.Load()) 
// Phase 2: Verify circuit is open // Request should be blocked by circuit breaker (no upstream call) - resp := doRequest() - assert.Equal(t, http.StatusServiceUnavailable, resp.StatusCode) + status := doRequest() + assert.Equal(t, http.StatusServiceUnavailable, status) //nolint:gosec // G115: test constant, no overflow risk assert.Equal(t, int32(cbConfig.FailureThreshold), upstreamCalls.Load(), "No new upstream call when circuit is open") @@ -177,8 +179,8 @@ func TestCircuitBreaker_FullRecoveryCycle(t *testing.T) { // Phase 4: Recovery - request in half-open state should succeed and close circuit upstreamCallsBefore := upstreamCalls.Load() - resp = doRequest() - assert.Equal(t, http.StatusOK, resp.StatusCode, "Request should succeed in half-open state") + status = doRequest() + assert.Equal(t, http.StatusOK, status, "Request should succeed in half-open state") assert.Equal(t, upstreamCallsBefore+1, upstreamCalls.Load(), "Request should reach upstream in half-open state") // Verify circuit is now closed @@ -188,8 +190,8 @@ func TestCircuitBreaker_FullRecoveryCycle(t *testing.T) { // Phase 5: Verify circuit is fully functional again // Multiple requests should all succeed and reach upstream for i := 0; i < 3; i++ { - resp = doRequest() - assert.Equal(t, http.StatusOK, resp.StatusCode, "Request should succeed after circuit closes") + status = doRequest() + assert.Equal(t, http.StatusOK, status, "Request should succeed after circuit closes") } // All requests should have reached upstream @@ -291,22 +293,24 @@ func TestCircuitBreaker_HalfOpenFailure(t *testing.T) { withActor("test-user-id", nil), ) - doRequest := func() *http.Response { - resp := bridgeServer.makeRequest(t, http.MethodPost, tc.path, []byte(tc.requestBody), tc.headers) - _, err := io.ReadAll(resp.Body) + doRequest := func() int { + resp, err := bridgeServer.makeRequest(t, http.MethodPost, tc.path, []byte(tc.requestBody), tc.headers) + require.NoError(t, err) + _, err = io.ReadAll(resp.Body) 
require.NoError(t, err) - return resp + require.NoError(t, resp.Body.Close()) + return resp.StatusCode } // Phase 1: Trip the circuit for i := uint32(0); i < cbConfig.FailureThreshold; i++ { - resp := doRequest() - assert.Equal(t, http.StatusTooManyRequests, resp.StatusCode) + status := doRequest() + assert.Equal(t, http.StatusTooManyRequests, status) } // Verify circuit is open - resp := doRequest() - assert.Equal(t, http.StatusServiceUnavailable, resp.StatusCode) + status := doRequest() + assert.Equal(t, http.StatusServiceUnavailable, status) trips := promtest.ToFloat64(m.CircuitBreakerTrips.WithLabelValues(tc.expectProvider, tc.expectEndpoint, tc.expectModel)) assert.Equal(t, 1.0, trips, "CircuitBreakerTrips should be 1") @@ -316,13 +320,13 @@ func TestCircuitBreaker_HalfOpenFailure(t *testing.T) { // Phase 3: Request in half-open state fails, circuit should re-open upstreamCallsBefore := upstreamCalls.Load() - resp = doRequest() - assert.Equal(t, http.StatusTooManyRequests, resp.StatusCode, "Request should fail in half-open state") + status = doRequest() + assert.Equal(t, http.StatusTooManyRequests, status, "Request should fail in half-open state") assert.Equal(t, upstreamCallsBefore+1, upstreamCalls.Load(), "Request should reach upstream in half-open state") // Circuit should be open again - next request should be rejected immediately - resp = doRequest() - assert.Equal(t, http.StatusServiceUnavailable, resp.StatusCode, "Circuit should be open again after half-open failure") + status = doRequest() + assert.Equal(t, http.StatusServiceUnavailable, status, "Circuit should be open again after half-open failure") assert.Equal(t, upstreamCallsBefore+1, upstreamCalls.Load(), "Request should NOT reach upstream when circuit re-opens") // Verify metrics: trips should be 2 now (tripped twice) @@ -437,22 +441,24 @@ func TestCircuitBreaker_HalfOpenMaxRequests(t *testing.T) { withActor("test-user-id", nil), ) - doRequest := func() *http.Response { - resp := 
bridgeServer.makeRequest(t, http.MethodPost, tc.path, []byte(tc.requestBody), tc.headers) - _, err := io.ReadAll(resp.Body) + doRequest := func() int { + resp, err := bridgeServer.makeRequest(t, http.MethodPost, tc.path, []byte(tc.requestBody), tc.headers) require.NoError(t, err) - return resp + _, err = io.ReadAll(resp.Body) + require.NoError(t, err) + require.NoError(t, resp.Body.Close()) + return resp.StatusCode } // Phase 1: Trip the circuit for i := uint32(0); i < cbConfig.FailureThreshold; i++ { - resp := doRequest() - assert.Equal(t, http.StatusTooManyRequests, resp.StatusCode) + status := doRequest() + assert.Equal(t, http.StatusTooManyRequests, status) } // Verify circuit is open - resp := doRequest() - assert.Equal(t, http.StatusServiceUnavailable, resp.StatusCode) + status := doRequest() + assert.Equal(t, http.StatusServiceUnavailable, status) // Phase 2: Wait for half-open state and switch upstream to success time.Sleep(cbConfig.Timeout + 10*time.Millisecond) @@ -468,8 +474,8 @@ func TestCircuitBreaker_HalfOpenMaxRequests(t *testing.T) { wg.Add(1) go func() { defer wg.Done() - resp := doRequest() - responses <- resp.StatusCode + status := doRequest() + responses <- status }() } @@ -556,28 +562,30 @@ func TestCircuitBreaker_PerModelIsolation(t *testing.T) { withActor("test-user-id", nil), ) - doRequest := func(model string) *http.Response { + doRequest := func(model string) int { body := fmt.Sprintf(`{"model":%q,"max_tokens":1024,"messages":[{"role":"user","content":"hi"}]}`, model) - resp := bridgeServer.makeRequest(t, http.MethodPost, pathAnthropicMessages, []byte(body), http.Header{ + resp, err := bridgeServer.makeRequest(t, http.MethodPost, pathAnthropicMessages, []byte(body), http.Header{ "x-api-key": {"test"}, "anthropic-version": {"2023-06-01"}, }) - _, err := io.ReadAll(resp.Body) require.NoError(t, err) - return resp + _, err = io.ReadAll(resp.Body) + require.NoError(t, err) + require.NoError(t, resp.Body.Close()) + return resp.StatusCode } // 
Phase 1: Trip the circuit for sonnet model for i := uint32(0); i < cbConfig.FailureThreshold; i++ { - resp := doRequest("claude-sonnet-4-20250514") - assert.Equal(t, http.StatusTooManyRequests, resp.StatusCode) + status := doRequest("claude-sonnet-4-20250514") + assert.Equal(t, http.StatusTooManyRequests, status) } //nolint:gosec // G115: test constant, no overflow risk assert.Equal(t, int32(cbConfig.FailureThreshold), sonnetCalls.Load()) // Verify sonnet circuit is open - resp := doRequest("claude-sonnet-4-20250514") - assert.Equal(t, http.StatusServiceUnavailable, resp.StatusCode, "Sonnet circuit should be open") + status := doRequest("claude-sonnet-4-20250514") + assert.Equal(t, http.StatusServiceUnavailable, status, "Sonnet circuit should be open") //nolint:gosec // G115: test constant, no overflow risk assert.Equal(t, int32(cbConfig.FailureThreshold), sonnetCalls.Load(), "No new sonnet calls when circuit is open") @@ -589,14 +597,14 @@ func TestCircuitBreaker_PerModelIsolation(t *testing.T) { assert.Equal(t, 1.0, sonnetState, "Sonnet CircuitBreakerState should be 1 (open)") // Phase 2: Haiku model should still work (independent circuit) - resp = doRequest("claude-3-5-haiku-20241022") - assert.Equal(t, http.StatusOK, resp.StatusCode, "Haiku should succeed while sonnet circuit is open") + status = doRequest("claude-3-5-haiku-20241022") + assert.Equal(t, http.StatusOK, status, "Haiku should succeed while sonnet circuit is open") assert.Equal(t, int32(1), haikuCalls.Load(), "Haiku call should reach upstream") // Make multiple haiku requests - all should succeed for i := 0; i < 3; i++ { - resp = doRequest("claude-3-5-haiku-20241022") - assert.Equal(t, http.StatusOK, resp.StatusCode, "Haiku should continue to succeed") + status = doRequest("claude-3-5-haiku-20241022") + assert.Equal(t, http.StatusOK, status, "Haiku should continue to succeed") } assert.Equal(t, int32(4), haikuCalls.Load(), "All haiku calls should reach upstream") @@ -611,8 +619,8 @@ func 
TestCircuitBreaker_PerModelIsolation(t *testing.T) { time.Sleep(cbConfig.Timeout + 10*time.Millisecond) sonnetShouldFail.Store(false) - resp = doRequest("claude-sonnet-4-20250514") - assert.Equal(t, http.StatusOK, resp.StatusCode, "Sonnet should recover after timeout") + status = doRequest("claude-sonnet-4-20250514") + assert.Equal(t, http.StatusOK, status, "Sonnet should recover after timeout") // Verify sonnet circuit is now closed sonnetState = promtest.ToFloat64(m.CircuitBreakerState.WithLabelValues(config.ProviderAnthropic, "/v1/messages", "claude-sonnet-4-20250514")) diff --git a/internal/integrationtest/metrics_test.go b/internal/integrationtest/metrics_test.go index 1d8a103d..bb777ea0 100644 --- a/internal/integrationtest/metrics_test.go +++ b/internal/integrationtest/metrics_test.go @@ -155,8 +155,10 @@ func TestMetrics_Interception(t *testing.T) { withMetrics(m), ) - resp := bridgeServer.makeRequest(t, http.MethodPost, tc.path, fix.Request(), tc.headers) - _, err := io.ReadAll(resp.Body) + resp, err := bridgeServer.makeRequest(t, http.MethodPost, tc.path, fix.Request(), tc.headers) + require.NoError(t, err) + defer resp.Body.Close() + _, err = io.ReadAll(resp.Body) require.NoError(t, err) count := promtest.ToFloat64(m.InterceptionCount.WithLabelValues( @@ -237,7 +239,9 @@ func TestMetrics_PassthroughCount(t *testing.T) { withMetrics(m), ) - resp := bridgeServer.makeRequest(t, http.MethodGet, "/openai/v1/models", nil) + resp, err := bridgeServer.makeRequest(t, http.MethodGet, "/openai/v1/models", nil) + require.NoError(t, err) + defer resp.Body.Close() require.Equal(t, http.StatusOK, resp.StatusCode) count := promtest.ToFloat64(m.PassthroughCount.WithLabelValues( @@ -259,9 +263,11 @@ func TestMetrics_PromptCount(t *testing.T) { withMetrics(m), ) - resp := bridgeServer.makeRequest(t, http.MethodPost, pathOpenAIChatCompletions, fix.Request(), http.Header{"User-Agent": []string{"claude-code/1.0.0"}}) + resp, err := bridgeServer.makeRequest(t, http.MethodPost, 
pathOpenAIChatCompletions, fix.Request(), http.Header{"User-Agent": []string{"claude-code/1.0.0"}}) + require.NoError(t, err) + defer resp.Body.Close() require.Equal(t, http.StatusOK, resp.StatusCode) - _, err := io.ReadAll(resp.Body) + _, err = io.ReadAll(resp.Body) require.NoError(t, err) prompts := promtest.ToFloat64(m.PromptCount.WithLabelValues( @@ -353,7 +359,9 @@ func TestMetrics_TokenUseCount(t *testing.T) { reqBody, err = sjson.SetBytes(reqBody, "stream", true) require.NoError(t, err) } - resp := bridgeServer.makeRequest(t, http.MethodPost, tc.reqPath, reqBody, nil) + resp, err := bridgeServer.makeRequest(t, http.MethodPost, tc.reqPath, reqBody, nil) + require.NoError(t, err) + defer resp.Body.Close() require.Equal(t, http.StatusOK, resp.StatusCode) _, _ = io.ReadAll(resp.Body) @@ -386,9 +394,11 @@ func TestMetrics_NonInjectedToolUseCount(t *testing.T) { withMetrics(m), ) - resp := bridgeServer.makeRequest(t, http.MethodPost, pathOpenAIChatCompletions, fix.Request()) + resp, err := bridgeServer.makeRequest(t, http.MethodPost, pathOpenAIChatCompletions, fix.Request()) + require.NoError(t, err) + defer resp.Body.Close() require.Equal(t, http.StatusOK, resp.StatusCode) - _, err := io.ReadAll(resp.Body) + _, err = io.ReadAll(resp.Body) require.NoError(t, err) count := promtest.ToFloat64(m.NonInjectedToolUseCount.WithLabelValues( @@ -416,9 +426,11 @@ func TestMetrics_InjectedToolUseCount(t *testing.T) { withMCP(mockMCP), ) - resp := bridgeServer.makeRequest(t, http.MethodPost, pathAnthropicMessages, fix.Request()) + resp, err := bridgeServer.makeRequest(t, http.MethodPost, pathAnthropicMessages, fix.Request()) + require.NoError(t, err) + defer resp.Body.Close() require.Equal(t, http.StatusOK, resp.StatusCode) - _, err := io.ReadAll(resp.Body) + _, err = io.ReadAll(resp.Body) require.NoError(t, err) // Wait until full roundtrip has completed. 
diff --git a/internal/integrationtest/responses_test.go b/internal/integrationtest/responses_test.go index 2e9d8cd6..65c307ee 100644 --- a/internal/integrationtest/responses_test.go +++ b/internal/integrationtest/responses_test.go @@ -343,7 +343,9 @@ func TestResponsesOutputMatchesUpstream(t *testing.T) { bridgeServer := newBridgeTestServer(ctx, t, upstream.URL) - resp := bridgeServer.makeRequest(t, http.MethodPost, pathOpenAIResponses, fix.Request(), http.Header{"User-Agent": {tc.userAgent}}) + resp, err := bridgeServer.makeRequest(t, http.MethodPost, pathOpenAIResponses, fix.Request(), http.Header{"User-Agent": {tc.userAgent}}) + require.NoError(t, err) + defer resp.Body.Close() require.Equal(t, http.StatusOK, resp.StatusCode) got, err := io.ReadAll(resp.Body) @@ -430,7 +432,9 @@ func TestResponsesBackgroundModeForbidden(t *testing.T) { // Create a request with background mode enabled reqBytes := responsesRequestBytes(t, tc.streaming, keyVal{"background", true}) - resp := bridgeServer.makeRequest(t, http.MethodPost, pathOpenAIResponses, reqBytes) + resp, err := bridgeServer.makeRequest(t, http.MethodPost, pathOpenAIResponses, reqBytes) + require.NoError(t, err) + defer resp.Body.Close() require.Equal(t, "application/json", resp.Header.Get("Content-Type")) require.Equal(t, http.StatusNotImplemented, resp.StatusCode) @@ -568,7 +572,9 @@ func TestResponsesParallelToolsOverwritten(t *testing.T) { require.NoError(t, err) } - resp := bridgeServer.makeRequest(t, http.MethodPost, pathOpenAIResponses, reqBody) + resp, err := bridgeServer.makeRequest(t, http.MethodPost, pathOpenAIResponses, reqBody) + require.NoError(t, err) + defer resp.Body.Close() _, err = io.ReadAll(resp.Body) require.NoError(t, err) @@ -638,7 +644,9 @@ func TestClientAndConnectionError(t *testing.T) { bridgeServer := newBridgeTestServer(ctx, t, tc.addr, withCustomProvider(provider.NewOpenAI(openAICfg(tc.addr, apiKey)))) reqBytes := responsesRequestBytes(t, tc.streaming) - resp := 
bridgeServer.makeRequest(t, http.MethodPost, pathOpenAIResponses, reqBytes) + resp, err := bridgeServer.makeRequest(t, http.MethodPost, pathOpenAIResponses, reqBytes) + require.NoError(t, err) + defer resp.Body.Close() require.Equal(t, "application/json", resp.Header.Get("Content-Type")) require.Equal(t, http.StatusInternalServerError, resp.StatusCode) @@ -715,7 +723,9 @@ func TestUpstreamError(t *testing.T) { bridgeServer := newBridgeTestServer(ctx, t, upstream.URL) reqBytes := responsesRequestBytes(t, tc.streaming) - resp := bridgeServer.makeRequest(t, http.MethodPost, pathOpenAIResponses, reqBytes) + resp, err := bridgeServer.makeRequest(t, http.MethodPost, pathOpenAIResponses, reqBytes) + require.NoError(t, err) + defer resp.Body.Close() require.Equal(t, tc.statusCode, resp.StatusCode) require.Equal(t, tc.contentType, resp.Header.Get("Content-Type")) @@ -896,7 +906,9 @@ func TestResponsesInjectedTool(t *testing.T) { bridgeServer := newBridgeTestServer(ctx, t, upstream.URL, withMCP(mockMCP)) - resp := bridgeServer.makeRequest(t, http.MethodPost, pathOpenAIResponses, fix.Request()) + resp, err := bridgeServer.makeRequest(t, http.MethodPost, pathOpenAIResponses, fix.Request()) + require.NoError(t, err) + defer resp.Body.Close() require.Equal(t, http.StatusOK, resp.StatusCode) body, err := io.ReadAll(resp.Body) @@ -1033,10 +1045,12 @@ func TestResponsesModelThoughts(t *testing.T) { bridgeServer := newBridgeTestServer(ctx, t, upstream.URL) - resp := bridgeServer.makeRequest(t, http.MethodPost, pathOpenAIResponses, fix.Request()) + resp, err := bridgeServer.makeRequest(t, http.MethodPost, pathOpenAIResponses, fix.Request()) + require.NoError(t, err) + defer resp.Body.Close() require.Equal(t, http.StatusOK, resp.StatusCode) - _, err := io.ReadAll(resp.Body) + _, err = io.ReadAll(resp.Body) require.NoError(t, err) bridgeServer.Recorder.VerifyModelThoughtsRecorded(t, tc.expectedThoughts) diff --git a/internal/integrationtest/setupbridge.go 
b/internal/integrationtest/setupbridge.go index 17883c2b..afd403a3 100644 --- a/internal/integrationtest/setupbridge.go +++ b/internal/integrationtest/setupbridge.go @@ -63,11 +63,13 @@ type bridgeTestServer struct { // makeRequest builds and executes an HTTP request against this server. // Optional headers are applied after the default Content-Type. -func (s *bridgeTestServer) makeRequest(t *testing.T, method string, path string, body []byte, header ...http.Header) *http.Response { +func (s *bridgeTestServer) makeRequest(t *testing.T, method string, path string, body []byte, header ...http.Header) (*http.Response, error) { t.Helper() req, err := http.NewRequestWithContext(t.Context(), method, s.URL+path, bytes.NewReader(body)) - require.NoError(t, err) + if err != nil { + return nil, err + } req.Header.Set("Content-Type", "application/json") for _, h := range header { for k, vals := range h { @@ -76,10 +78,7 @@ func (s *bridgeTestServer) makeRequest(t *testing.T, method string, path string, } } } - resp, err := http.DefaultClient.Do(req) - require.NoError(t, err) - t.Cleanup(func() { _ = resp.Body.Close() }) - return resp + return http.DefaultClient.Do(req) } type bridgeOption func(*bridgeConfig) @@ -236,7 +235,8 @@ func setupInjectedToolTest( reqBody, err := sjson.SetBytes(fix.Request(), "stream", streaming) require.NoError(t, err) - resp := bridgeServer.makeRequest(t, http.MethodPost, path, reqBody) + resp, err := bridgeServer.makeRequest(t, http.MethodPost, path, reqBody) + require.NoError(t, err) require.Equal(t, http.StatusOK, resp.StatusCode) // Wait both requests (initial + tool call result) diff --git a/internal/integrationtest/trace_test.go b/internal/integrationtest/trace_test.go index 75fe0949..878118ab 100644 --- a/internal/integrationtest/trace_test.go +++ b/internal/integrationtest/trace_test.go @@ -155,7 +155,9 @@ func TestTraceAnthropic(t *testing.T) { reqBody, err := sjson.SetBytes(fix.Request(), "stream", tc.streaming) require.NoError(t, err) - 
resp := bridgeServer.makeRequest(t, http.MethodPost, pathAnthropicMessages, reqBody) + resp, err := bridgeServer.makeRequest(t, http.MethodPost, pathAnthropicMessages, reqBody) + require.NoError(t, err) + defer resp.Body.Close() require.Equal(t, http.StatusOK, resp.StatusCode) bridgeServer.Close() @@ -265,7 +267,9 @@ func TestTraceAnthropicErr(t *testing.T) { reqBody, err := sjson.SetBytes(fix.Request(), "stream", tc.streaming) require.NoError(t, err) - resp := bridgeServer.makeRequest(t, http.MethodPost, pathAnthropicMessages, reqBody) + resp, err := bridgeServer.makeRequest(t, http.MethodPost, pathAnthropicMessages, reqBody) + require.NoError(t, err) + defer resp.Body.Close() if tc.streaming { require.Equal(t, http.StatusOK, resp.StatusCode) } else { @@ -385,10 +389,11 @@ func TestInjectedToolsTrace(t *testing.T) { validatorFn = openaiChatToolResultValidator(t) } - bridgeServer, mockMCP, _ := setupInjectedToolTest( + bridgeServer, mockMCP, resp := setupInjectedToolTest( t, tc.fixture, tc.streaming, tracer, tc.path, validatorFn, tc.opts..., ) + defer resp.Body.Close() require.Len(t, bridgeServer.Recorder.RecordedInterceptions(), 1) intcID := bridgeServer.Recorder.RecordedInterceptions()[0].ID @@ -542,7 +547,9 @@ func TestTraceOpenAI(t *testing.T) { reqBody, err := sjson.SetBytes(fix.Request(), "stream", tc.streaming) require.NoError(t, err) - resp := bridgeServer.makeRequest(t, http.MethodPost, tc.path, reqBody) + resp, err := bridgeServer.makeRequest(t, http.MethodPost, tc.path, reqBody) + require.NoError(t, err) + defer resp.Body.Close() require.Equal(t, http.StatusOK, resp.StatusCode) bridgeServer.Close() @@ -697,7 +704,9 @@ func TestTraceOpenAIErr(t *testing.T) { reqBody, err := sjson.SetBytes(fix.Request(), "stream", tc.streaming) require.NoError(t, err) - resp := bridgeServer.makeRequest(t, http.MethodPost, tc.path, reqBody) + resp, err := bridgeServer.makeRequest(t, http.MethodPost, tc.path, reqBody) + require.NoError(t, err) + defer resp.Body.Close() 
require.Equal(t, tc.expectCode, resp.StatusCode) bridgeServer.Close() @@ -737,7 +746,9 @@ func TestTracePassthrough(t *testing.T) { withTracer(tracer), ) - resp := bridgeServer.makeRequest(t, http.MethodGet, "/openai/v1/models", nil) + resp, err := bridgeServer.makeRequest(t, http.MethodGet, "/openai/v1/models", nil) + require.NoError(t, err) + defer resp.Body.Close() require.Equal(t, http.StatusOK, resp.StatusCode) bridgeServer.Close()