diff --git a/core/llm/index.ts b/core/llm/index.ts index f7d97b73e3c..099d2234ec2 100644 --- a/core/llm/index.ts +++ b/core/llm/index.ts @@ -460,9 +460,11 @@ export abstract class BaseLLM implements ILLM { ); } } - return new Error( + const error = new Error( `HTTP ${resp.status} ${resp.statusText} from ${resp.url}\n\n${text}`, - ); + ) as any; + error.response = resp; + return error; } fetch(url: RequestInfo | URL, init?: RequestInit): Promise<Response> { @@ -543,7 +545,7 @@ export abstract class BaseLLM implements ILLM { }; return withExponentialBackoff( () => customFetch(url, init) as any, - 5, + 8, 0.5, ); } diff --git a/core/util/withExponentialBackoff.test.ts b/core/util/withExponentialBackoff.test.ts index 5049073c758..f561c82eb36 100644 --- a/core/util/withExponentialBackoff.test.ts +++ b/core/util/withExponentialBackoff.test.ts @@ -6,7 +6,7 @@ import { RETRY_AFTER_HEADER, } from "./withExponentialBackoff"; -describe.skip("withExponentialBackoff", () => { +describe("withExponentialBackoff", () => { it("should return result when apiCall succeeds on first attempt", async () => { // Arrange const apiCall = jest.fn().mockResolvedValue("Success"); @@ -112,7 +112,7 @@ describe.skip("withExponentialBackoff", () => { // Act & Assert await expect( withExponentialBackoff(apiCall, maxTries, initialDelaySeconds), - ).rejects.toThrow("Failed to make API call after max tries"); + ).rejects.toThrow(`Failed to make API call after ${maxTries} retries`); expect(apiCall).toHaveBeenCalledTimes(maxTries); }); @@ -135,7 +135,7 @@ describe.skip("withExponentialBackoff", () => { await expect( withExponentialBackoff(apiCall, maxTries, initialDelaySeconds), - ).rejects.toThrow("Failed to make API call after max tries"); + ).rejects.toThrow(`Failed to make API call after ${maxTries} retries`); expect(apiCall).toHaveBeenCalledTimes(0); }); diff --git a/packages/openai-adapters/src/apis/OpenAI.ts b/packages/openai-adapters/src/apis/OpenAI.ts index d0f8d30ca3a..d7afcd7d77b 100644 --- 
a/packages/openai-adapters/src/apis/OpenAI.ts +++ b/packages/openai-adapters/src/apis/OpenAI.ts @@ -46,6 +46,7 @@ export class OpenAIApi implements BaseLlmApi { baseURL: this.apiBase, fetch: customFetch(config.requestOptions), timeout: config?.requestOptions?.timeout || undefined, + maxRetries: 8, }); } modifyChatBody<T extends ChatCompletionCreateParams>(body: T): T {