Handle JSON Parsing errors #5995

Merged 4 commits on Jun 9, 2025
16 changes: 16 additions & 0 deletions core/__mocks__/@continuedev/fetch/index.ts
@@ -0,0 +1,16 @@
import { vi } from "vitest";

export const fetchwithRequestOptions = vi.fn(
async (url, options, requestOptions) => {
console.log("Mocked fetch called with:", url, options, requestOptions);
return {
ok: true,
status: 200,
statusText: "OK",
};
},
);

export const streamSse = vi.fn(function* () {
yield "";
});
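
This mock is what `vi.mock("@continuedev/fetch")` resolves to in the new `core/llm/llm-pre-fetch.vitest.ts` below, since Vitest resolves module mocks from a top-level `__mocks__` directory mirroring the package name. The mocked `fetchwithRequestOptions` returns a bare object rather than a real `Response`, which is sufficient here because the tests only assert on the outgoing request arguments.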
40 changes: 24 additions & 16 deletions core/context/providers/GoogleContextProvider.ts
@@ -38,27 +38,35 @@ class GoogleContextProvider extends BaseContextProvider {
       body: payload,
     });

     if (!response.ok) {
       throw new Error(
         `Failed to fetch Google search results: ${response.statusText}`,
       );
     }
     const results = await response.text();
-    const jsonResults = JSON.parse(results);
-    let content = `Google Search: ${query}\n\n`;
-    const answerBox = jsonResults.answerBox;
-
-    if (answerBox) {
-      content += `Answer Box (${answerBox.title}): ${answerBox.answer}\n\n`;
-    }
-
-    for (const result of jsonResults.organic) {
-      content += `${result.title}\n${result.link}\n${result.snippet}\n\n`;
-    }
-
-    return [
-      {
-        content,
-        name: "Google Search",
-        description: "Google Search",
-      },
-    ];
+    try {
+      const parsed = JSON.parse(results);
+      let content = `Google Search: ${query}\n\n`;
+      const answerBox = parsed.answerBox;
+
+      if (answerBox) {
+        content += `Answer Box (${answerBox.title}): ${answerBox.answer}\n\n`;
+      }
+
+      for (const result of parsed.organic) {
+        content += `${result.title}\n${result.link}\n${result.snippet}\n\n`;
+      }
+
+      return [
+        {
+          content,
+          name: "Google Search",
+          description: "Google Search",
+        },
+      ];
+    } catch (e) {
+      throw new Error(`Failed to parse Google search results: ${results}`);
+    }
   }
 }
18 changes: 11 additions & 7 deletions core/context/providers/GreptileContextProvider.ts
@@ -81,13 +81,17 @@ class GreptileContextProvider extends BaseContextProvider {
       }

       // Parse the response as JSON
-      const json = JSON.parse(rawText);
-
-      return json.sources.map((source: any) => ({
-        description: source.filepath,
-        content: `File: ${source.filepath}\nLines: ${source.linestart}-${source.lineend}\n\n${source.summary}`,
-        name: (source.filepath.split("/").pop() ?? "").split("\\").pop() ?? "",
-      }));
+      try {
+        const json = JSON.parse(rawText);
+        return json.sources.map((source: any) => ({
+          description: source.filepath,
+          content: `File: ${source.filepath}\nLines: ${source.linestart}-${source.lineend}\n\n${source.summary}`,
+          name:
+            (source.filepath.split("/").pop() ?? "").split("\\").pop() ?? "",
+        }));
+      } catch (jsonError) {
+        throw new Error(`Failed to parse Greptile response:\n${rawText}`);
+      }
     } catch (error) {
       console.error("Error getting context items from Greptile:", error);
       throw new Error("Error getting context items from Greptile");
2 changes: 1 addition & 1 deletion core/core.ts
@@ -701,7 +701,7 @@ export class Core {
this.messenger.send("toolCallPartialOutput", params);
};

return await callTool(tool, toolCall.function.arguments, {
return await callTool(tool, toolCall, {
config,
ide: this.ide,
llm: config.selectedModelByRole.chat,
32 changes: 21 additions & 11 deletions core/indexing/LanceDbIndex.ts
@@ -294,17 +294,27 @@ export class LanceDbIndex implements CodebaseIndex {
     );
     const cachedItems = await stmt.all();

-    const lanceRows: LanceDbRow[] = cachedItems.map(
-      ({ uuid, vector, startLine, endLine, contents }) => ({
-        path,
-        uuid,
-        startLine,
-        endLine,
-        contents,
-        cachekey: cacheKey,
-        vector: JSON.parse(vector),
-      }),
-    );
+    const lanceRows: LanceDbRow[] = [];
+    for (const item of cachedItems) {
+      try {
+        const vector = JSON.parse(item.vector);
+        const { uuid, startLine, endLine, contents } = item;
+
+        lanceRows.push({
+          path,
+          uuid,
+          startLine,
+          endLine,
+          contents,
+          cachekey: cacheKey,
+          vector,
+        });
+      } catch (err) {
+        console.warn(
+          `LanceDBIndex, skipping ${path} due to invalid vector JSON:\n${item.vector}\n\nError: ${err}`,
+        );
+      }
+    }

     if (lanceRows.length > 0) {
       if (needToCreateLanceTable) {
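
Worth noting the failure mode chosen here: a cached row whose stored vector is not valid JSON is skipped with a warning instead of aborting the whole indexing pass, so the remaining rows are still written to the Lance table.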
113 changes: 113 additions & 0 deletions core/llm/llm-pre-fetch.vitest.ts
@@ -0,0 +1,113 @@
import { fetchwithRequestOptions } from "@continuedev/fetch";
import * as openAiAdapters from "@continuedev/openai-adapters";
import * as dotenv from "dotenv";
import { beforeEach, describe, expect, test, vi } from "vitest";
import { ChatMessage, ILLM } from "..";
import Anthropic from "./llms/Anthropic";
import Gemini from "./llms/Gemini";
import OpenAI from "./llms/OpenAI";

dotenv.config();

vi.mock("@continuedev/fetch");
vi.mock("@continuedev/openai-adapters");

async function dudLLMCall(llm: ILLM, messages: ChatMessage[]) {
try {
const abortController = new AbortController();
const gen = llm.streamChat(messages, abortController.signal, {});
await gen.next();
await gen.return({
completion: "",
completionOptions: {
model: "",
},
modelTitle: "",
prompt: "",
});
abortController.abort();
} catch (e) {
console.error("Expected error", e);
}
}

const invalidToolCallArg = '{"name": "Ali';
const messagesWithInvalidToolCallArgs: ChatMessage[] = [
{
role: "user",
content: "Call the say_hello tool",
},
{
role: "assistant",
content: "",
toolCalls: [
{
id: "tool_call_1",
type: "function",
function: {
name: "say_name",
arguments: invalidToolCallArg,
},
},
],
},
{
role: "user",
content: "This is my response",
},
];

describe("LLM Pre-fetch", () => {
beforeEach(() => {
vi.resetAllMocks();
// Log to verify the mock is properly set up
console.log("Mock setup:", openAiAdapters);
});

test("Invalid tool call args are ignored", async () => {
const anthropic = new Anthropic({
model: "not-important",
apiKey: "invalid",
});
await dudLLMCall(anthropic, messagesWithInvalidToolCallArgs);
expect(fetchwithRequestOptions).toHaveBeenCalledWith(
expect.any(URL),
{
method: "POST",
headers: expect.any(Object),
signal: expect.any(AbortSignal),
body: expect.stringContaining('"name":"say_name","input":{}'),
},
expect.any(Object),
);

vi.clearAllMocks();
const gemini = new Gemini({ model: "gemini-something", apiKey: "invalid" });
await dudLLMCall(gemini, messagesWithInvalidToolCallArgs);
expect(fetchwithRequestOptions).toHaveBeenCalledWith(
expect.any(URL),
{
method: "POST",
// headers: expect.any(Object),
signal: expect.any(AbortSignal),
body: expect.stringContaining('"name":"say_name","args":{}'),
},
expect.any(Object),
);

// OPENAI DOES NOT NEED TO CLEAR INVALID TOOL CALL ARGS BECAUSE IT STORES THEM IN STRINGS
vi.clearAllMocks();
const openai = new OpenAI({ model: "gpt-something", apiKey: "invalid" });
await dudLLMCall(openai, messagesWithInvalidToolCallArgs);
expect(fetchwithRequestOptions).toHaveBeenCalledWith(
expect.any(URL),
{
method: "POST",
headers: expect.any(Object),
signal: expect.any(AbortSignal),
body: expect.stringContaining(JSON.stringify(invalidToolCallArg)),
},
expect.any(Object),
);
});
});
3 changes: 2 additions & 1 deletion core/llm/llms/Anthropic.ts
@@ -1,5 +1,6 @@
 import { streamSse } from "@continuedev/fetch";
 import { ChatMessage, CompletionOptions, LLMOptions } from "../../index.js";
+import { safeParseToolCallArgs } from "../../tools/parseArgs.js";
 import { renderChatMessage, stripImages } from "../../util/messageContent.js";
 import { BaseLLM } from "../index.js";

@@ -66,7 +67,7 @@ class Anthropic extends BaseLLM {
           type: "tool_use",
           id: toolCall.id,
           name: toolCall.function?.name,
-          input: JSON.parse(toolCall.function?.arguments || "{}"),
+          input: safeParseToolCallArgs(toolCall),
         })),
       };
     } else if (message.role === "thinking" && !message.redactedThinking) {
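
`safeParseToolCallArgs` is defined in `core/tools/parseArgs.ts`, which is imported throughout this PR but not shown in the diff. Judging from its call sites and the `"input":{}` / `"args":{}` assertions in `llm-pre-fetch.vitest.ts` above, a minimal sketch might look like this (the exact signature, return type, and warning text are assumptions):

import { ToolCallDelta } from "../index.js";

// Hypothetical sketch, not the code merged in this PR: parse the arguments
// string if possible, otherwise warn and fall back to an empty object.
export function safeParseToolCallArgs(
  toolCall: ToolCallDelta,
): Record<string, unknown> {
  try {
    return JSON.parse(toolCall.function?.arguments || "{}");
  } catch (e) {
    console.warn(
      `Failed to parse tool call arguments:\n${toolCall.function?.arguments}`,
    );
    return {};
  }
}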
34 changes: 23 additions & 11 deletions core/llm/llms/Bedrock.ts
@@ -15,6 +15,7 @@ import {
   CompletionOptions,
   LLMOptions,
 } from "../../index.js";
+import { safeParseToolCallArgs } from "../../tools/parseArgs.js";
 import { renderChatMessage, stripImages } from "../../util/messageContent.js";
 import { BaseLLM } from "../index.js";
 import { PROVIDER_TOOL_SUPPORT } from "../toolSupport.js";

@@ -408,7 +409,7 @@ class Bedrock extends BaseLLM {
             toolUse: {
               toolUseId: toolCall.id,
               name: toolCall.function?.name,
-              input: JSON.parse(toolCall.function?.arguments || "{}"),
+              input: safeParseToolCallArgs(toolCall),
             },
           })),
         };

@@ -564,10 +565,14 @@
         const command = new InvokeModelCommand(input);
         const response = await client.send(command);
         if (response.body) {
-          const responseBody = JSON.parse(
-            new TextDecoder().decode(response.body),
-          );
-          return this._extractEmbeddings(responseBody);
+          const decoder = new TextDecoder();
+          const decoded = decoder.decode(response.body);
+          try {
+            const responseBody = JSON.parse(decoded);
+            return this._extractEmbeddings(responseBody);
+          } catch (e) {
+            console.error(`Error parsing response body from:\n${decoded}`, e);
+          }
         }
         return [];
       }),

@@ -662,12 +667,19 @@
         throw new Error("Empty response received from Bedrock");
       }

-      const responseBody = JSON.parse(new TextDecoder().decode(response.body));
-
-      // Sort results by index to maintain original order
-      return responseBody.results
-        .sort((a: any, b: any) => a.index - b.index)
-        .map((result: any) => result.relevance_score);
+      const decoder = new TextDecoder();
+      const decoded = decoder.decode(response.body);
+      try {
+        const responseBody = JSON.parse(decoded);
+        // Sort results by index to maintain original order
+        return responseBody.results
+          .sort((a: any, b: any) => a.index - b.index)
+          .map((result: any) => result.relevance_score);
+      } catch (e) {
+        throw new Error(
+          `Error parsing JSON from Bedrock response body:\n${decoded}, ${JSON.stringify(e)}`,
+        );
+      }
     } catch (error: unknown) {
       if (error instanceof Error) {
         if ("code" in error) {
12 changes: 9 additions & 3 deletions core/llm/llms/BedrockImport.ts
@@ -51,9 +51,15 @@ class BedrockImport extends BaseLLM {

     if (response.body) {
       for await (const item of response.body) {
-        const chunk = JSON.parse(new TextDecoder().decode(item.chunk?.bytes));
-        if (chunk.outputs[0].text) {
-          yield chunk.outputs[0].text;
+        const decoder = new TextDecoder();
+        const decoded = decoder.decode(item.chunk?.bytes);
+        try {
+          const chunk = JSON.parse(decoded);
+          if (chunk.outputs[0].text) {
+            yield chunk.outputs[0].text;
+          }
+        } catch (e) {
+          throw new Error(`Malformed JSON received from Bedrock: ${decoded}`);
         }
       }
     }
5 changes: 3 additions & 2 deletions core/llm/llms/Gemini.ts
@@ -9,6 +9,7 @@ import {
   TextMessagePart,
   ToolCallDelta,
 } from "../../index.js";
+import { safeParseToolCallArgs } from "../../tools/parseArgs.js";
 import { renderChatMessage, stripImages } from "../../util/messageContent.js";
 import { BaseLLM } from "../index.js";
 import {

@@ -250,11 +251,11 @@ class Gemini extends BaseLLM {
         };
         if (msg.toolCalls) {
           msg.toolCalls.forEach((toolCall) => {
-            if (toolCall.function?.name && toolCall.function?.arguments) {
+            if (toolCall.function?.name) {
               assistantMsg.parts.push({
                 functionCall: {
                   name: toolCall.function.name,
-                  args: JSON.parse(toolCall.function.arguments),
+                  args: safeParseToolCallArgs(toolCall),
                 },
               });
             }
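
One behavioral consequence of the loosened guard: previously a tool call with a name but no arguments string was dropped from the Gemini request entirely, and a malformed arguments string would make `JSON.parse` throw; both cases are now sent with empty `args`, matching the `"args":{}` assertion in the pre-fetch test above.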
13 changes: 9 additions & 4 deletions core/llm/llms/HuggingFaceTEI.ts
@@ -52,11 +52,16 @@ class HuggingFaceTEIEmbeddingsProvider extends BaseLLM {
     });
     if (!resp.ok) {
       const text = await resp.text();
-      const embedError = JSON.parse(text) as TEIEmbedErrorResponse;
-      if (!embedError.error_type || !embedError.error) {
-        throw new Error(text);
+      let teiError: TEIEmbedErrorResponse | null = null;
+      try {
+        teiError = JSON.parse(text);
+      } catch (e) {
+        console.log(`Failed to parse TEI embed error response:\n${text}`, e);
       }
-      throw new TEIEmbedError(embedError);
+      if (teiError && (teiError.error_type || teiError.error)) {
+        throw new TEIEmbedError(teiError);
+      }
+      throw new Error(text);
     }
     return (await resp.json()) as number[][];
   }