Skip to content
Closed
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 4 additions & 0 deletions src/extension/byok/vscode-node/byokContribution.ts
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@ import { ICAPIClientService } from '../../../platform/endpoint/common/capiClient
import { IVSCodeExtensionContext } from '../../../platform/extContext/common/extensionContext';
import { ILogService } from '../../../platform/log/common/logService';
import { IFetcherService } from '../../../platform/networking/common/fetcherService';
import { IRequestLogger } from '../../../platform/requestLogger/node/requestLogger';
import { Disposable } from '../../../util/vs/base/common/lifecycle';
import { IInstantiationService } from '../../../util/vs/platform/instantiation/common/instantiation';
import { BYOKKnownModels, BYOKModelProvider, isBYOKEnabled } from '../../byok/common/byokProvider';
Expand All @@ -21,6 +22,7 @@ import { CustomOAIBYOKModelProvider } from './customOAIProvider';
import { GeminiNativeBYOKLMProvider } from './geminiNativeProvider';
import { OllamaLMProvider } from './ollamaProvider';
import { OAIBYOKLMProvider } from './openAIProvider';
import { OpenCodeZenLMProvider } from './openCodeZenProvider';
import { OpenRouterLMProvider } from './openRouterProvider';
import { XAIBYOKLMProvider } from './xAIProvider';

Expand All @@ -38,6 +40,7 @@ export class BYOKContrib extends Disposable implements IExtensionContribution {
@IVSCodeExtensionContext extensionContext: IVSCodeExtensionContext,
@IAuthenticationService authService: IAuthenticationService,
@IInstantiationService private readonly _instantiationService: IInstantiationService,
@IRequestLogger private readonly _requestLoggerService: IRequestLogger,
) {
super();
this._register(commands.registerCommand('github.copilot.chat.manageBYOK', async (vendor: string) => {
Expand Down Expand Up @@ -91,6 +94,7 @@ export class BYOKContrib extends Disposable implements IExtensionContribution {
this._providers.set(XAIBYOKLMProvider.providerName.toLowerCase(), instantiationService.createInstance(XAIBYOKLMProvider, knownModels[XAIBYOKLMProvider.providerName], this._byokStorageService));
this._providers.set(OAIBYOKLMProvider.providerName.toLowerCase(), instantiationService.createInstance(OAIBYOKLMProvider, knownModels[OAIBYOKLMProvider.providerName], this._byokStorageService));
this._providers.set(OpenRouterLMProvider.providerName.toLowerCase(), instantiationService.createInstance(OpenRouterLMProvider, this._byokStorageService));
this._providers.set(OpenCodeZenLMProvider.providerName.toLowerCase(), instantiationService.createInstance(OpenCodeZenLMProvider, this._byokStorageService, this._fetcherService, this._logService, this._requestLoggerService, instantiationService));
this._providers.set(AzureBYOKModelProvider.providerName.toLowerCase(), instantiationService.createInstance(AzureBYOKModelProvider, this._byokStorageService));
this._providers.set(CustomOAIBYOKModelProvider.providerName.toLowerCase(), instantiationService.createInstance(CustomOAIBYOKModelProvider, this._byokStorageService));

Expand Down
121 changes: 121 additions & 0 deletions src/extension/byok/vscode-node/openCodeZenProvider.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,121 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/

import { LanguageModelChatInformation } from 'vscode';
import { IChatModelInformation, ModelSupportedEndpoint } from '../../../platform/endpoint/common/endpointProvider';
import { ILogService } from '../../../platform/log/common/logService';
import { IFetcherService } from '../../../platform/networking/common/fetcherService';
import { IInstantiationService } from '../../../util/vs/platform/instantiation/common/instantiation';
import { BYOKAuthType, BYOKKnownModels } from '../common/byokProvider';
import { OpenAIEndpoint } from '../node/openAIEndpoint';
import { BaseOpenAICompatibleLMProvider } from './baseOpenAICompatibleProvider';
import { IBYOKStorageService } from './byokStorageService';

export class OpenCodeZenLMProvider extends BaseOpenAICompatibleLMProvider {
	public static readonly providerName = 'OpenCodeZen';

	// Kept locally so getEndpointImpl can create endpoints; the base class does
	// not expose the instantiation service it receives.
	private readonly _zenInstantiationService: IInstantiationService;

	constructor(
		byokStorageService: IBYOKStorageService,
		@IFetcherService _fetcherService: IFetcherService,
		@ILogService _logService: ILogService,
		@IInstantiationService instantiationService: IInstantiationService,
	) {
		super(
			BYOKAuthType.GlobalApiKey,
			OpenCodeZenLMProvider.providerName,
			'https://opencode.ai/zen/v1',
			undefined,
			byokStorageService,
			_fetcherService,
			_logService,
			instantiationService,
		);
		this._zenInstantiationService = instantiationService;
	}

	/**
	 * Queries the OpenCode Zen `/models` endpoint and maps the response into the
	 * BYOK known-models shape.
	 *
	 * @returns An empty map when no API key is configured; otherwise the models
	 * reported by the service. Logs and rethrows on fetch/parse failure.
	 */
	protected override async getAllModels(): Promise<BYOKKnownModels> {
		if (!this._apiKey) {
			return {};
		}
		try {
			const response = await this._fetcherService.fetch(`${this._baseUrl}/models`, {
				method: 'GET',
				headers: {
					'Authorization': `Bearer ${this._apiKey}`,
					'Content-Type': 'application/json'
				}
			});

			if (!response.ok) {
				throw new Error(`Failed to fetch OpenCode Zen models: ${response.statusText}`);
			}

			const payload = await response.json();
			// The service may return either a bare array or an OpenAI-style
			// `{ data: [...] }` envelope; accept both.
			const entries = Array.isArray(payload) ? payload : (payload.data || []);

			const discovered: BYOKKnownModels = {};
			for (const entry of entries) {
				const supports = entry.capabilities?.supports;
				const limits = entry.capabilities?.limits;
				discovered[entry.id] = {
					name: entry.name || entry.id,
					toolCalling: supports?.tool_calls ?? true,
					vision: supports?.vision ?? false,
					maxInputTokens: limits?.max_prompt_tokens ?? 128000,
					maxOutputTokens: limits?.max_output_tokens ?? 4096,
					// Keep the per-model endpoint list when the API supplies one,
					// to help with routing.
					supportedEndpoints: entry.supported_endpoints
				};
			}

			this._knownModels = discovered;
			return discovered;
		} catch (error) {
			this._logService.error(error, `Error fetching available OpenCode Zen models`);
			throw error;
		}
	}

	/**
	 * Augments the base model info with the endpoint family OpenCode Zen expects
	 * for the given model id (per the provider documentation): Claude models use
	 * the Messages endpoint, GPT models use Responses, and everything else —
	 * including Gemini, which gets special URL handling in getEndpointImpl —
	 * uses ChatCompletions.
	 */
	protected override async getModelInfo(modelId: string, apiKey: string | undefined): Promise<IChatModelInformation> {
		const info = await super.getModelInfo(modelId, apiKey);
		info.supported_endpoints =
			modelId.includes('claude') ? [ModelSupportedEndpoint.Messages]
				: modelId.includes('gpt') ? [ModelSupportedEndpoint.Responses]
					: [ModelSupportedEndpoint.ChatCompletions];
		return info;
	}

	/**
	 * Builds an OpenAI-compatible endpoint whose URL suffix depends on the model
	 * family, mirroring the routing performed by getModelInfo.
	 */
	protected override async getEndpointImpl(model: LanguageModelChatInformation): Promise<OpenAIEndpoint> {
		const modelInfo = await this.getModelInfo(model.id, this._apiKey);

		let pathSuffix: string;
		if (model.id.includes('claude')) {
			pathSuffix = '/messages';
		} else if (model.id.includes('gpt')) {
			pathSuffix = '/responses';
		} else if (model.id.includes('gemini')) {
			// Gemini is addressed per-model, e.g. https://opencode.ai/zen/v1/models/gemini-3-pro
			pathSuffix = `/models/${model.id.replace('opencode/', '')}`;
		} else {
			// All other models (GLM, Kimi, Qwen, Grok, Big Pickle) use /chat/completions.
			pathSuffix = '/chat/completions';
		}

		return this._zenInstantiationService.createInstance(OpenAIEndpoint, modelInfo, this._apiKey ?? '', `${this._baseUrl}${pathSuffix}`);
	}
}
142 changes: 142 additions & 0 deletions src/extension/byok/vscode-node/test/openCodeZenProvider.spec.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,142 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/

import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest';
import * as vscode from 'vscode';
import { BlockedExtensionService, IBlockedExtensionService } from '../../../../platform/chat/common/blockedExtensionService';
import { IFetcherService } from '../../../../platform/networking/common/fetcherService';
import { ITestingServicesAccessor } from '../../../../platform/test/node/services';
import { DisposableStore } from '../../../../util/vs/base/common/lifecycle';
import { SyncDescriptor } from '../../../../util/vs/platform/instantiation/common/descriptors';
import { IInstantiationService } from '../../../../util/vs/platform/instantiation/common/instantiation';
import { createExtensionUnitTestingServices } from '../../../test/node/services';
import { IBYOKStorageService } from '../byokStorageService';
import { OpenCodeZenLMProvider } from '../openCodeZenProvider';

describe('OpenCodeZenLMProvider', () => {
	const disposables = new DisposableStore();
	let accessor: ITestingServicesAccessor;
	let instaService: IInstantiationService;
	let provider: OpenCodeZenLMProvider;
	let mockByokStorageService: IBYOKStorageService;

	// Canned /models response in the OpenAI-style `{ data: [...] }` envelope
	// that the provider's getAllModels understands.
	const mockModelsResponse = {
		data: [
			{
				id: 'opencode/gpt-5.2',
				name: 'GPT 5.2',
				capabilities: {
					supports: { tool_calls: true, vision: true },
					limits: { max_prompt_tokens: 128000, max_output_tokens: 4096 }
				},
				supported_endpoints: ['/responses']
			},
			{
				id: 'opencode/claude-sonnet-4-5',
				name: 'Claude Sonnet 4.5',
				capabilities: {
					supports: { tool_calls: true, vision: true },
					limits: { max_prompt_tokens: 200000, max_output_tokens: 8192 }
				},
				supported_endpoints: ['/v1/messages']
			}
		]
	};

	beforeEach(() => {
		const testingServiceCollection = createExtensionUnitTestingServices();
		testingServiceCollection.define(IBlockedExtensionService, new SyncDescriptor(BlockedExtensionService));

		accessor = disposables.add(testingServiceCollection.createTestingAccessor());
		instaService = accessor.get(IInstantiationService);

		mockByokStorageService = {
			getAPIKey: vi.fn().mockResolvedValue('test-zen-api-key'),
			storeAPIKey: vi.fn().mockResolvedValue(undefined),
			deleteAPIKey: vi.fn().mockResolvedValue(undefined),
			getStoredModelConfigs: vi.fn().mockResolvedValue({}),
			saveModelConfig: vi.fn().mockResolvedValue(undefined),
			removeModelConfig: vi.fn().mockResolvedValue(undefined)
		};

		// Mock the fetcher service (the provider never touches global fetch):
		// serve the canned model list for /models, 404 everything else.
		const fetcherService = accessor.get(IFetcherService);
		vi.spyOn(fetcherService, 'fetch').mockImplementation(async (url: string) => {
			if (url.endsWith('/models')) {
				return {
					ok: true,
					status: 200,
					json: async () => mockModelsResponse
				} as any;
			}
			return { ok: false, status: 404 } as any;
		});

		provider = instaService.createInstance(OpenCodeZenLMProvider, mockByokStorageService);
	});

	afterEach(() => {
		disposables.clear();
		vi.restoreAllMocks();
	});

	describe('getAllModels', () => {
		it('should fetch and parse models from OpenCode Zen API', async () => {
			// Ensure the provider has an API key so it tries to fetch
			mockByokStorageService.getAPIKey = vi.fn().mockResolvedValue('test-zen-api-key');
			const providerWithKey = instaService.createInstance(OpenCodeZenLMProvider, mockByokStorageService);
			providerWithKey['_apiKey'] = 'test-zen-api-key';

			const models = await providerWithKey['getAllModels']();
			expect(models['opencode/gpt-5.2'].maxInputTokens).toBe(128000);
		});

		// getAllModels has no static fallback (the provider passes `undefined`
		// known models to its base class) and deliberately rethrows after
		// logging, so a fetch failure must surface to the caller.
		it('should propagate fetch errors when an API key exists', async () => {
			const fetcherService = accessor.get(IFetcherService);
			vi.spyOn(fetcherService, 'fetch').mockImplementationOnce(async () => {
				throw new Error('Network error');
			});

			// Set the API key so the fetch path (not the early `{}` return) runs
			provider['_apiKey'] = 'test-zen-api-key';

			await expect(provider['getAllModels']()).rejects.toThrow('Network error');
		});
	});

	describe('endpoint routing', () => {
		it('should route to /responses for GPT models', async () => {
			provider['_apiKey'] = 'test-zen-api-key';
			const models = await provider['getAllModels']();
			expect(models['opencode/gpt-5.2']).toBeDefined();

			const modelInfo = await provider['getModelInfo']('opencode/gpt-5.2', 'key');
			expect(modelInfo.supported_endpoints).toContain('/responses');
		});

		it('should route to /messages for Claude models', async () => {
			provider['_apiKey'] = 'test-zen-api-key';
			await provider['getAllModels']();

			const modelInfo = await provider['getModelInfo']('opencode/claude-sonnet-4-5', 'key');
			expect(modelInfo.supported_endpoints).toContain('/v1/messages');
		});

		it('should route to /models/{id} for Gemini models', async () => {
			const mockModel: vscode.LanguageModelChatInformation = {
				id: 'opencode/gemini-3-pro',
				name: 'Gemini 3 Pro',
				version: '1.0.0',
				maxInputTokens: 2000000,
				maxOutputTokens: 8192,
				family: 'OpenCode Zen',
				capabilities: { toolCalling: true, imageInput: true }
			};

			const endpoint = await provider['getEndpointImpl'](mockModel);
			expect(endpoint.urlOrRequestMetadata).toBe('https://opencode.ai/zen/v1/models/gemini-3-pro');
		});
	});
});