7 changes: 4 additions & 3 deletions package-lock.json

Some generated files are not rendered by default.

1 change: 1 addition & 0 deletions package.json
@@ -130,6 +130,7 @@
"react-dom": "^19.1.0",
"react-tooltip": "^5.28.1",
"tas-client-umd": "0.2.0",
+ "undici": "^7.11.0",
"v8-inspect-profiler": "^0.1.1",
"vscode-oniguruma": "1.7.0",
"vscode-regexpp": "^3.1.0",
2 changes: 1 addition & 1 deletion src/vs/code/electron-main/app.ts
@@ -1243,7 +1243,7 @@ export class CodeApplication extends Disposable {
const voidUpdatesChannel = ProxyChannel.fromService(accessor.get(IVoidUpdateService), disposables);
mainProcessElectronServer.registerChannel('void-channel-update', voidUpdatesChannel);

- const sendLLMMessageChannel = new LLMMessageChannel(accessor.get(IMetricsService));
+ const sendLLMMessageChannel = new LLMMessageChannel(accessor.get(IMetricsService), this.configurationService);
mainProcessElectronServer.registerChannel('void-channel-llmMessage', sendLLMMessageChannel);

// Void added this
@@ -31,6 +31,7 @@ type Tab =
| 'featureOptions'
| 'mcp'
| 'general'
+ | 'network'
| 'all';


@@ -1042,6 +1043,7 @@ export const Settings = () => {
{ tab: 'featureOptions', label: 'Feature Options' },
{ tab: 'general', label: 'General' },
{ tab: 'mcp', label: 'MCP' },
+ { tab: 'network', label: 'Network' },
{ tab: 'all', label: 'All Settings' },
];
const shouldShowTab = (tab: Tab) => selectedSection === 'all' || selectedSection === tab;
@@ -1550,8 +1552,41 @@ Use Model Context Protocol to provide Agent mode with more tools.
</div>


+ {/* Network section */}
+ <div className={shouldShowTab('network') ? `` : 'hidden'}>
+ <ErrorBoundary>
+ <h2 className={`text-3xl mb-2`}>Network</h2>
+
+ <div className='flex flex-col gap-y-8 my-4'>
+ <ErrorBoundary>
+ {/* LLM Proxy */}
+ <div>
+ <h4 className={`text-base`}>Use Proxy</h4>
+ <div className='text-sm text-void-fg-3 mt-1'>
+ <span>
+ Route LLM requests through the proxy configured by the http.proxy setting.{' '}
+ </span>
+ </div>
+
+ <div className='my-2'>
+ {/* Enable Switch */}
+ <ErrorBoundary>
+ <div className='flex items-center gap-x-2 my-2'>
+ <VoidSwitch
+ size='xs'
+ value={settingsState.globalSettings.enableNetworkProxy}
+ onChange={(newVal) => voidSettingsService.setGlobalSetting('enableNetworkProxy', newVal)}
+ />
+ <span className='text-void-fg-3 text-xs pointer-events-none'>{settingsState.globalSettings.enableNetworkProxy ? 'Enabled' : 'Disabled'}</span>
+ </div>
+ </ErrorBoundary>
+ </div>
+
+ </div>
+ </ErrorBoundary>
+ </div>
+ </ErrorBoundary>
+ </div>

</div>

@@ -116,7 +116,7 @@ export class LLMMessageService extends Disposable implements ILLMMessageService
return null
}

- const { settingsOfProvider, } = this.voidSettingsService.state
+ const { settingsOfProvider, globalSettings } = this.voidSettingsService.state

const mcpTools = this.mcpService.getMCPTools()

@@ -134,6 +134,7 @@
settingsOfProvider,
modelSelection,
mcpTools,
+ proxyRequest: globalSettings.enableNetworkProxy,
} satisfies MainSendLLMMessageParams);

return requestId
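A side note on the pattern above: the params object is checked with `satisfies MainSendLLMMessageParams`, so once `proxyRequest` becomes a required field of the params type (next hunk, via `SendLLMMessageParams`, which `MainSendLLMMessageParams` presumably extends), any call site that omits it fails to compile. A minimal sketch of that check, using a hypothetical reduced type:

```ts
// Hypothetical two-field stand-in for the real params type:
type Params = { requestId: string; proxyRequest: boolean };

const ok = { requestId: '1', proxyRequest: true } satisfies Params; // compiles
// const bad = { requestId: '1' } satisfies Params;                 // error: 'proxyRequest' is missing
```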
@@ -135,6 +135,7 @@ export type SendLLMMessageParams = {

settingsOfProvider: SettingsOfProvider;
mcpTools: InternalToolInfo[] | undefined;
+ proxyRequest: boolean;
} & SendLLMType


2 changes: 2 additions & 0 deletions src/vs/workbench/contrib/void/common/voidSettingsTypes.ts
@@ -452,6 +452,7 @@ export type GlobalSettings = {
isOnboardingComplete: boolean;
disableSystemMessage: boolean;
autoAcceptLLMChanges: boolean;
+ enableNetworkProxy: boolean;
}

export const defaultGlobalSettings: GlobalSettings = {
@@ -468,6 +469,7 @@ export const defaultGlobalSettings: GlobalSettings = {
isOnboardingComplete: false,
disableSystemMessage: false,
autoAcceptLLMChanges: false,
+ enableNetworkProxy: true,
}

export type GlobalSettingName = keyof GlobalSettings
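A reading note on the default above: `enableNetworkProxy` starts out `true`, but `enableProxy()` in the new `llmRequestProxy.ts` below is a no-op unless `http.proxy` is actually configured, so out of the box nothing is proxied. A condensed restatement of the two-level gate (the helper function is illustrative, not part of the PR):

```ts
// The setting turns the feature on; http.proxy supplies the target.
function proxyIsEffective(enableNetworkProxy: boolean, proxyUrl: string | undefined): boolean {
	return enableNetworkProxy && !!proxyUrl;
}

proxyIsEffective(true, undefined);            // false: default state, no http.proxy configured
proxyIsEffective(true, 'http://proxy:3128');  // true: proxied ('http://proxy:3128' is a placeholder)
proxyIsEffective(false, 'http://proxy:3128'); // false: Network > Use Proxy toggled off
```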
70 changes: 70 additions & 0 deletions llmRequestProxy.ts
@@ -0,0 +1,70 @@
import { setGlobalDispatcher, ProxyAgent, Agent } from 'undici';
import { HttpsProxyAgent } from 'https-proxy-agent';
import OpenAI, { ClientOptions } from 'openai'
import { IConfigurationService } from '../../../../../platform/configuration/common/configuration.js';


function getConfigValue<T>(configurationService: IConfigurationService, key: string): T | undefined {
const values = configurationService.inspect<T>(key);
// `??` rather than `||`, so an explicit falsy user value (e.g. `false` for http.proxyStrictSSL) is not discarded
return values.userLocalValue ?? values.defaultValue;
}


export const llmRequestProxy = {
config: {
proxyUrl: '' as string | undefined,
strictSSL: false,
authorization: '' as string | undefined,
},
proxyEnabled: false,
newOpenAI: function (options: ClientOptions) {
const params = {
...options,
}

// attach the proxy agent directly to this client too, in case the SDK's
// HTTP stack bypasses undici's global dispatcher (set in setCommonProxy below)
if (this.proxyEnabled && this.config.proxyUrl) {
params.httpAgent = new HttpsProxyAgent(this.config.proxyUrl)
}

return new OpenAI(params)
},

configure(configurationService: IConfigurationService) {
const proxyUrl = getConfigValue<string>(configurationService, 'http.proxy');
const strictSSL = !!getConfigValue<boolean>(configurationService, 'http.proxyStrictSSL');
const authorization = getConfigValue<string>(configurationService, 'http.proxyAuthorization');

// only proxyUrl is consumed (by newOpenAI/enableProxy); strictSSL and
// authorization are captured but not yet applied to outgoing requests
this.config.proxyUrl = proxyUrl
this.config.strictSSL = strictSSL
this.config.authorization = authorization
},


initialize(configurationService: IConfigurationService) {
// initialize proxy config
this.configure(configurationService)
},

enableProxy() {
if (this.config.proxyUrl) {
if (!this.proxyEnabled) {
this.proxyEnabled = true;
this.setCommonProxy(this.config.proxyUrl)
}
}
},
disableProxy() {
if (this.proxyEnabled) {
this.proxyEnabled = false;
this.clearCommonProxy()
}
},

setCommonProxy(proxyUrl: string) {
const dispatcher = new ProxyAgent({ uri: proxyUrl });
setGlobalDispatcher(dispatcher);
},
clearCommonProxy() {
setGlobalDispatcher(new Agent());
}
}
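For orientation: `setGlobalDispatcher` swaps the connection pool behind Node's built-in `fetch` (undici-backed in Node 18+) for the whole process, which is what `setCommonProxy`/`clearCommonProxy` rely on. A minimal sketch of the mechanism, with a placeholder proxy URL:

```ts
import { setGlobalDispatcher, ProxyAgent, Agent } from 'undici';

const PROXY_URL = 'http://127.0.0.1:8888'; // placeholder for illustration

// From here on, every global fetch() in the process tunnels through the proxy...
setGlobalDispatcher(new ProxyAgent({ uri: PROXY_URL }));
await fetch('https://api.example.com/v1/models'); // proxied

// ...and this restores direct connections, mirroring clearCommonProxy().
setGlobalDispatcher(new Agent());
await fetch('https://api.example.com/v1/models'); // direct
```

Clients constructed with an explicit `httpAgent`, as in `newOpenAI` above, manage their own connections and are unaffected by the dispatcher swap.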
sendLLMMessage.impl.ts
@@ -20,6 +20,8 @@ import { getSendableReasoningInfo, getModelCapabilities, getProviderCapabilities
import { extractReasoningWrapper, extractXMLToolsWrapper } from './extractGrammar.js';
import { availableTools, InternalToolInfo } from '../../common/prompt/prompts.js';
import { generateUuid } from '../../../../../base/common/uuid.js';
+ import { llmRequestProxy } from './llmRequestProxy.js';


const getGoogleApiKey = async () => {
// module-level singleton
@@ -76,7 +78,7 @@ const newOpenAICompatibleSDK = async ({ settingsOfProvider, providerName, includ
}
if (providerName === 'openAI') {
const thisConfig = settingsOfProvider[providerName]
- return new OpenAI({ apiKey: thisConfig.apiKey, ...commonPayloadOpts })
+ return llmRequestProxy.newOpenAI({ apiKey: thisConfig.apiKey, ...commonPayloadOpts })
}
else if (providerName === 'ollama') {
const thisConfig = settingsOfProvider[providerName]
@@ -88,15 +90,15 @@
}
else if (providerName === 'liteLLM') {
const thisConfig = settingsOfProvider[providerName]
- return new OpenAI({ baseURL: `${thisConfig.endpoint}/v1`, apiKey: 'noop', ...commonPayloadOpts })
+ return llmRequestProxy.newOpenAI({ baseURL: `${thisConfig.endpoint}/v1`, apiKey: 'noop', ...commonPayloadOpts })
}
else if (providerName === 'lmStudio') {
const thisConfig = settingsOfProvider[providerName]
return new OpenAI({ baseURL: `${thisConfig.endpoint}/v1`, apiKey: 'noop', ...commonPayloadOpts })
}
else if (providerName === 'openRouter') {
const thisConfig = settingsOfProvider[providerName]
- return new OpenAI({
+ return llmRequestProxy.newOpenAI({
baseURL: 'https://openrouter.ai/api/v1',
apiKey: thisConfig.apiKey,
defaultHeaders: {
@@ -111,7 +113,7 @@ const newOpenAICompatibleSDK = async ({ settingsOfProvider, providerName, includ
const thisConfig = settingsOfProvider[providerName]
const baseURL = `https://${thisConfig.region}-aiplatform.googleapis.com/v1/projects/${thisConfig.project}/locations/${thisConfig.region}/endpoints/${'openapi'}`
const apiKey = await getGoogleApiKey()
- return new OpenAI({ baseURL: baseURL, apiKey: apiKey, ...commonPayloadOpts })
+ return llmRequestProxy.newOpenAI({ baseURL: baseURL, apiKey: apiKey, ...commonPayloadOpts })
}
else if (providerName === 'microsoftAzure') {
// https://learn.microsoft.com/en-us/rest/api/aifoundry/model-inference/get-chat-completions/get-chat-completions?view=rest-aifoundry-model-inference-2024-05-01-preview&tabs=HTTP
Expand Down Expand Up @@ -142,30 +144,30 @@ const newOpenAICompatibleSDK = async ({ settingsOfProvider, providerName, includ
if (!baseURL.endsWith('/v1'))
baseURL = baseURL.replace(/\/+$/, '') + '/v1'

- return new OpenAI({ baseURL, apiKey, ...commonPayloadOpts })
+ return llmRequestProxy.newOpenAI({ baseURL, apiKey, ...commonPayloadOpts })
}


else if (providerName === 'deepseek') {
const thisConfig = settingsOfProvider[providerName]
- return new OpenAI({ baseURL: 'https://api.deepseek.com/v1', apiKey: thisConfig.apiKey, ...commonPayloadOpts })
+ return llmRequestProxy.newOpenAI({ baseURL: 'https://api.deepseek.com/v1', apiKey: thisConfig.apiKey, ...commonPayloadOpts })
}
else if (providerName === 'openAICompatible') {
const thisConfig = settingsOfProvider[providerName]
const headers = parseHeadersJSON(thisConfig.headersJSON)
- return new OpenAI({ baseURL: thisConfig.endpoint, apiKey: thisConfig.apiKey, defaultHeaders: headers, ...commonPayloadOpts })
+ return llmRequestProxy.newOpenAI({ baseURL: thisConfig.endpoint, apiKey: thisConfig.apiKey, defaultHeaders: headers, ...commonPayloadOpts })
}
else if (providerName === 'groq') {
const thisConfig = settingsOfProvider[providerName]
- return new OpenAI({ baseURL: 'https://api.groq.com/openai/v1', apiKey: thisConfig.apiKey, ...commonPayloadOpts })
+ return llmRequestProxy.newOpenAI({ baseURL: 'https://api.groq.com/openai/v1', apiKey: thisConfig.apiKey, ...commonPayloadOpts })
}
else if (providerName === 'xAI') {
const thisConfig = settingsOfProvider[providerName]
- return new OpenAI({ baseURL: 'https://api.x.ai/v1', apiKey: thisConfig.apiKey, ...commonPayloadOpts })
+ return llmRequestProxy.newOpenAI({ baseURL: 'https://api.x.ai/v1', apiKey: thisConfig.apiKey, ...commonPayloadOpts })
}
else if (providerName === 'mistral') {
const thisConfig = settingsOfProvider[providerName]
- return new OpenAI({ baseURL: 'https://api.mistral.ai/v1', apiKey: thisConfig.apiKey, ...commonPayloadOpts })
+ return llmRequestProxy.newOpenAI({ baseURL: 'https://api.mistral.ai/v1', apiKey: thisConfig.apiKey, ...commonPayloadOpts })
}

else throw new Error(`Void providerName was invalid: ${providerName}.`)
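Across these hunks every hosted provider's `new OpenAI(...)` is routed through `llmRequestProxy.newOpenAI(...)`, while the local `lmStudio` branch keeps a plain constructor, presumably so localhost traffic is never proxied. With the proxy enabled, the wrapper effectively reduces to the following sketch (`newProxiedOpenAI` is an illustrative helper, not PR code; `proxyUrl` is whatever `http.proxy` resolves to):

```ts
import OpenAI from 'openai';
import { HttpsProxyAgent } from 'https-proxy-agent';

// What llmRequestProxy.newOpenAI amounts to once proxying is active:
function newProxiedOpenAI(baseURL: string, apiKey: string, proxyUrl: string) {
	return new OpenAI({ baseURL, apiKey, httpAgent: new HttpsProxyAgent(proxyUrl) });
}
```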
sendLLMMessage.ts
@@ -7,7 +7,7 @@ import { SendLLMMessageParams, OnText, OnFinalMessage, OnError } from '../../com
import { IMetricsService } from '../../common/metricsService.js';
import { displayInfoOfProviderName } from '../../common/voidSettingsTypes.js';
import { sendLLMMessageToProviderImplementation } from './sendLLMMessage.impl.js';

+ import { llmRequestProxy } from './llmRequestProxy.js';

export const sendLLMMessage = async ({
messagesType,
@@ -24,11 +24,17 @@ export const sendLLMMessage = async ({
chatMode,
separateSystemMessage,
mcpTools,
+ proxyRequest,
}: SendLLMMessageParams,

metricsService: IMetricsService
) => {

+ if (proxyRequest) {
+ llmRequestProxy.enableProxy()
+ } else {
+ llmRequestProxy.disableProxy()
+ }

const { providerName, modelName } = modelSelection

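A design consequence worth flagging: `proxyRequest` arrives per request, but it toggles process-global state, so overlapping sends with different values would share one dispatcher. A sketch of the interleaving, assuming two concurrent requests (illustrative timeline, not PR code):

```ts
import { llmRequestProxy } from './llmRequestProxy.js';

llmRequestProxy.enableProxy();  // request A arrives with proxyRequest: true
// ...request A starts streaming through the proxy...
llmRequestProxy.disableProxy(); // request B arrives with proxyRequest: false
// from here, new connections (including any request A opens next) go direct
```

In practice every request copies the same `enableNetworkProxy` setting, so the flags only disagree if the user flips the toggle while a request is in flight.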
@@ -12,6 +12,10 @@ import { EventLLMMessageOnTextParams, EventLLMMessageOnErrorParams, EventLLMMess
import { sendLLMMessage } from './llmMessage/sendLLMMessage.js'
import { IMetricsService } from '../common/metricsService.js';
import { sendLLMMessageToProviderImplementation } from './llmMessage/sendLLMMessage.impl.js';
+ import { IConfigurationService } from '../../../../platform/configuration/common/configuration.js';
+ // import { IVoidSettingsService } from '../common/voidSettingsService.js';
+ import { llmRequestProxy } from './llmMessage/llmRequestProxy.js';


// NODE IMPLEMENTATION - calls actual sendLLMMessage() and returns listeners to it

@@ -48,7 +52,10 @@ export class LLMMessageChannel implements IServerChannel {
// stupidly, channels can't take in @IService
constructor(
private readonly metricsService: IMetricsService,
- ) { }
+ private readonly configurationService: IConfigurationService,
+ ) {
+ llmRequestProxy.initialize(this.configurationService)
+ }

// browser uses this to listen for changes
listen(_: unknown, event: string): Event<any> {