-
Notifications
You must be signed in to change notification settings - Fork 43
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
feat(llm-observability): OpenAI TypeScript SDK (#343)
- Loading branch information
Showing
10 changed files
with
3,075 additions
and
0 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,46 @@ | ||
# PostHog Node AI | ||
|
||
Initial TypeScript SDK for LLM observability
|
||
// before | ||
|
||
```typescript | ||
import { OpenAI } from 'openai' | ||
|
||
const client = new OpenAI({ | ||
apiKey: process.env.OPENAI_API_KEY || '', | ||
}) | ||
|
||
client.chat.completions.create({ | ||
model: 'gpt-4', | ||
messages: [{ role: 'user', content: 'Hello, world!' }], | ||
}) | ||
``` | ||
|
||
// after | ||
|
||
```typescript | ||
import { OpenAI } from 'posthog-node-ai' | ||
|
||
const client = new OpenAI({ | ||
apiKey: process.env.OPENAI_API_KEY || '', | ||
posthog: phClient, | ||
}) | ||
|
||
client.chat.completions.create({ | ||
model: 'gpt-4', | ||
messages: [{ role: 'user', content: 'Hello, world!' }], | ||
posthog_distinct_id: 'test-user-id', | ||
posthog_properties: { | ||
test_property: 'test_value', | ||
}, | ||
}) | ||
``` | ||
|
||
Please see the main [PostHog docs](https://www.posthog.com/docs). | ||
|
||
Specifically, see the [Node.js docs](https://posthog.com/docs/libraries/node-ai) for more details.
|
||
## Questions? | ||
|
||
### [Check out our community page.](https://posthog.com/posts) |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,37 @@ | ||
{ | ||
"name": "posthog-ai", | ||
"version": "1.0.0", | ||
"description": "PostHog Node.js AI integrations", | ||
"repository": { | ||
"type": "git", | ||
"url": "https://github.com/PostHog/posthog-js-lite.git", | ||
"directory": "posthog-node-ai" | ||
}, | ||
"main": "./dist/index.js", | ||
"module": "./dist/index.mjs", | ||
"types": "./dist/index.d.ts", | ||
"devDependencies": { | ||
"@types/bun": "latest", | ||
"@types/jest": "^29.5.14", | ||
"@types/node": "^20.0.0", | ||
"jest": "^29.7.0", | ||
"node-fetch": "^3.3.2", | ||
"ts-jest": "^29.2.5", | ||
"typescript": "^5.0.0" | ||
}, | ||
"keywords": [ | ||
"posthog", | ||
"ai", | ||
"openai", | ||
"anthropic", | ||
"llm" | ||
], | ||
"dependencies": { | ||
"openai": "^4.79.1", | ||
"uuid": "^11.0.5" | ||
}, | ||
"scripts": { | ||
"test": "jest", | ||
"prepublishOnly": "cd .. && yarn build" | ||
} | ||
} |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,3 @@ | ||
import PostHogOpenAI from './openai' | ||
|
||
export { PostHogOpenAI as OpenAI } |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,170 @@ | ||
import type { ChatCompletionCreateParamsBase } from 'openai/resources/chat/completions' | ||
import type { ChatCompletionCreateParamsStreaming } from 'openai/resources/chat/completions' | ||
import OpenAIOrignal from 'openai' | ||
import type { PostHog } from 'posthog-node' | ||
import { v4 as uuidv4 } from 'uuid' | ||
import { PassThrough } from 'stream' | ||
import { mergeSystemPrompt, type MonitoringParams, sendEventToPosthog } from '../utils' | ||
|
||
type ChatCompletion = OpenAIOrignal.ChatCompletion | ||
type ChatCompletionChunk = OpenAIOrignal.ChatCompletionChunk | ||
import type { ChatCompletionCreateParamsNonStreaming } from 'openai/resources/chat/completions' | ||
import type { APIPromise, RequestOptions } from 'openai/core' | ||
import type { Stream } from 'openai/streaming' | ||
|
||
// Options accepted by the monitored OpenAI client: the subset of OpenAI
// client options this wrapper forwards, plus the PostHog client that
// receives the generated observability events.
interface MonitoringOpenAIConfig {
  // OpenAI API key, forwarded unchanged to the underlying OpenAI SDK.
  apiKey: string
  // PostHog client used to capture one event per chat-completion call.
  posthog: PostHog
  // Optional OpenAI API base URL override (e.g. a proxy or Azure endpoint).
  baseURL?: string
}
|
||
/**
 * Drop-in replacement for the OpenAI SDK client that reports
 * `chat.completions.create` calls to PostHog. Only the `chat` namespace is
 * replaced with an instrumented version; every other API surface is
 * inherited unchanged from the OpenAI SDK.
 */
export class PostHogOpenAI extends OpenAIOrignal {
  // PostHog client shared with the wrapped `chat.completions` resource.
  private readonly phClient: PostHog

  constructor(config: MonitoringOpenAIConfig) {
    // Strip the PostHog-specific option before handing the remaining
    // options to the OpenAI SDK constructor.
    const { posthog, ...openAIConfig } = config
    super(openAIConfig)
    this.phClient = posthog
    // Swap the SDK's `chat` namespace for the instrumented one.
    this.chat = new WrappedChat(this, this.phClient)
  }

  // Re-declared with the wrapped type; assigned in the constructor above.
  public chat: WrappedChat
}
|
||
/**
 * Chat namespace whose `completions` resource is replaced with the
 * PostHog-instrumented {@link WrappedCompletions}; all other members are
 * inherited from the SDK's `Chat` class.
 */
export class WrappedChat extends OpenAIOrignal.Chat {
  constructor(parentClient: PostHogOpenAI, phClient: PostHog) {
    super(parentClient)
    this.completions = new WrappedCompletions(parentClient, phClient)
  }

  // Re-declared with the wrapped type; assigned in the constructor above.
  public completions: WrappedCompletions
}
|
||
/**
 * `chat.completions` resource that forwards every `create` call to the
 * OpenAI SDK unchanged (after stripping the `posthog_*` monitoring params)
 * and captures one analytics event per call via `sendEventToPosthog`.
 *
 * Streaming calls are observed by piping the SDK stream through a
 * PassThrough, accumulating the assistant text and token usage as chunks
 * flow by; non-streaming calls are observed by tapping the resolved result.
 */
export class WrappedCompletions extends OpenAIOrignal.Chat.Completions {
  private readonly phClient: PostHog

  constructor(client: OpenAIOrignal, phClient: PostHog) {
    super(client)
    this.phClient = phClient
  }

  // --- Overload #1: Non-streaming
  public create(
    body: ChatCompletionCreateParamsNonStreaming & MonitoringParams,
    options?: RequestOptions
  ): APIPromise<ChatCompletion>

  // --- Overload #2: Streaming
  public create(
    body: ChatCompletionCreateParamsStreaming & MonitoringParams,
    options?: RequestOptions
  ): APIPromise<Stream<ChatCompletionChunk>>

  // --- Overload #3: Generic base
  public create(
    body: ChatCompletionCreateParamsBase & MonitoringParams,
    options?: RequestOptions
  ): APIPromise<ChatCompletion | Stream<ChatCompletionChunk>>

  // --- Implementation Signature
  public create(
    body: ChatCompletionCreateParamsBase & MonitoringParams,
    options?: RequestOptions
  ): APIPromise<ChatCompletion | Stream<ChatCompletionChunk>> {
    // Separate the monitoring params from the real OpenAI request params;
    // only the latter are sent to the API.
    const {
      posthog_distinct_id,
      posthog_trace_id,
      posthog_properties,
      posthog_privacy_mode = false,
      posthog_groups,
      ...openAIParams
    } = body
    // NOTE(review): `posthog_properties` and `posthog_groups` are
    // destructured but never passed to `sendEventToPosthog` below —
    // presumably they should be; confirm against the utils contract.

    // Every call gets a trace id so related events can be correlated;
    // it also doubles as the distinct id when the caller supplies none.
    const traceId = posthog_trace_id ?? uuidv4()
    const startTime = Date.now()

    const parentPromise = super.create(openAIParams, options)

    if (openAIParams.stream) {
      return parentPromise.then((value) => {
        // Consume the SDK stream ourselves and re-emit each chunk on a
        // PassThrough so the caller still sees every chunk in order.
        const passThroughStream = new PassThrough({ objectMode: true })
        let accumulatedContent = ''
        let usage: { input_tokens: number; output_tokens: number } = {
          input_tokens: 0,
          output_tokens: 0,
        }
        // Duck-type check that the resolved value is an OpenAI Stream
        // (which exposes `tee`).
        if ('tee' in value) {
          const openAIStream = value
          // Fire-and-forget pump: drain the SDK stream, forward chunks,
          // then capture the analytics event once the stream completes.
          ;(async () => {
            try {
              for await (const chunk of openAIStream) {
                const delta = chunk?.choices?.[0]?.delta?.content ?? ''
                accumulatedContent += delta
                // NOTE(review): OpenAI only includes `usage` on stream
                // chunks when the request sets
                // `stream_options: { include_usage: true }`; as written,
                // usage likely stays 0/0 — confirm intended behavior.
                if (chunk.usage) {
                  usage = {
                    input_tokens: chunk.usage.prompt_tokens ?? 0,
                    output_tokens: chunk.usage.completion_tokens ?? 0,
                  }
                }
                passThroughStream.write(chunk)
              }
              // Latency in seconds, covering request start to stream end.
              const latency = (Date.now() - startTime) / 1000
              sendEventToPosthog({
                client: this.phClient,
                distinctId: posthog_distinct_id ?? traceId,
                traceId,
                model: openAIParams.model,
                provider: 'openai',
                // Privacy mode suppresses prompt contents in the event.
                input: posthog_privacy_mode ? '' : mergeSystemPrompt(openAIParams, 'openai'),
                output: [{ content: accumulatedContent, role: 'assistant' }],
                latency,
                // NOTE(review): `baseURL` is not declared on this class,
                // hence the `any` escape hatch — presumably inherited from
                // the SDK resource's client; verify it resolves at runtime.
                baseURL: (this as any).baseURL ?? '',
                params: body,
                httpStatus: 200,
                usage,
              })
              passThroughStream.end()
            } catch (error) {
              // error handling
              // NOTE(review): errors are forwarded to the consumer but no
              // failure event is captured to PostHog — confirm intended.
              passThroughStream.emit('error', error)
            }
          })()
        }
        // NOTE(review): if `value` is not a stream (`'tee' in value` is
        // false), the PassThrough is returned without ever being ended —
        // a consumer awaiting completion would hang; verify unreachable.
        return passThroughStream as unknown as Stream<ChatCompletionChunk>
      }) as APIPromise<Stream<ChatCompletionChunk>>
    } else {
      // Non-streaming: tap the resolved completion, capture the event,
      // and hand the result through untouched.
      const wrappedPromise = parentPromise.then(
        (result) => {
          if ('choices' in result) {
            const latency = (Date.now() - startTime) / 1000
            sendEventToPosthog({
              client: this.phClient,
              distinctId: posthog_distinct_id ?? traceId,
              traceId,
              model: openAIParams.model,
              provider: 'openai',
              input: posthog_privacy_mode ? '' : mergeSystemPrompt(openAIParams, 'openai'),
              // NOTE(review): `message.content` can be null (e.g. tool
              // calls) and `choices` can be empty — no guard here.
              output: [{ content: result.choices[0].message.content, role: 'assistant' }],
              latency,
              baseURL: (this as any).baseURL ?? '',
              params: body,
              httpStatus: 200,
              usage: {
                input_tokens: result.usage?.prompt_tokens ?? 0,
                output_tokens: result.usage?.completion_tokens ?? 0,
              },
            })
          }
          return result
        },
        (error) => {
          // NOTE(review): rethrow-only handler is a no-op — API failures
          // are never captured to PostHog; confirm whether an error event
          // (with the real httpStatus) should be sent here.
          throw error
        }
      ) as APIPromise<ChatCompletion>

      return wrappedPromise
    }
  }
}
|
||
export default PostHogOpenAI |
Oops, something went wrong.