feat(llm-observability): OpenAI TypeScript SDK (#343)
k11kirky authored Jan 20, 2025
1 parent a36099a commit c60f8a2
Showing 10 changed files with 3,075 additions and 0 deletions.
2 changes: 2 additions & 0 deletions .github/workflows/release.yml
@@ -18,6 +18,8 @@ jobs:
npm_token_secret: NPM_TOKEN_BEN_WHITE # TODO: Change to standard token once able
- name: posthog-web
npm_token_secret: NPM_TOKEN # TODO: Change to standard token once able
- name: posthog-ai
npm_token_secret: NPM_TOKEN # TODO: Change to standard token once able

steps:
- name: Checkout the repository
46 changes: 46 additions & 0 deletions posthog-ai/README.md
@@ -0,0 +1,46 @@
# PostHog Node AI

Initial TypeScript SDK for LLM Observability.

**Before:**

```typescript
import { OpenAI } from 'openai'

const client = new OpenAI({
apiKey: process.env.OPENAI_API_KEY || '',
})

client.chat.completions.create({
model: 'gpt-4',
messages: [{ role: 'user', content: 'Hello, world!' }],
})
```

**After:**

```typescript
import { OpenAI } from 'posthog-ai'
import { PostHog } from 'posthog-node'

// phClient is your posthog-node client (project API key placeholder shown)
const phClient = new PostHog('<ph_project_api_key>')

const client = new OpenAI({
  apiKey: process.env.OPENAI_API_KEY || '',
  posthog: phClient,
})

client.chat.completions.create({
model: 'gpt-4',
messages: [{ role: 'user', content: 'Hello, world!' }],
posthog_distinct_id: 'test-user-id',
posthog_properties: {
test_property: 'test_value',
},
})
```
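
Streaming requests go through the same wrapper (see `posthog-ai/src/openai/index.ts` below). A minimal sketch, assuming the same `client` as above; `stream_options: { include_usage: true }` is a standard OpenAI parameter that lets the wrapper capture token usage from the final chunk:

```typescript
const stream = await client.chat.completions.create({
  model: 'gpt-4',
  messages: [{ role: 'user', content: 'Hello, world!' }],
  stream: true,
  stream_options: { include_usage: true }, // usage arrives on the final chunk
  posthog_distinct_id: 'test-user-id',
})

for await (const chunk of stream) {
  process.stdout.write(chunk.choices[0]?.delta?.content ?? '')
}
```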

Please see the main [PostHog docs](https://www.posthog.com/docs).

Specifically, see the [Node.js docs](https://posthog.com/docs/libraries/node-ai) for details.

## Questions?

### [Check out our community page.](https://posthog.com/posts)
37 changes: 37 additions & 0 deletions posthog-ai/package.json
@@ -0,0 +1,37 @@
{
"name": "posthog-ai",
"version": "1.0.0",
"description": "PostHog Node.js AI integrations",
"repository": {
"type": "git",
"url": "https://github.com/PostHog/posthog-js-lite.git",
"directory": "posthog-node-ai"
},
"main": "./dist/index.js",
"module": "./dist/index.mjs",
"types": "./dist/index.d.ts",
"devDependencies": {
"@types/bun": "latest",
"@types/jest": "^29.5.14",
"@types/node": "^20.0.0",
"jest": "^29.7.0",
"node-fetch": "^3.3.2",
"ts-jest": "^29.2.5",
"typescript": "^5.0.0"
},
"keywords": [
"posthog",
"ai",
"openai",
"anthropic",
"llm"
],
"dependencies": {
"openai": "^4.79.1",
"uuid": "^11.0.5"
},
"scripts": {
"test": "jest",
"prepublishOnly": "cd .. && yarn build"
}
}
3 changes: 3 additions & 0 deletions posthog-ai/src/index.ts
@@ -0,0 +1,3 @@
import PostHogOpenAI from './openai'

export { PostHogOpenAI as OpenAI }
170 changes: 170 additions & 0 deletions posthog-ai/src/openai/index.ts
@@ -0,0 +1,170 @@
import OpenAIOriginal from 'openai'
import type {
  ChatCompletionCreateParamsBase,
  ChatCompletionCreateParamsNonStreaming,
  ChatCompletionCreateParamsStreaming,
} from 'openai/resources/chat/completions'
import type { APIPromise, RequestOptions } from 'openai/core'
import type { Stream } from 'openai/streaming'
import type { PostHog } from 'posthog-node'
import { v4 as uuidv4 } from 'uuid'
import { PassThrough } from 'stream'
import { mergeSystemPrompt, type MonitoringParams, sendEventToPosthog } from '../utils'

type ChatCompletion = OpenAIOriginal.ChatCompletion
type ChatCompletionChunk = OpenAIOriginal.ChatCompletionChunk

interface MonitoringOpenAIConfig {
apiKey: string
posthog: PostHog
baseURL?: string
}

export class PostHogOpenAI extends OpenAIOriginal {
private readonly phClient: PostHog

constructor(config: MonitoringOpenAIConfig) {
const { posthog, ...openAIConfig } = config
super(openAIConfig)
this.phClient = posthog
this.chat = new WrappedChat(this, this.phClient)
}

public chat: WrappedChat
}

export class WrappedChat extends OpenAIOriginal.Chat {
constructor(parentClient: PostHogOpenAI, phClient: PostHog) {
super(parentClient)
this.completions = new WrappedCompletions(parentClient, phClient)
}

public completions: WrappedCompletions
}

export class WrappedCompletions extends OpenAIOriginal.Chat.Completions {
private readonly phClient: PostHog

constructor(client: OpenAIOriginal, phClient: PostHog) {
super(client)
this.phClient = phClient
}

// --- Overload #1: Non-streaming
public create(
body: ChatCompletionCreateParamsNonStreaming & MonitoringParams,
options?: RequestOptions
): APIPromise<ChatCompletion>

// --- Overload #2: Streaming
public create(
body: ChatCompletionCreateParamsStreaming & MonitoringParams,
options?: RequestOptions
): APIPromise<Stream<ChatCompletionChunk>>

// --- Overload #3: Generic base
public create(
body: ChatCompletionCreateParamsBase & MonitoringParams,
options?: RequestOptions
): APIPromise<ChatCompletion | Stream<ChatCompletionChunk>>

// --- Implementation Signature
public create(
body: ChatCompletionCreateParamsBase & MonitoringParams,
options?: RequestOptions
): APIPromise<ChatCompletion | Stream<ChatCompletionChunk>> {
const {
posthog_distinct_id,
posthog_trace_id,
posthog_properties,
posthog_privacy_mode = false,
posthog_groups,
...openAIParams
} = body

const traceId = posthog_trace_id ?? uuidv4()
const startTime = Date.now()

const parentPromise = super.create(openAIParams, options)

if (openAIParams.stream) {
return parentPromise.then((value) => {
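// Mirror chunks through a PassThrough so output can be accumulated for analytics while the caller consumes the stream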
const passThroughStream = new PassThrough({ objectMode: true })
let accumulatedContent = ''
let usage: { input_tokens: number; output_tokens: number } = {
input_tokens: 0,
output_tokens: 0,
}
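// 'tee' in value is a runtime check that this is a Stream rather than a ChatCompletion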
if ('tee' in value) {
const openAIStream = value
;(async () => {
try {
for await (const chunk of openAIStream) {
const delta = chunk?.choices?.[0]?.delta?.content ?? ''
accumulatedContent += delta
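// usage is only populated on the final chunk, and only when the request sets stream_options: { include_usage: true }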
if (chunk.usage) {
usage = {
input_tokens: chunk.usage.prompt_tokens ?? 0,
output_tokens: chunk.usage.completion_tokens ?? 0,
}
}
passThroughStream.write(chunk)
}
const latency = (Date.now() - startTime) / 1000
sendEventToPosthog({
client: this.phClient,
distinctId: posthog_distinct_id ?? traceId,
traceId,
model: openAIParams.model,
provider: 'openai',
input: posthog_privacy_mode ? '' : mergeSystemPrompt(openAIParams, 'openai'),
output: [{ content: accumulatedContent, role: 'assistant' }],
latency,
baseURL: (this as any).baseURL ?? '',
params: body,
httpStatus: 200,
usage,
})
passThroughStream.end()
} catch (error) {
// Surface upstream failures to consumers of the wrapped stream
passThroughStream.emit('error', error)
}
})()
}
return passThroughStream as unknown as Stream<ChatCompletionChunk>
}) as APIPromise<Stream<ChatCompletionChunk>>
} else {
const wrappedPromise = parentPromise.then(
(result) => {
if ('choices' in result) {
const latency = (Date.now() - startTime) / 1000
sendEventToPosthog({
client: this.phClient,
distinctId: posthog_distinct_id ?? traceId,
traceId,
model: openAIParams.model,
provider: 'openai',
input: posthog_privacy_mode ? '' : mergeSystemPrompt(openAIParams, 'openai'),
output: [{ content: result.choices[0].message.content, role: 'assistant' }],
latency,
baseURL: (this as any).baseURL ?? '',
params: body,
httpStatus: 200,
usage: {
input_tokens: result.usage?.prompt_tokens ?? 0,
output_tokens: result.usage?.completion_tokens ?? 0,
},
})
}
return result
},
(error) => {
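// Rethrow so the caller's own error handling still runs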
throw error
}
) as APIPromise<ChatCompletion>

return wrappedPromise
}
}
}

export default PostHogOpenAI
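
For reference, a minimal usage sketch of the monitoring parameters destructured in `create` above, assuming a `client` constructed as in the README (the trace ID and property values here are illustrative):

```typescript
// Tie several generations to one trace; privacy mode redacts the captured input.
const completion = await client.chat.completions.create({
  model: 'gpt-4',
  messages: [{ role: 'user', content: 'Hello, world!' }],
  posthog_distinct_id: 'test-user-id',
  posthog_trace_id: 'trace-42', // generated via uuidv4() when omitted
  posthog_privacy_mode: true, // input is replaced with '' in the captured event
  posthog_properties: { step: 'greeting' },
})
```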