Skip to content

Commit 283adf1

Browse files
committed
fix(ci): green PR 4937 checks
1 parent f0e8ec5 commit 283adf1

File tree

34 files changed

+1609
-1166
lines changed

34 files changed

+1609
-1166
lines changed

apps/www/next-env.d.ts

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
/// <reference types="next" />
22
/// <reference types="next/image-types/global" />
33
/// <reference types="next/navigation-types/compat/navigation" />
4-
import "./.next/dev/types/routes.d.ts";
4+
import "./.next/types/routes.d.ts";
55

66
// NOTE: This file should not be edited
77
// see https://nextjs.org/docs/app/api-reference/config/typescript for more information.

apps/www/public/r/ai-kit.json

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -14,6 +14,11 @@
1414
"https://platejs.org/r/ai-api.json"
1515
],
1616
"files": [
17+
{
18+
"path": "src/registry/lib/ai-stream-batching.ts",
19+
"content": "const DEFAULT_BATCH_WINDOW_IN_MS = 16;\nconst SLOW_BATCH_WINDOW_IN_MS = 32;\nconst SLOW_FLUSH_THRESHOLD_IN_MS = 24;\n\ntype AIStreamBatchTimer = ReturnType<typeof setTimeout>;\ntype AIStreamBatchFlushOptions = {\n force?: boolean;\n};\n\nexport type AIStreamBatcher = ReturnType<typeof createAIStreamBatcher>;\n\nexport function shouldFlushAIStreamChunkImmediately(chunk: string) {\n return chunk.includes('\\n') || chunk.includes('```') || chunk.includes('|');\n}\n\nexport function createAIStreamBatcher({\n applyChunk,\n cancel = clearTimeout,\n getNow = () => performance.now(),\n schedule = setTimeout,\n}: {\n applyChunk: (chunk: string, options?: AIStreamBatchFlushOptions) => void;\n cancel?: (timer: AIStreamBatchTimer) => void;\n getNow?: () => number;\n schedule?: (callback: () => void, delayInMs: number) => AIStreamBatchTimer;\n}) {\n let batchWindowInMs = DEFAULT_BATCH_WINDOW_IN_MS;\n let pendingChunk = '';\n let timer: AIStreamBatchTimer | null = null;\n\n const clearTimer = () => {\n if (timer) {\n cancel(timer);\n timer = null;\n }\n };\n\n const flush = (options?: AIStreamBatchFlushOptions) => {\n if (!pendingChunk) return false;\n\n const chunk = pendingChunk;\n\n pendingChunk = '';\n clearTimer();\n\n const startedAt = getNow();\n\n applyChunk(chunk, options);\n\n batchWindowInMs =\n getNow() - startedAt > SLOW_FLUSH_THRESHOLD_IN_MS\n ? 
SLOW_BATCH_WINDOW_IN_MS\n : DEFAULT_BATCH_WINDOW_IN_MS;\n\n return true;\n };\n\n const queue = ({ chunk, isFirst }: { chunk: string; isFirst: boolean }) => {\n if (!chunk) return;\n\n if (isFirst) {\n reset();\n pendingChunk = chunk;\n flush();\n\n return;\n }\n\n pendingChunk += chunk;\n\n if (shouldFlushAIStreamChunkImmediately(chunk)) {\n flush();\n\n return;\n }\n\n if (!timer) {\n timer = schedule(() => {\n timer = null;\n flush();\n }, batchWindowInMs);\n }\n };\n\n const reset = () => {\n pendingChunk = '';\n batchWindowInMs = DEFAULT_BATCH_WINDOW_IN_MS;\n clearTimer();\n };\n\n return {\n flush,\n getBatchWindowInMs: () => batchWindowInMs,\n getPendingChunk: () => pendingChunk,\n queue,\n reset,\n };\n}\n",
20+
"type": "registry:lib"
21+
},
1722
{
1823
"path": "src/registry/components/editor/plugins/ai-kit.tsx",
1924
"content": "'use client';\n\nimport { useEffect, useRef } from 'react';\n\nimport cloneDeep from 'lodash/cloneDeep.js';\nimport { BaseAIPlugin, withAIBatch } from '@platejs/ai';\nimport {\n AIChatPlugin,\n AIPlugin,\n applyAISuggestions,\n getInsertPreviewStart,\n resetStreamInsertChunk,\n streamInsertChunk,\n useChatChunk,\n} from '@platejs/ai/react';\nimport { ElementApi, getPluginType, KEYS, PathApi } from 'platejs';\nimport { usePluginOption } from 'platejs/react';\n\nimport { AILoadingBar, AIMenu } from '@/registry/ui/ai-menu';\nimport { AIAnchorElement, AILeaf } from '@/registry/ui/ai-node';\nimport { createAIStreamBatcher } from '@/registry/lib/ai-stream-batching';\n\nimport { useChat } from '../use-chat';\nimport { CursorOverlayKit } from './cursor-overlay-kit';\nimport { MarkdownKit } from './markdown-kit';\n\nexport const aiChatPlugin = AIChatPlugin.extend({\n options: {\n chatOptions: {\n api: '/api/ai/command',\n body: {},\n },\n },\n render: {\n afterContainer: AILoadingBar,\n afterEditable: AIMenu,\n node: AIAnchorElement,\n },\n shortcuts: { show: { keys: 'mod+j' } },\n useHooks: ({ editor, getOption }) => {\n useChat();\n\n const mode = usePluginOption(AIChatPlugin, 'mode');\n const toolName = usePluginOption(AIChatPlugin, 'toolName');\n const insertStreamBatcherRef = useRef<ReturnType<\n typeof createAIStreamBatcher\n > | null>(null);\n\n if (!insertStreamBatcherRef.current) {\n insertStreamBatcherRef.current = createAIStreamBatcher({\n applyChunk: (chunk, options) => {\n editor.tf.withoutSaving(() => {\n if (!getOption('streaming') && !options?.force) return;\n\n editor.tf.withScrolling(() => {\n streamInsertChunk(editor, chunk, {\n textProps: {\n [getPluginType(editor, KEYS.ai)]: true,\n },\n });\n });\n });\n },\n });\n }\n\n useEffect(\n () => () => {\n insertStreamBatcherRef.current?.reset();\n },\n []\n );\n\n useChatChunk({\n onChunk: ({ chunk, isFirst, nodes, text: content }) => {\n if (isFirst && mode === 'insert') {\n const { startBlock, 
startInEmptyParagraph } =\n getInsertPreviewStart(editor);\n\n editor.getTransforms(BaseAIPlugin).ai.beginPreview({\n originalBlocks:\n startInEmptyParagraph &&\n startBlock &&\n ElementApi.isElement(startBlock)\n ? [cloneDeep(startBlock)]\n : [],\n });\n\n editor.tf.withoutSaving(() => {\n editor.tf.insertNodes(\n {\n children: [{ text: '' }],\n type: getPluginType(editor, KEYS.aiChat),\n },\n {\n at: PathApi.next(editor.selection!.focus.path.slice(0, 1)),\n }\n );\n });\n editor.setOption(AIChatPlugin, 'streaming', true);\n }\n\n if (mode === 'insert' && nodes.length > 0) {\n insertStreamBatcherRef.current?.queue({ chunk, isFirst });\n }\n\n if (toolName === 'edit' && mode === 'chat') {\n withAIBatch(\n editor,\n () => {\n applyAISuggestions(editor, content);\n },\n {\n split: isFirst,\n }\n );\n }\n },\n onFinish: () => {\n insertStreamBatcherRef.current?.flush({ force: true });\n editor.setOption(AIChatPlugin, 'streaming', false);\n resetStreamInsertChunk(editor);\n insertStreamBatcherRef.current?.reset();\n },\n });\n },\n});\n\nexport const AIKit = [\n ...CursorOverlayKit,\n ...MarkdownKit,\n AIPlugin.withComponent(AILeaf),\n aiChatPlugin,\n];\n",

apps/www/public/r/markdown-streaming-demo.json

Lines changed: 1 addition & 1 deletion
Large diffs are not rendered by default.

apps/www/public/r/registry.json

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -3144,6 +3144,10 @@
31443144
"https://platejs.org/r/ai-api.json"
31453145
],
31463146
"files": [
3147+
{
3148+
"path": "src/registry/lib/ai-stream-batching.ts",
3149+
"type": "registry:lib"
3150+
},
31473151
{
31483152
"path": "src/registry/components/editor/plugins/ai-kit.tsx",
31493153
"type": "registry:component"

apps/www/public/r/use-chat.json

Lines changed: 1 addition & 1 deletion
Large diffs are not rendered by default.

apps/www/public/tailwind.css

Lines changed: 1 addition & 1 deletion
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

apps/www/src/__registry__/index.tsx

Lines changed: 5 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1971,12 +1971,16 @@ export const Index: Record<string, any> = {
19711971
type: "registry:component",
19721972
registryDependencies: ["https://platejs.org/r/markdown-kit.json","https://platejs.org/r/cursor-overlay-kit.json","https://platejs.org/r/ai-menu.json","https://platejs.org/r/ai-node.json","https://platejs.org/r/ai-toolbar-button.json","https://platejs.org/r/ai-api.json"],
19731973
files: [{
1974+
path: "src/registry/lib/ai-stream-batching.ts",
1975+
type: "registry:lib",
1976+
target: ""
1977+
},{
19741978
path: "src/registry/components/editor/plugins/ai-kit.tsx",
19751979
type: "registry:component",
19761980
target: ""
19771981
}],
19781982
component: React.lazy(async () => {
1979-
const mod = await import("@/registry/components/editor/plugins/ai-kit.tsx")
1983+
const mod = await import("@/registry/lib/ai-stream-batching.ts")
19801984
const exportName = Object.keys(mod).find(key => typeof mod[key] === 'function' || typeof mod[key] === 'object') || item.name
19811985
return { default: mod.default || mod[exportName] }
19821986
}),

apps/www/src/registry/components/editor/use-chat.spec.tsx renamed to apps/www/src/registry/components/editor/use-chat.slow.tsx

File renamed without changes.

apps/www/src/registry/components/editor/use-chat.ts

Lines changed: 1 addition & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -73,18 +73,14 @@ const createProxyTransportFetch = (chatId: string): typeof fetch =>
7373

7474
export const useChat = () => {
7575
const editor = useEditorRef();
76-
const chatInstanceIdRef = React.useRef<string | undefined>(undefined);
76+
const [chatInstanceId] = React.useState(() => `editor:${nanoid()}`);
7777
const options: PluginChatOptions =
7878
usePluginOption(aiChatPlugin, 'chatOptions') ?? {};
7979
const {
8080
api = '/api/ai/command',
8181
transport: providedTransport,
8282
...chatOptions
8383
} = options;
84-
if (!chatInstanceIdRef.current) {
85-
chatInstanceIdRef.current = `editor:${nanoid()}`;
86-
}
87-
const chatInstanceId = chatInstanceIdRef.current;
8884

8985
// remove when you implement the route /api/ai/command
9086
const abortControllerRef = React.useRef<AbortController | null>(null);

apps/www/src/registry/examples/markdown-streaming-demo.tsx

Lines changed: 11 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -5,6 +5,7 @@ import {
55
type ErrorInfo,
66
type ReactNode,
77
useEffect,
8+
useEffectEvent,
89
useMemo,
910
useRef,
1011
useState,
@@ -378,6 +379,12 @@ export default function MarkdownStreamingDemo() {
378379
const currentChunkLabel =
379380
activeIndex === 0 ? 'before first chunk' : `#${activeIndex}`;
380381
const editorBoundaryResetKey = `${sourceIdentity}:${activeIndex}`;
382+
const syncTreeJson = useEffectEvent(() => {
383+
setTreeJson(encodeEditorTree(editor.children));
384+
});
385+
const stopPlayback = useEffectEvent(() => {
386+
setIsPlaying(false);
387+
});
381388

382389
async function handleCopyChunks() {
383390
try {
@@ -510,7 +517,7 @@ export default function MarkdownStreamingDemo() {
510517
};
511518
}
512519

513-
setTreeJson(encodeEditorTree(editor.children));
520+
syncTreeJson();
514521
return;
515522
}
516523

@@ -521,7 +528,7 @@ export default function MarkdownStreamingDemo() {
521528
sourceIdentity,
522529
streamedChunks: chunks.slice(0, activeIndex),
523530
};
524-
setTreeJson(encodeEditorTree(editor.children));
531+
syncTreeJson();
525532
return;
526533
}
527534

@@ -541,13 +548,13 @@ export default function MarkdownStreamingDemo() {
541548
};
542549
}
543550

544-
setTreeJson(encodeEditorTree(editor.children));
551+
syncTreeJson();
545552
}, [activeIndex, editor, sourceIdentity, transformedCurrentChunks]);
546553

547554
useEffect(() => {
548555
if (!isPlaying) return;
549556
if (activeIndex >= transformedCurrentChunks.length) {
550-
setIsPlaying(false);
557+
stopPlayback();
551558
return;
552559
}
553560

0 commit comments

Comments (0)