Skip to content

Commit a543a27

Browse files
thucpn and leehuwuj authored
feat: bump chat-ui with inline artifact (#675)
* feat: bump chat-ui with inline artifact * bump chat-ui 0.5.0 * update extractLastArtifact * fix: imports * fix: circle import * missing export * update document gen workflow * remove artifactEvent for annotations * update document * bump chat-ui 0.5.1 to fix parsing $ * bump chat-ui 0.5.2 * toArtifactEvent internal * update doc to use toArtifactEvent * do workflow transformmation internal * revert doc * keep contract * fix format * update get_last_artifact to extract inline annotations in Python * fix imports * Transforms ArtifactEvent to AgentStream with inline annotation format * Create thick-turtles-deny.md * donot use relative imports * toInlineAnnotationEvent * to_inline_annotation_event in python * refactor: move toInlineAnnotationEvent to inline.ts * update comment * rename ArtifactTransform to InlineAnnotationTransformer * add codegen example --------- Co-authored-by: leehuwuj <[email protected]>
1 parent 63edd74 commit a543a27

File tree

21 files changed

+1057
-74
lines changed

21 files changed

+1057
-74
lines changed

.changeset/thick-turtles-deny.md

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,7 @@
---
"create-llama": patch
"@llamaindex/server": patch
"@create-llama/llama-index-server": patch
---

feat: bump chat-ui with inline artifact

packages/create-llama/templates/components/use-cases/typescript/code_generator/src/app/workflow.ts

Lines changed: 1 addition & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
import { extractLastArtifact } from "@llamaindex/server";
1+
import { artifactEvent, extractLastArtifact } from "@llamaindex/server";
22
import { ChatMemoryBuffer, MessageContent, Settings } from "llamaindex";
33

44
import {
@@ -52,19 +52,6 @@ const synthesizeAnswerEvent = workflowEvent<object>();
5252

5353
const uiEvent = workflowEvent<UIEvent>();
5454

55-
const artifactEvent = workflowEvent<{
56-
type: "artifact";
57-
data: {
58-
type: "code";
59-
created_at: number;
60-
data: {
61-
language: string;
62-
file_name: string;
63-
code: string;
64-
};
65-
};
66-
}>();
67-
6855
export function workflowFactory(reqBody: any) {
6956
const llm = Settings.llm;
7057

packages/create-llama/templates/components/use-cases/typescript/document_generator/src/app/workflow.ts

Lines changed: 1 addition & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
import { extractLastArtifact } from "@llamaindex/server";
1+
import { artifactEvent, extractLastArtifact } from "@llamaindex/server";
22
import { ChatMemoryBuffer, MessageContent, Settings } from "llamaindex";
33

44
import {
@@ -55,19 +55,6 @@ const synthesizeAnswerEvent = workflowEvent<{
5555

5656
const uiEvent = workflowEvent<UIEvent>();
5757

58-
const artifactEvent = workflowEvent<{
59-
type: "artifact";
60-
data: {
61-
type: "document";
62-
created_at: number;
63-
data: {
64-
title: string;
65-
content: string;
66-
type: "markdown" | "html";
67-
};
68-
};
69-
}>();
70-
7158
export function workflowFactory(reqBody: any) {
7259
const llm = Settings.llm;
7360

Lines changed: 22 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,22 @@
This example demonstrates how to use the code generation workflow.

```ts
new LlamaIndexServer({
  workflow: workflowFactory,
  uiConfig: {
    starterQuestions: [
      "Generate a calculator app",
      "Create a simple todo list app",
    ],
    componentsDir: "components",
  },
  port: 3000,
}).start();
```

Export OpenAI API key and start the server in dev mode.

```bash
export OPENAI_API_KEY=<your-openai-api-key>
npx nodemon --exec tsx index.ts
```
Lines changed: 132 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,132 @@
1+
import { Badge } from "@/components/ui/badge";
2+
import { Card, CardContent, CardHeader, CardTitle } from "@/components/ui/card";
3+
import { Progress } from "@/components/ui/progress";
4+
import { Skeleton } from "@/components/ui/skeleton";
5+
import { cn } from "@/lib/utils";
6+
import { Markdown } from "@llamaindex/chat-ui/widgets";
7+
import { ListChecks, Loader2, Wand2 } from "lucide-react";
8+
import { useEffect, useState } from "react";
9+
10+
const STAGE_META = {
11+
plan: {
12+
icon: ListChecks,
13+
badgeText: "Step 1/2: Planning",
14+
gradient: "from-blue-100 via-blue-50 to-white",
15+
progress: 33,
16+
iconBg: "bg-blue-100 text-blue-600",
17+
badge: "bg-blue-100 text-blue-700",
18+
},
19+
generate: {
20+
icon: Wand2,
21+
badgeText: "Step 2/2: Generating",
22+
gradient: "from-violet-100 via-violet-50 to-white",
23+
progress: 66,
24+
iconBg: "bg-violet-100 text-violet-600",
25+
badge: "bg-violet-100 text-violet-700",
26+
},
27+
};
28+
29+
function ArtifactWorkflowCard({ event }) {
30+
const [visible, setVisible] = useState(event?.state !== "completed");
31+
const [fade, setFade] = useState(false);
32+
33+
useEffect(() => {
34+
if (event?.state === "completed") {
35+
setVisible(false);
36+
} else {
37+
setVisible(true);
38+
setFade(false);
39+
}
40+
}, [event?.state]);
41+
42+
if (!event || !visible) return null;
43+
44+
const { state, requirement } = event;
45+
const meta = STAGE_META[state];
46+
47+
if (!meta) return null;
48+
49+
return (
50+
<div className="flex min-h-[180px] w-full items-center justify-center py-2">
51+
<Card
52+
className={cn(
53+
"w-full rounded-xl shadow-md transition-all duration-500",
54+
"border-0",
55+
fade && "pointer-events-none opacity-0",
56+
`bg-gradient-to-br ${meta.gradient}`,
57+
)}
58+
style={{
59+
boxShadow:
60+
"0 2px 12px 0 rgba(80, 80, 120, 0.08), 0 1px 3px 0 rgba(80, 80, 120, 0.04)",
61+
}}
62+
>
63+
<CardHeader className="flex flex-row items-center gap-2 px-3 pb-1 pt-2">
64+
<div
65+
className={cn(
66+
"flex items-center justify-center rounded-full p-1",
67+
meta.iconBg,
68+
)}
69+
>
70+
<meta.icon className="h-5 w-5" />
71+
</div>
72+
<CardTitle className="flex items-center gap-2 text-base font-semibold">
73+
<Badge className={cn("ml-1", meta.badge, "px-2 py-0.5 text-xs")}>
74+
{meta.badgeText}
75+
</Badge>
76+
</CardTitle>
77+
</CardHeader>
78+
<CardContent className="px-3 py-1">
79+
{state === "plan" && (
80+
<div className="flex flex-col items-center gap-2 py-2">
81+
<Loader2 className="mb-1 h-6 w-6 animate-spin text-blue-400" />
82+
<div className="text-center text-sm font-medium text-blue-900">
83+
Analyzing your request...
84+
</div>
85+
<Skeleton className="mt-1 h-3 w-1/2 rounded-full" />
86+
</div>
87+
)}
88+
{state === "generate" && (
89+
<div className="flex flex-col gap-2 py-2">
90+
<div className="flex items-center gap-1">
91+
<Loader2 className="h-4 w-4 animate-spin text-violet-400" />
92+
<span className="text-sm font-medium text-violet-900">
93+
Working on the requirement:
94+
</span>
95+
</div>
96+
<div className="max-h-24 overflow-auto rounded-lg border border-violet-200 bg-violet-50 px-2 py-1 text-xs">
97+
{requirement ? (
98+
<Markdown content={requirement} />
99+
) : (
100+
<span className="italic text-violet-400">
101+
No requirements available yet.
102+
</span>
103+
)}
104+
</div>
105+
</div>
106+
)}
107+
</CardContent>
108+
<div className="px-3 pb-2 pt-1">
109+
<Progress
110+
value={meta.progress}
111+
className={cn(
112+
"h-1 rounded-full bg-gray-200",
113+
state === "plan" && "bg-blue-200",
114+
state === "generate" && "bg-violet-200",
115+
)}
116+
/>
117+
</div>
118+
</Card>
119+
</div>
120+
);
121+
}
122+
123+
export default function Component({ events }) {
124+
const aggregateEvents = () => {
125+
if (!events || events.length === 0) return null;
126+
return events[events.length - 1];
127+
};
128+
129+
const event = aggregateEvents();
130+
131+
return <ArtifactWorkflowCard event={event} />;
132+
}
Lines changed: 20 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,20 @@
1+
import { OpenAI } from "@llamaindex/openai";
2+
import { LlamaIndexServer } from "@llamaindex/server";
3+
import { Settings } from "llamaindex";
4+
import { workflowFactory } from "./src/app/workflow";
5+
6+
Settings.llm = new OpenAI({
7+
model: "gpt-4o-mini",
8+
});
9+
10+
new LlamaIndexServer({
11+
workflow: workflowFactory,
12+
uiConfig: {
13+
starterQuestions: [
14+
"Generate a calculator app",
15+
"Create a simple todo list app",
16+
],
17+
componentsDir: "components",
18+
},
19+
port: 3000,
20+
}).start();

0 commit comments

Comments (0)