
[syntax] import existing kits & add types
switched to <kit>.<nodetype> syntax as well in the examples;
let's see how this feels.
seefeldb committed Nov 13, 2023
1 parent 25efe98 commit 832b2ac
Showing 2 changed files with 131 additions and 53 deletions.
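
For orientation, the commit message refers to the new <kit>.<nodetype> calling convention: instead of free-standing node functions (passthrough, promptTemplate, ...), a kit is wrapped once and its handlers are called as properties of the resulting object. Below is a minimal sketch of what that looks like for a caller, assuming the addKit and flow helpers added in this commit and the Core kit used in the examples; the sketch itself is illustrative and not part of the commit.

import { addKit, flow } from "./wires-proposal-7-lib.js";
import { Core } from "@google-labs/core-kit";

// addKit instantiates the kit and turns each of its handlers into a node factory,
// so nodes are created as <kit>.<nodetype>(...).
const core = addKit(Core);

const graph = flow(
  async (inputs) => {
    // <kit>.<nodetype>: the passthrough node now lives on the core kit object.
    const { foo } = await core.passthrough(inputs);
    return { foo };
  },
  { foo: "bar" }
);

console.log(await graph);

In the diffs that follow, lines the commit adds are prefixed with + and removed lines with -.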
32 changes: 27 additions & 5 deletions seeds/graph-playground/wild/syntax/wires-proposal-7-lib.ts
@@ -8,7 +8,10 @@ import {
GraphDescriptor,
NodeDescriptor,
SubGraphs,
+ Kit,
+ KitConstructor,
InputValues as OriginalInputValues,
+ NodeFactory as OriginalNodeFactory,
} from "@google-labs/breadboard";

export type NodeValue =
@@ -71,15 +74,15 @@ const handlers: NodeHandlers = {
output: reservedWord,
};

- type NodeFactory<I extends InputValues, O extends OutputValues> = (
+ export type NodeFactory<I extends InputValues, O extends OutputValues> = (
config?: NodeImpl<InputValues, I> | Value<NodeValue> | InputsMaybeAsValues<I>
) => NodeProxy<I, O>;

export function addNodeType<I extends InputValues, O extends OutputValues>(
name: string,
- fn: NodeHandlerFunction<I, O>
+ handler: NodeHandler<I, O>
): NodeFactory<I, O> {
- (handlers[name] as unknown as NodeHandlerFunction<I, O>) = fn;
+ (handlers[name] as unknown as NodeHandler<I, O>) = handler;
return ((config?: InputsMaybeAsValues<I>) => {
return new NodeImpl(name, getCurrentContextRunner(), config).asProxy();
}) as unknown as NodeFactory<I, O>;
@@ -92,6 +95,25 @@ export function action<
return addNodeType(getNextNodeId("fn"), fn);
}

+ // Extracts handlers from kit and creates new kinds of nodes from them.
+ export function addKit<T extends Kit>(ctr: KitConstructor<T>) {
+ const kit = new ctr({} as unknown as OriginalNodeFactory);
+ const nodes = {} as { [key: string]: NodeFactory<InputValues, OutputValues> };
+ Object.entries(kit.handlers).forEach(([name, handler]) => {
+ const handlerFunction =
+ handler instanceof Function ? handler : handler.invoke;
+ nodes[name] = addNodeType(name, {
+ invoke: async (inputs) => {
+ return handlerFunction(
+ (await inputs) as OriginalInputValues,
+ {}
+ ) as Promise<OutputValues>;
+ },
+ });
+ });
+ return nodes;
+ }
+
interface EdgeImpl<
FromI extends InputValues = InputValues,
FromO extends OutputValues = OutputValues,
@@ -229,7 +251,7 @@ class NodeImpl<

Object.entries(values).forEach(([key, value]) => {
if (isValue(value)) {
- nodes.push((value as Value).asNodeInput());
+ nodes.push((value as Value).as(key).asNodeInput());
} else if (value instanceof NodeImpl) {
nodes.push([value.unProxy(), { [key]: key }]);
} else {
@@ -427,7 +449,7 @@ class NodeImpl<
else name = match.groups?.name || name;
}

node.type = "runJavascriptX";
node.type = "runJavascript";
node.configuration = { ...node.configuration, code, name };

return [node];
152 changes: 104 additions & 48 deletions seeds/graph-playground/wild/syntax/wires-proposal-7.ts
@@ -4,14 +4,48 @@
* SPDX-License-Identifier: Apache-2.0
*/

+ import { config } from "dotenv";
+
import {
NodeValue,
InputValues,
- addNodeType,
OutputValues,
flow,
action,
+ addKit,
+ NodeFactory,
} from "./wires-proposal-7-lib.js";
+
+ import { Core } from "@google-labs/core-kit";
+ import { Starter } from "@google-labs/llm-starter";
+
+ config();
+
+ const core = addKit(Core) as unknown as {
+ passthrough: NodeFactory<InputValues, OutputValues>;
+ };
+ const llm = addKit(Starter) as unknown as {
+ promptTemplate: NodeFactory<
+ { template: string; [key: string]: NodeValue },
+ { prompt: string }
+ >;
+ secrets: NodeFactory<{ keys: string[] }, { [k: string]: string }>;
+ generateText: NodeFactory<
+ { text: string; PALM_KEY: string },
+ { completion: string }
+ >;
+ runJavascript: NodeFactory<
+ {
+ code: string;
+ name: string;
+ raw: boolean;
+ [key: string]: NodeValue;
+ },
+ { result: NodeValue; [k: string]: NodeValue }
+ >;
+ };
+
+ /*
const passthroughHandler = async (inputs: PromiseLike<InputValues>) => {
return Promise.resolve(await inputs);
};
@@ -41,9 +75,10 @@ const generateText = addNodeType(
Promise.resolve({ completion: (await inputs).prompt })
);
const runJavascript = addNodeType("runJavascript", passthroughHandler);
+ */

async function singleNode() {
- const graph = passthrough({ foo: "bar" });
+ const graph = core.passthrough({ foo: "bar" });

const result = await graph;

@@ -54,7 +89,7 @@ await singleNode();
async function simpleFunction() {
const graph = flow(
async (inputs) => {
- const { foo } = await passthrough(inputs);
+ const { foo } = await core.passthrough(inputs);
return { foo };
},
{ foo: "bar", baz: "bar" }
@@ -70,7 +105,7 @@ await simpleFunction();
async function simpleFunctionGraph() {
const graph = flow(
(inputs) => {
- const p1 = passthrough(inputs);
+ const p1 = core.passthrough(inputs);
const { foo } = p1; // Get an output, as a Promise!
return { foo };
},
@@ -103,16 +138,16 @@ await customAction();
async function mathImperative() {
const graph = flow(
(inputs) => {
- const { prompt } = promptTemplate({
+ const { prompt } = llm.promptTemplate({
template:
"Write Javascript to compute the result for this question:\nQuestion: {{question}}\nCode: ",
"Write a Javascript function called `run` to compute the result for this question:\nQuestion: {{question}}\nCode: ",
question: inputs.question,
});
- const { completion } = generateText({
- prompt,
- PALM_KEY: secrets({ keys: ["PALM_KEY"] }),
+ const { completion } = llm.generateText({
+ text: prompt,
+ PALM_KEY: llm.secrets({ keys: ["PALM_KEY"] }).PALM_KEY,
});
- const result = runJavascript({ code: completion });
+ const result = llm.runJavascript({ code: completion });
return result;
},
{ question: "1+1" }
@@ -127,14 +162,20 @@ await mathImperative();
async function mathChainGraph() {
const graph = flow(
(inputs) => {
- return promptTemplate({
- template:
- "Write Javascript to compute the result for this question:\nQuestion: {{question}}\nCode: ",
- question: inputs.question,
- })
- .to(generateText({ PALM_KEY: secrets({ keys: ["PALM_KEY"] }) }))
+ return llm
+ .promptTemplate({
+ template:
+ "Write a Javascript function called `run` to compute the result for this question:\nQuestion: {{question}}\nCode: ",
+ question: inputs.question,
+ })
+ .prompt.as("text")
+ .to(
+ llm.generateText({
+ PALM_KEY: llm.secrets({ keys: ["PALM_KEY"] }).PALM_KEY,
+ })
+ )
.completion.as("code")
- .to(runJavascript());
+ .to(llm.runJavascript());
},
{ question: "1+1" }
);
@@ -146,20 +187,22 @@
await mathChainGraph();

async function mathChainDirectly() {
- const graph = passthrough({ question: "1+1" })
+ const graph = core
+ .passthrough({ question: "1+1" })
.to(
- promptTemplate({
+ llm.promptTemplate({
template:
"Write Javascript to compute the result for this question:\nQuestion: {{question}}\nCode: ",
"Write a Javascript function called `run` to compute the result for this question:\nQuestion: {{question}}\nCode: ",
})
)
+ .prompt.as("text")
.to(
- generateText({
- PALM_KEY: secrets({ keys: ["PALM_KEY"] }),
+ llm.generateText({
+ PALM_KEY: llm.secrets({ keys: ["PALM_KEY"] }).PALM_KEY,
})
)
.completion.as("code")
- .to(runJavascript());
+ .to(llm.runJavascript());

const result = await graph;

@@ -169,14 +212,16 @@ await mathChainDirectly();

async function ifElse() {
const math = action((inputs) => {
- return promptTemplate({
- template:
- "Write Javascript to compute the result for this question:\nQuestion: {{question}}\nCode: ",
- question: inputs.question,
- })
- .to(generateText({ PALM_KEY: secrets({ keys: ["PALM_KEY"] }) }))
+ return llm
+ .promptTemplate({
+ template:
+ "Write a Javascript function called `run` to compute the result for this question:\nQuestion: {{question}}\nCode: ",
+ question: inputs.question,
+ })
+ .prompt.as("text")
+ .to(llm.generateText({ PALM_KEY: llm.secrets({ keys: ["PALM_KEY"] }) }))
.completion.as("code")
- .to(runJavascript());
+ .to(llm.runJavascript());
});

const search = action((inputs) => {
@@ -186,11 +231,18 @@ async function ifElse() {

const graph = flow(
async (inputs) => {
- const { completion } = await promptTemplate({
- template:
- "Is this question about math? Answer YES or NO.\nQuestion: {{question}}\nAnswer: ",
- question: inputs.question,
- }).to(generateText({ PALM_KEY: secrets({ keys: ["PALM_KEY"] }) }));
+ const { completion } = await llm
+ .promptTemplate({
+ template:
+ "Is this question about math? Answer YES or NO.\nQuestion: {{question}}\nAnswer: ",
+ question: inputs.question,
+ })
+ .prompt.as("text")
+ .to(
+ llm.generateText({
+ PALM_KEY: llm.secrets({ keys: ["PALM_KEY"] }).PALM_KEY,
+ })
+ );
if (completion && (completion as string).startsWith("YES")) {
return math({ question: inputs.question });
} else {
@@ -208,14 +260,16 @@ await ifElse();

async function ifElseSerializable() {
const math = action((inputs) => {
- return promptTemplate({
- template:
- "Write Javascript to compute the result for this question:\nQuestion: {{question}}\nCode: ",
- question: inputs.question,
- })
- .to(generateText({ PALM_KEY: secrets({ keys: ["PALM_KEY"] }) }))
+ return llm
+ .promptTemplate({
+ template:
+ "Write a Javascript function called `run` to compute the result for this question:\nQuestion: {{question}}\nCode: ",
+ question: inputs.question,
+ })
+ .prompt.as("text")
+ .to(llm.generateText({ PALM_KEY: llm.secrets({ keys: ["PALM_KEY"] }) }))
.completion.as("code")
- .to(runJavascript());
+ .to(llm.runJavascript());
});

const search = action((inputs) => {
@@ -225,12 +279,14 @@ async function ifElseSerializable() {

const graph = flow(
async (inputs) => {
- return promptTemplate({
- template:
- "Is this question about math? Answer YES or NO.\nQuestion: {{question}}\nAnswer: ",
- question: inputs.question,
- })
- .to(generateText({ PALM_KEY: secrets({ keys: ["PALM_KEY"] }) }))
+ return llm
+ .promptTemplate({
+ template:
+ "Is this question about math? Answer YES or NO.\nQuestion: {{question}}\nAnswer: ",
+ question: inputs.question,
+ })
+ .prompt.as("text")
+ .to(llm.generateText({ PALM_KEY: llm.secrets({ keys: ["PALM_KEY"] }) }))
.to(
async (inputs) => {
const { completion, math, search } = await inputs;
