feat: update AI Text generator action to use version 1.2.0 for improved functionality

feat: enhance input handling by adding support for prompt and system file paths to read content from files
feat: update message handling to allow reading message content from file paths, improving flexibility in input management
0xJord4n committed Dec 24, 2024
1 parent 206b219 commit 523314a
Showing 5 changed files with 36 additions and 6 deletions.
2 changes: 1 addition & 1 deletion README.md
@@ -58,7 +58,7 @@ jobs:
     steps:
       - uses: actions/checkout@v4
       - name: Generate AI Text
-        uses: 0xjord4n/aixion@v1.1.0
+        uses: 0xjord4n/aixion@v1.2.0
         with:
           config: >
             {
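With the new *_path inputs in 1.2.0, a workflow can point the action at files in the repository instead of passing inline strings. The step below is an illustrative sketch only: prompt_path and system_path follow the schema added in src/input.ts, while the provider, provider_options, model values, and file paths are placeholders and not taken from this repository.

      - name: Generate AI Text
        uses: 0xjord4n/aixion@v1.2.0
        with:
          config: >
            {
              "provider": "openai",
              "provider_options": {"api_key": "${{ secrets.OPENAI_API_KEY }}"},
              "model": "gpt-4o",
              "prompt_path": ".github/prompts/changelog-prompt.md",
              "system_path": ".github/prompts/system.md"
            }
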
19 changes: 17 additions & 2 deletions dist/index.js
@@ -26,8 +26,20 @@ const path_1 = __importDefault(__nccwpck_require__(6928));
 exports["default"] = (input) => __awaiter(void 0, void 0, void 0, function* () {
     const llmResponse = yield (0, ai_1.generateText)({
         model: (0, provider_1.default)(input.provider, input.provider_options)(input.model),
-        prompt: input.prompt,
-        messages: input.messages,
+        prompt: input.prompt_path
+            ? fs_1.default.readFileSync(input.prompt_path, { encoding: "utf-8" })
+            : input.prompt,
+        system: input.system_path
+            ? fs_1.default.readFileSync(input.system_path, { encoding: "utf-8" })
+            : input.system,
+        messages: input.messages
+            ? input.messages.map((message) => ({
+                role: message.role,
+                content: message.content_path
+                    ? fs_1.default.readFileSync(message.content_path, { encoding: "utf-8" })
+                    : message.content,
+            }))
+            : undefined,
         maxTokens: input.max_tokens,
         temperature: input.temperature,
         topP: input.top_p,
@@ -134,13 +146,16 @@ exports.providerOptionsSchema = zod.object({
 exports.messageSchema = zod.object({
     role: zod.nativeEnum(types_1.Role),
     content: zod.string(),
+    content_path: zod.string().optional(),
 });
 exports.inputSchema = zod.object({
     provider: zod.nativeEnum(types_1.Provider),
     provider_options: exports.providerOptionsSchema,
     save_path: zod.string().optional(),
     prompt: zod.string().optional(),
+    prompt_path: zod.string().optional(),
     system: zod.string().optional(),
+    system_path: zod.string().optional(),
     messages: zod.array(exports.messageSchema).optional(),
     model: zod.string(),
     temperature: zod.number().optional(),
16 changes: 14 additions & 2 deletions src/action.ts
@@ -9,8 +9,20 @@ import path from "path";
 export default async (input: z.infer<typeof inputSchema>): Promise<Output> => {
   const llmResponse = await generateText({
     model: provider(input.provider, input.provider_options)(input.model),
-    prompt: input.prompt,
-    messages: input.messages as CoreMessage[],
+    prompt: input.prompt_path
+      ? fs.readFileSync(input.prompt_path, { encoding: "utf-8" })
+      : input.prompt,
+    system: input.system_path
+      ? fs.readFileSync(input.system_path, { encoding: "utf-8" })
+      : input.system,
+    messages: input.messages
+      ? (input.messages.map((message) => ({
+          role: message.role,
+          content: message.content_path
+            ? fs.readFileSync(message.content_path, { encoding: "utf-8" })
+            : message.content,
+        })) as CoreMessage[])
+      : undefined,
     maxTokens: input.max_tokens,
     temperature: input.temperature,
     topP: input.top_p,
3 changes: 3 additions & 0 deletions src/input.ts
@@ -10,14 +10,17 @@ export const providerOptionsSchema = zod.object({
 export const messageSchema = zod.object({
   role: zod.nativeEnum(Role),
   content: zod.string(),
+  content_path: zod.string().optional(),
 });
 
 export const inputSchema = zod.object({
   provider: zod.nativeEnum(Provider),
   provider_options: providerOptionsSchema,
   save_path: zod.string().optional(),
   prompt: zod.string().optional(),
+  prompt_path: zod.string().optional(),
   system: zod.string().optional(),
+  system_path: zod.string().optional(),
   messages: zod.array(messageSchema).optional(),
   model: zod.string(),
   temperature: zod.number().optional(),
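Per the schema above, each message can now carry an optional content_path; when it is present, the action reads that message's body from the file instead of using the inline content. Below is a hedged fragment of the config JSON (provider, provider_options, and model are omitted; the role strings and file paths are placeholders, since the Role enum values are not shown in this diff). Note that content stays a required string in messageSchema, so an empty string is supplied alongside content_path here.

              "messages": [
                {"role": "system", "content": "", "content_path": ".github/prompts/system.md"},
                {"role": "user", "content": "Summarize the pull requests merged this week."}
              ]
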
2 changes: 1 addition & 1 deletion src/types.ts
@@ -1,4 +1,4 @@
-import { CoreMessage, LanguageModelUsage } from "ai";
+import { LanguageModelUsage } from "ai";
 import type { LanguageModelV1FinishReason } from "@ai-sdk/provider";
 
 export enum Provider {
