feat(PM-1793): Create AI workflow api implementation #34
Merged
Changes from 7 of 14 commits:
8753af5 Merge branch 'pm-1788' into pm-1793 (hentrymartin)
245f8f7 feat: create ai workflow api (hentrymartin)
14c1641 deploy to dev (hentrymartin)
6172269 fix: lint (hentrymartin)
b7dfcf6 fix: errors (hentrymartin)
0ac583d fix: lint (hentrymartin)
7fa9f50 fix: modified api url (hentrymartin)
e601a0b Merge branch 'develop' into pm-1793 (hentrymartin)
bd57ad1 fix: review comments (hentrymartin)
71baf40 fix: lint (hentrymartin)
1e407f0 fix: lint (hentrymartin)
f1b5c62 fix: removed last migration (hentrymartin)
0a7864c fix: removed last migration (hentrymartin)
8915092 fix: lint (hentrymartin)
@@ -43,6 +43,9 @@ const modelMappingKeys = [

```diff
   'review',
   'review_item',
   'review_item_comment',
+  'llm_provider',
+  'llm_model',
+  'ai_workflow'
 ];
 const subModelMappingKeys = {
   review_item_comment: ['reviewItemComment', 'appeal', 'appealResponse'],
```
@@ -102,6 +105,9 @@ const reviewItemCommentAppealResponseIdMap = readIdMap(

```diff
 );
 const uploadIdMap = readIdMap('uploadIdMap');
 const submissionIdMap = readIdMap('submissionIdMap');
+const llmProviderIdMap = readIdMap('llmProviderIdMap');
+const llmModelIdMap = readIdMap('llmModelIdMap');
+const aiWorkflowIdMap = readIdMap('aiWorkflowIdMap');

 // read resourceSubmissionSet
 const rsSetFile = '.tmp/resourceSubmissionSet.json';
```
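`readIdMap` itself is not part of this diff. Judging from how the maps are read here and written back to `.tmp` at the end of `migrate()` (last hunk of this file), it presumably restores a persisted legacy-id-to-new-id map from disk. The sketch below is only a guess under that assumption, not the script's actual implementation:

```ts
import * as fs from 'fs';

// Hypothetical sketch: restore a legacy-id -> new-id Map persisted as JSON under .tmp.
// The real readIdMap in this repository may differ in naming, typing, and error handling.
function readIdMap(name: string): Map<string, string> {
  const file = `.tmp/${name}.json`;
  if (!fs.existsSync(file)) {
    return new Map();
  }
  const entries: Record<string, string> = JSON.parse(fs.readFileSync(file, 'utf8'));
  return new Map(Object.entries(entries));
}
```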
@@ -808,7 +814,6 @@ async function processType(type: string, subtype?: string) {

```diff
       case 'scorecard': {
         console.log(`[${type}][${file}] Processing file`);
         const processedData = jsonData[key]
-          .filter((sc) => !scorecardIdMap.has(sc.scorecard_id))
           .map((sc) => {
             const id = nanoid(14);
             scorecardIdMap.set(sc.scorecard_id, id);
```
@@ -1342,6 +1347,177 @@ async function processType(type: string, subtype?: string) {

```diff
         }
         break;
       }
+      case 'llm_provider': {
+        console.log(`[${type}][${subtype}][${file}] Processing file`);
+        const idToLegacyIdMap = {};
+        const processedData = jsonData[key]
+          .map((c) => {
+            const id = nanoid(14);
+            llmProviderIdMap.set(
+              c.llm_provider_id,
+              id,
+            );
+            idToLegacyIdMap[id] = c.llm_provider_id;
+            return {
+              id: id,
+              name: c.name,
+              createdAt: new Date(c.create_date),
+              createdBy: c.create_user,
+            };
+          });
+
+        const totalBatches = Math.ceil(processedData.length / batchSize);
+        for (let i = 0; i < processedData.length; i += batchSize) {
+          const batchIndex = i / batchSize + 1;
+          console.log(
+            `[${type}][${subtype}][${file}] Processing batch ${batchIndex}/${totalBatches}`,
+          );
+          const batch = processedData.slice(i, i + batchSize);
+          await prisma.llmProvider
+            .createMany({
+              data: batch,
+            })
+            .catch(async () => {
+              console.error(
+                `[${type}][${subtype}][${file}] An error occurred, retrying individually`,
+              );
+              for (const item of batch) {
+                await prisma.llmProvider
+                  .create({
+                    data: item,
+                  })
+                  .catch((err) => {
+                    llmProviderIdMap.delete(
+                      idToLegacyIdMap[item.id],
+                    );
+                    console.error(
+                      `[${type}][${subtype}][${file}] Error code: ${err.code}, LegacyId: ${idToLegacyIdMap[item.id]}`,
+                    );
+                  });
+              }
+            });
+        }
+        break;
+      }
```
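The `createMany`-then-retry-individually pattern in this case is repeated almost verbatim for `llm_model` and `ai_workflow` below. Purely as an illustration of that shared shape (not something this PR adds), it could be factored into a generic helper along these lines; `insertInBatches`, `CreateManyDelegate`, and `onItemError` are hypothetical names:

```ts
// Illustrative only: the batch-insert-with-per-row-fallback shape shared by the
// llm_provider, llm_model and ai_workflow cases in this diff.
type CreateManyDelegate<T> = {
  createMany(args: { data: T[] }): Promise<unknown>;
  create(args: { data: T }): Promise<unknown>;
};

async function insertInBatches<T extends { id: string }>(
  delegate: CreateManyDelegate<T>,
  rows: T[],
  batchSize: number,
  onItemError: (item: T, err: { code?: string }) => void,
): Promise<void> {
  const totalBatches = Math.ceil(rows.length / batchSize);
  for (let i = 0; i < rows.length; i += batchSize) {
    console.log(`Processing batch ${i / batchSize + 1}/${totalBatches}`);
    const batch = rows.slice(i, i + batchSize);
    try {
      await delegate.createMany({ data: batch });
    } catch {
      // createMany is all-or-nothing, so retry rows one by one:
      // a single bad record then no longer discards the whole batch.
      for (const item of batch) {
        await delegate.create({ data: item }).catch((err) => onItemError(item, err));
      }
    }
  }
}
```

The per-row fallback costs extra round trips on failure, but keeps one rejected record from dropping its entire batch, which is why the three cases below all roll back only the failing entry from their id map.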
```diff
+      case 'llm_model': {
+        console.log(`[${type}][${subtype}][${file}] Processing file`);
+        const idToLegacyIdMap = {};
+        const processedData = jsonData[key]
+          .map((c) => {
+            const id = nanoid(14);
+            llmModelIdMap.set(
+              c.llm_model_id,
+              id,
+            );
+            idToLegacyIdMap[id] = c.llm_model_id;
+            console.log(llmProviderIdMap.get(c.provider_id), 'c.provider_id')
+            return {
+              id: id,
+              providerId: llmProviderIdMap.get(c.provider_id),
+              name: c.name,
+              description: c.description,
+              icon: c.icon,
+              url: c.url,
+              createdAt: new Date(c.create_date),
+              createdBy: c.create_user,
+            };
+          });
+
+        console.log(llmProviderIdMap, processedData, 'processedData')
+
+        const totalBatches = Math.ceil(processedData.length / batchSize);
+        for (let i = 0; i < processedData.length; i += batchSize) {
+          const batchIndex = i / batchSize + 1;
+          console.log(
+            `[${type}][${subtype}][${file}] Processing batch ${batchIndex}/${totalBatches}`,
+          );
+          const batch = processedData.slice(i, i + batchSize);
+          await prisma.llmModel
+            .createMany({
+              data: batch,
+            })
+            .catch(async () => {
+              console.error(
+                `[${type}][${subtype}][${file}] An error occurred, retrying individually`,
+              );
+              for (const item of batch) {
+                await prisma.llmModel
+                  .create({
+                    data: item,
+                  })
+                  .catch((err) => {
+                    llmModelIdMap.delete(
+                      idToLegacyIdMap[item.id],
+                    );
+                    console.error(
+                      `[${type}][${subtype}][${file}] Error code: ${err.code}, LegacyId: ${idToLegacyIdMap[item.id]}`,
+                    );
+                  });
+              }
+            });
+        }
+        break;
+      }
```

Review comment on the `const id = nanoid(14);` line: Consider using a more descriptive variable name than …
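Note that `providerId` is `NOT NULL` in the migration below, so any `c.provider_id` missing from `llmProviderIdMap` resolves to `undefined` here and the row is rejected at insert time; the stray debug `console.log` calls above look like they were chasing exactly that. A possible pre-check, shown only as a hypothetical hardening using the variables already in scope in this case, not as part of this PR:

```ts
// Hypothetical guard: surface llm_model rows whose provider was never migrated
// before attempting the inserts, instead of letting the NOT NULL constraint reject them.
const unresolvedProviders = jsonData[key].filter(
  (c) => !llmProviderIdMap.has(c.provider_id),
);
if (unresolvedProviders.length > 0) {
  console.warn(
    `[llm_model] ${unresolvedProviders.length} rows reference provider ids missing from llmProviderIdMap`,
  );
}
```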
```diff
+      case 'ai_workflow': {
+        console.log(`[${type}][${subtype}][${file}] Processing file`);
+        const idToLegacyIdMap = {};
+        const processedData = jsonData[key]
+          .map((c) => {
+            const id = nanoid(14);
+            aiWorkflowIdMap.set(
+              c.ai_workflow_id,
+              id,
+            );
+            idToLegacyIdMap[id] = c.ai_workflow_id;
+            return {
+              id: id,
+              llmId: llmModelIdMap.get(c.llm_id),
+              name: c.name,
+              description: c.description,
+              defUrl: c.def_url,
+              gitId: c.git_id,
+              gitOwner: c.git_owner,
+              scorecardId: scorecardIdMap.get(c.scorecard_id),
+              createdAt: new Date(c.create_date),
+              createdBy: c.create_user,
+              updatedAt: new Date(c.modify_date),
+              updatedBy: c.modify_user,
+            };
+          });
+
+        const totalBatches = Math.ceil(processedData.length / batchSize);
+        for (let i = 0; i < processedData.length; i += batchSize) {
+          const batchIndex = i / batchSize + 1;
+          console.log(
+            `[${type}][${subtype}][${file}] Processing batch ${batchIndex}/${totalBatches}`,
+          );
+          const batch = processedData.slice(i, i + batchSize);
+          await prisma.aiWorkflow
+            .createMany({
+              data: batch,
+            })
+            .catch(async () => {
+              console.error(
+                `[${type}][${subtype}][${file}] An error occurred, retrying individually`,
+              );
+              for (const item of batch) {
+                await prisma.aiWorkflow
+                  .create({
+                    data: item,
+                  })
+                  .catch((err) => {
+                    aiWorkflowIdMap.delete(
+                      idToLegacyIdMap[item.id],
+                    );
+                    console.error(
+                      `[${type}][${subtype}][${file}] Error code: ${err.code}, LegacyId: ${idToLegacyIdMap[item.id]}`,
+                    );
+                  });
+              }
+            });
+        }
+        break;
+      }
       default:
         console.warn(`No processor defined for type: ${type}`);
         return;
```
@@ -1509,6 +1685,9 @@ migrate()

```diff
         },
         { key: 'uploadIdMap', value: uploadIdMap },
         { key: 'submissionIdMap', value: submissionIdMap },
+        { key: 'llmProviderIdMap', value: llmProviderIdMap },
+        { key: 'llmModelIdMap', value: llmModelIdMap },
+        { key: 'aiWorkflowIdMap', value: aiWorkflowIdMap }
       ].forEach((f) => {
         if (!fs.existsSync('.tmp')) {
           fs.mkdirSync('.tmp');
```
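Only the first lines of this persistence block are visible in the hunk; the `forEach` evidently writes each map into `.tmp` so later runs, and dependent types such as `ai_workflow` (which needs `scorecardIdMap` and `llmModelIdMap`), can resolve legacy ids. A plausible shape for that serialization, assuming the same JSON layout the `readIdMap` sketch above would consume (`writeIdMap` is a hypothetical name; the script's real loop may differ):

```ts
import * as fs from 'fs';

// Hypothetical counterpart to readIdMap: persist a legacy-id -> new-id Map
// as JSON under .tmp so a later run can rebuild it.
function writeIdMap(key: string, value: Map<string, string>): void {
  if (!fs.existsSync('.tmp')) {
    fs.mkdirSync('.tmp');
  }
  fs.writeFileSync(
    `.tmp/${key}.json`,
    JSON.stringify(Object.fromEntries(value), null, 2),
  );
}
```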
prisma/migrations/20250829225539_ai_workflows/migration.sql
121 changes: 121 additions & 0 deletions

@@ -0,0 +1,121 @@
```sql
-- CreateTable
CREATE TABLE "llmProvider" (
    "id" VARCHAR(14) NOT NULL DEFAULT nanoid(),
    "name" VARCHAR NOT NULL,
    "createdAt" TIMESTAMP(3) NOT NULL,
    "createdBy" TEXT NOT NULL,

    CONSTRAINT "llmProvider_pkey" PRIMARY KEY ("id")
);

-- CreateTable
CREATE TABLE "llmModel" (
    "id" VARCHAR(14) NOT NULL DEFAULT nanoid(),
    "providerId" VARCHAR(14) NOT NULL,
    "name" VARCHAR NOT NULL,
    "description" TEXT NOT NULL,
    "icon" VARCHAR,
    "url" VARCHAR,
    "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "createdBy" TEXT NOT NULL,

    CONSTRAINT "llmModel_pkey" PRIMARY KEY ("id")
);

-- CreateTable
CREATE TABLE "aiWorkflow" (
    "id" VARCHAR(14) NOT NULL DEFAULT nanoid(),
    "name" VARCHAR NOT NULL,
    "llmId" VARCHAR(14) NOT NULL,
    "description" TEXT NOT NULL,
    "defUrl" VARCHAR NOT NULL,
    "gitId" VARCHAR NOT NULL,
    "gitOwner" VARCHAR NOT NULL,
    "scorecardId" VARCHAR(14) NOT NULL,
    "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "createdBy" TEXT NOT NULL,
    "updatedAt" TIMESTAMP(3) NOT NULL,
    "updatedBy" TEXT NOT NULL,

    CONSTRAINT "aiWorkflow_pkey" PRIMARY KEY ("id")
);

-- CreateTable
CREATE TABLE "aiWorkflowRun" (
    "id" VARCHAR(14) NOT NULL DEFAULT nanoid(),
    "workflowId" VARCHAR(14) NOT NULL,
    "submissionId" VARCHAR(14) NOT NULL,
    "startedAt" TIMESTAMP(3),
    "completedAt" TIMESTAMP(3),
    "gitRunId" VARCHAR NOT NULL,
    "score" DOUBLE PRECISION,
    "status" VARCHAR NOT NULL,

    CONSTRAINT "aiWorkflowRun_pkey" PRIMARY KEY ("id")
);

-- CreateTable
CREATE TABLE "aiWorkflowRunItem" (
    "id" VARCHAR(14) NOT NULL DEFAULT nanoid(),
    "workflowRunId" VARCHAR(14) NOT NULL,
    "scorecardQuestionId" VARCHAR(14) NOT NULL,
    "content" TEXT NOT NULL,
    "upVotes" INTEGER NOT NULL DEFAULT 0,
    "downVotes" INTEGER NOT NULL DEFAULT 0,
    "questionScore" DOUBLE PRECISION,
    "createdAt" TIMESTAMP(3) NOT NULL,
    "createdBy" TEXT NOT NULL,

    CONSTRAINT "aiWorkflowRunItem_pkey" PRIMARY KEY ("id")
);

-- CreateTable
CREATE TABLE "aiWorkflowRunItemComment" (
    "id" VARCHAR(14) NOT NULL DEFAULT nanoid(),
    "workflowRunItemId" VARCHAR(14) NOT NULL,
    "userId" TEXT NOT NULL,
    "content" TEXT NOT NULL,
    "parentId" VARCHAR(14),
    "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "createdBy" TEXT NOT NULL,
    "updatedAt" TIMESTAMP(3) NOT NULL,
    "updatedBy" TEXT NOT NULL,

    CONSTRAINT "aiWorkflowRunItemComment_pkey" PRIMARY KEY ("id")
);

-- CreateIndex
CREATE UNIQUE INDEX "llmProvider_name_key" ON "llmProvider"("name");

-- CreateIndex
CREATE UNIQUE INDEX "llmModel_name_key" ON "llmModel"("name");

-- CreateIndex
CREATE UNIQUE INDEX "aiWorkflow_name_key" ON "aiWorkflow"("name");

-- AddForeignKey
ALTER TABLE "llmModel" ADD CONSTRAINT "llmModel_providerId_fkey" FOREIGN KEY ("providerId") REFERENCES "llmProvider"("id") ON DELETE RESTRICT ON UPDATE CASCADE;

-- AddForeignKey
ALTER TABLE "aiWorkflow" ADD CONSTRAINT "aiWorkflow_llmId_fkey" FOREIGN KEY ("llmId") REFERENCES "llmModel"("id") ON DELETE RESTRICT ON UPDATE CASCADE;

-- AddForeignKey
ALTER TABLE "aiWorkflow" ADD CONSTRAINT "aiWorkflow_scorecardId_fkey" FOREIGN KEY ("scorecardId") REFERENCES "scorecard"("id") ON DELETE RESTRICT ON UPDATE CASCADE;

-- AddForeignKey
ALTER TABLE "aiWorkflowRun" ADD CONSTRAINT "aiWorkflowRun_workflowId_fkey" FOREIGN KEY ("workflowId") REFERENCES "aiWorkflow"("id") ON DELETE RESTRICT ON UPDATE CASCADE;

-- AddForeignKey
ALTER TABLE "aiWorkflowRun" ADD CONSTRAINT "aiWorkflowRun_submissionId_fkey" FOREIGN KEY ("submissionId") REFERENCES "submission"("id") ON DELETE RESTRICT ON UPDATE CASCADE;

-- AddForeignKey
ALTER TABLE "aiWorkflowRunItem" ADD CONSTRAINT "aiWorkflowRunItem_workflowRunId_fkey" FOREIGN KEY ("workflowRunId") REFERENCES "aiWorkflowRun"("id") ON DELETE RESTRICT ON UPDATE CASCADE;

-- AddForeignKey
ALTER TABLE "aiWorkflowRunItem" ADD CONSTRAINT "aiWorkflowRunItem_scorecardQuestionId_fkey" FOREIGN KEY ("scorecardQuestionId") REFERENCES "scorecardQuestion"("id") ON DELETE RESTRICT ON UPDATE CASCADE;

-- AddForeignKey
ALTER TABLE "aiWorkflowRunItemComment" ADD CONSTRAINT "aiWorkflowRunItemComment_workflowRunItemId_fkey" FOREIGN KEY ("workflowRunItemId") REFERENCES "aiWorkflowRunItem"("id") ON DELETE RESTRICT ON UPDATE CASCADE;

-- AddForeignKey
ALTER TABLE "aiWorkflowRunItemComment" ADD CONSTRAINT "aiWorkflowRunItemComment_parentId_fkey" FOREIGN KEY ("parentId") REFERENCES "aiWorkflowRunItemComment"("id") ON DELETE SET NULL ON UPDATE CASCADE;
```
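The `schema.prisma` changes behind this migration are not part of this diff view, but the foreign keys it creates (`llmModel.providerId` referencing `llmProvider`, `aiWorkflow.llmId` referencing `llmModel`, `aiWorkflow.scorecardId` referencing `scorecard`) can be exercised through the generated client, whose model and field names match how the migration script above uses them. A minimal illustrative example follows; `seedExampleWorkflow`, `existingScorecardId`, and every literal value are made up, and required fields are filled in based on the `NOT NULL` columns above:

```ts
import { PrismaClient } from '@prisma/client';

const prisma = new PrismaClient();

// Illustrative only: create a provider, a model under it, and a workflow wired
// to that model and an existing scorecard row. Not part of this PR.
async function seedExampleWorkflow(existingScorecardId: string) {
  const provider = await prisma.llmProvider.create({
    data: { name: 'OpenAI', createdAt: new Date(), createdBy: 'admin' },
  });

  const model = await prisma.llmModel.create({
    data: {
      providerId: provider.id,
      name: 'gpt-4o',
      description: 'Example model',
      createdBy: 'admin',
    },
  });

  return prisma.aiWorkflow.create({
    data: {
      name: 'Example review workflow',
      llmId: model.id,
      description: 'Runs an AI review against a scorecard',
      defUrl: 'https://example.com/workflow.yml',
      gitId: 'example-repo',
      gitOwner: 'example-org',
      scorecardId: existingScorecardId,
      createdBy: 'admin',
      updatedAt: new Date(),
      updatedBy: 'admin',
    },
  });
}
```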
Review comment: The log statement uses the variable `file`, but `file` is not defined within the current scope. Ensure that `file` is defined or passed to the function if needed.
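One way to address that, sketched only as an illustration (the actual fix, if any, would be in the later commits of this PR), is to make the file name an explicit parameter of `processType` rather than an outer-scope variable:

```ts
// Hypothetical sketch of the reviewer's point: pass the current file name in
// explicitly so the log templates that interpolate `file` always resolve.
async function processType(type: string, file: string, subtype?: string) {
  console.log(`[${type}][${subtype}][${file}] Processing file`);
  // ...existing per-type handling...
}
```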