-
Notifications
You must be signed in to change notification settings - Fork 9
feat(PM-1793): Create AI workflow api implementation #34
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Changes from 10 commits
8753af5
245f8f7
14c1641
6172269
b7dfcf6
0ac583d
7fa9f50
e601a0b
bd57ad1
71baf40
1e407f0
f1b5c62
0a7864c
8915092
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -75,6 +75,7 @@ workflows: | |
only: | ||
- develop | ||
- feat/ai-workflows | ||
- pm-1793 | ||
|
||
- 'build-prod': | ||
context: org-global | ||
|
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -43,6 +43,9 @@ const modelMappingKeys = [ | |
'review', | ||
'review_item', | ||
'review_item_comment', | ||
'llm_provider', | ||
'llm_model', | ||
'ai_workflow', | ||
]; | ||
const subModelMappingKeys = { | ||
review_item_comment: ['reviewItemComment', 'appeal', 'appealResponse'], | ||
|
@@ -102,6 +105,9 @@ const reviewItemCommentAppealResponseIdMap = readIdMap( | |
); | ||
const uploadIdMap = readIdMap('uploadIdMap'); | ||
const submissionIdMap = readIdMap('submissionIdMap'); | ||
const llmProviderIdMap = readIdMap('llmProviderIdMap'); | ||
const llmModelIdMap = readIdMap('llmModelIdMap'); | ||
const aiWorkflowIdMap = readIdMap('aiWorkflowIdMap'); | ||
|
||
// read resourceSubmissionSet | ||
const rsSetFile = '.tmp/resourceSubmissionSet.json'; | ||
|
@@ -807,28 +813,26 @@ async function processType(type: string, subtype?: string) { | |
} | ||
case 'scorecard': { | ||
console.log(`[${type}][${file}] Processing file`); | ||
const processedData = jsonData[key] | ||
.filter((sc) => !scorecardIdMap.has(sc.scorecard_id)) | ||
.map((sc) => { | ||
const id = nanoid(14); | ||
scorecardIdMap.set(sc.scorecard_id, id); | ||
return { | ||
id: id, | ||
legacyId: sc.scorecard_id, | ||
status: scorecardStatusMap[sc.scorecard_status_id], | ||
type: scorecardTypeMap[sc.scorecard_type_id], | ||
challengeTrack: projectCategoryMap[sc.project_category_id].type, | ||
challengeType: projectCategoryMap[sc.project_category_id].name, | ||
name: sc.name, | ||
version: sc.version, | ||
minScore: parseFloat(sc.min_score), | ||
maxScore: parseFloat(sc.max_score), | ||
createdAt: new Date(sc.create_date), | ||
createdBy: sc.create_user, | ||
updatedAt: new Date(sc.modify_date), | ||
updatedBy: sc.modify_user, | ||
}; | ||
}); | ||
const processedData = jsonData[key].map((sc) => { | ||
const id = nanoid(14); | ||
scorecardIdMap.set(sc.scorecard_id, id); | ||
return { | ||
id: id, | ||
legacyId: sc.scorecard_id, | ||
status: scorecardStatusMap[sc.scorecard_status_id], | ||
type: scorecardTypeMap[sc.scorecard_type_id], | ||
challengeTrack: projectCategoryMap[sc.project_category_id].type, | ||
challengeType: projectCategoryMap[sc.project_category_id].name, | ||
name: sc.name, | ||
version: sc.version, | ||
minScore: parseFloat(sc.min_score), | ||
maxScore: parseFloat(sc.max_score), | ||
createdAt: new Date(sc.create_date), | ||
createdBy: sc.create_user, | ||
updatedAt: new Date(sc.modify_date), | ||
updatedBy: sc.modify_user, | ||
}; | ||
}); | ||
const totalBatches = Math.ceil(processedData.length / batchSize); | ||
for (let i = 0; i < processedData.length; i += batchSize) { | ||
const batchIndex = i / batchSize + 1; | ||
|
@@ -1342,6 +1346,159 @@ async function processType(type: string, subtype?: string) { | |
} | ||
break; | ||
} | ||
case 'llm_provider': { | ||
console.log(`[${type}][${subtype}][${file}] Processing file`); | ||
const idToLegacyIdMap = {}; | ||
const processedData = jsonData[key].map((c) => { | ||
const id = nanoid(14); | ||
llmProviderIdMap.set(c.llm_provider_id, id); | ||
idToLegacyIdMap[id] = c.llm_provider_id; | ||
return { | ||
id: id, | ||
name: c.name, | ||
createdAt: new Date(c.create_date), | ||
createdBy: c.create_user, | ||
}; | ||
}); | ||
|
||
const totalBatches = Math.ceil(processedData.length / batchSize); | ||
for (let i = 0; i < processedData.length; i += batchSize) { | ||
const batchIndex = i / batchSize + 1; | ||
console.log( | ||
`[${type}][${subtype}][${file}] Processing batch ${batchIndex}/${totalBatches}`, | ||
); | ||
const batch = processedData.slice(i, i + batchSize); | ||
await prisma.llmProvider | ||
.createMany({ | ||
data: batch, | ||
}) | ||
.catch(async () => { | ||
console.error( | ||
`[${type}][${subtype}][${file}] An error occurred, retrying individually`, | ||
); | ||
for (const item of batch) { | ||
await prisma.llmProvider | ||
.create({ | ||
data: item, | ||
}) | ||
.catch((err) => { | ||
llmProviderIdMap.delete(idToLegacyIdMap[item.id]); | ||
console.error( | ||
`[${type}][${subtype}][${file}] Error code: ${err.code}, LegacyId: ${idToLegacyIdMap[item.id]}`, | ||
); | ||
}); | ||
} | ||
}); | ||
} | ||
break; | ||
} | ||
case 'llm_model': { | ||
console.log(`[${type}][${subtype}][${file}] Processing file`); | ||
const idToLegacyIdMap = {}; | ||
const processedData = jsonData[key].map((c) => { | ||
const id = nanoid(14); | ||
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. Consider using a more descriptive variable name than `c` (e.g. `model`) in the map callback. |
||
llmModelIdMap.set(c.llm_model_id, id); | ||
idToLegacyIdMap[id] = c.llm_model_id; | ||
console.log(llmProviderIdMap.get(c.provider_id), 'c.provider_id'); | ||
return { | ||
id: id, | ||
providerId: llmProviderIdMap.get(c.provider_id), | ||
name: c.name, | ||
description: c.description, | ||
icon: c.icon, | ||
url: c.url, | ||
createdAt: new Date(c.create_date), | ||
createdBy: c.create_user, | ||
}; | ||
}); | ||
|
||
console.log(llmProviderIdMap, processedData, 'processedData'); | ||
|
||
const totalBatches = Math.ceil(processedData.length / batchSize); | ||
for (let i = 0; i < processedData.length; i += batchSize) { | ||
const batchIndex = i / batchSize + 1; | ||
console.log( | ||
`[${type}][${subtype}][${file}] Processing batch ${batchIndex}/${totalBatches}`, | ||
); | ||
const batch = processedData.slice(i, i + batchSize); | ||
await prisma.llmModel | ||
.createMany({ | ||
data: batch, | ||
}) | ||
.catch(async () => { | ||
console.error( | ||
`[${type}][${subtype}][${file}] An error occurred, retrying individually`, | ||
); | ||
for (const item of batch) { | ||
await prisma.llmModel | ||
.create({ | ||
data: item, | ||
}) | ||
.catch((err) => { | ||
llmModelIdMap.delete(idToLegacyIdMap[item.id]); | ||
console.error( | ||
`[${type}][${subtype}][${file}] Error code: ${err.code}, LegacyId: ${idToLegacyIdMap[item.id]}`, | ||
); | ||
}); | ||
} | ||
}); | ||
} | ||
break; | ||
} | ||
case 'ai_workflow': { | ||
console.log(`[${type}][${subtype}][${file}] Processing file`); | ||
const idToLegacyIdMap = {}; | ||
const processedData = jsonData[key].map((c) => { | ||
const id = nanoid(14); | ||
aiWorkflowIdMap.set(c.ai_workflow_id, id); | ||
idToLegacyIdMap[id] = c.ai_workflow_id; | ||
return { | ||
id: id, | ||
llmId: llmModelIdMap.get(c.llm_id), | ||
name: c.name, | ||
description: c.description, | ||
defUrl: c.def_url, | ||
gitId: c.git_id, | ||
gitOwner: c.git_owner, | ||
scorecardId: scorecardIdMap.get(c.scorecard_id), | ||
createdAt: new Date(c.create_date), | ||
createdBy: c.create_user, | ||
updatedAt: new Date(c.modify_date), | ||
updatedBy: c.modify_user, | ||
}; | ||
}); | ||
|
||
const totalBatches = Math.ceil(processedData.length / batchSize); | ||
for (let i = 0; i < processedData.length; i += batchSize) { | ||
const batchIndex = i / batchSize + 1; | ||
console.log( | ||
`[${type}][${subtype}][${file}] Processing batch ${batchIndex}/${totalBatches}`, | ||
); | ||
const batch = processedData.slice(i, i + batchSize); | ||
await prisma.aiWorkflow | ||
.createMany({ | ||
data: batch, | ||
}) | ||
.catch(async () => { | ||
console.error( | ||
`[${type}][${subtype}][${file}] An error occurred, retrying individually`, | ||
); | ||
for (const item of batch) { | ||
await prisma.aiWorkflow | ||
.create({ | ||
data: item, | ||
}) | ||
.catch((err) => { | ||
aiWorkflowIdMap.delete(idToLegacyIdMap[item.id]); | ||
console.error( | ||
`[${type}][${subtype}][${file}] Error code: ${err.code}, LegacyId: ${idToLegacyIdMap[item.id]}`, | ||
); | ||
}); | ||
} | ||
}); | ||
} | ||
break; | ||
} | ||
default: | ||
console.warn(`No processor defined for type: ${type}`); | ||
return; | ||
|
@@ -1509,6 +1666,9 @@ migrate() | |
}, | ||
{ key: 'uploadIdMap', value: uploadIdMap }, | ||
{ key: 'submissionIdMap', value: submissionIdMap }, | ||
{ key: 'llmProviderIdMap', value: llmProviderIdMap }, | ||
{ key: 'llmModelIdMap', value: llmModelIdMap }, | ||
{ key: 'aiWorkflowIdMap', value: aiWorkflowIdMap }, | ||
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. There is an extra comma at the end of the object in line 1671. This may lead to syntax errors in environments that do not support trailing commas. Consider removing the trailing comma. |
||
].forEach((f) => { | ||
if (!fs.existsSync('.tmp')) { | ||
fs.mkdirSync('.tmp'); | ||
|
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,121 @@ | ||
-- CreateTable | ||
CREATE TABLE "llmProvider" ( | ||
"id" VARCHAR(14) NOT NULL DEFAULT nanoid(), | ||
"name" VARCHAR NOT NULL, | ||
"createdAt" TIMESTAMP(3) NOT NULL, | ||
"createdBy" TEXT NOT NULL, | ||
|
||
CONSTRAINT "llmProvider_pkey" PRIMARY KEY ("id") | ||
); | ||
|
||
-- CreateTable | ||
CREATE TABLE "llmModel" ( | ||
"id" VARCHAR(14) NOT NULL DEFAULT nanoid(), | ||
"providerId" VARCHAR(14) NOT NULL, | ||
"name" VARCHAR NOT NULL, | ||
"description" TEXT NOT NULL, | ||
"icon" VARCHAR, | ||
"url" VARCHAR, | ||
"createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP, | ||
"createdBy" TEXT NOT NULL, | ||
|
||
CONSTRAINT "llmModel_pkey" PRIMARY KEY ("id") | ||
); | ||
|
||
-- CreateTable | ||
CREATE TABLE "aiWorkflow" ( | ||
"id" VARCHAR(14) NOT NULL DEFAULT nanoid(), | ||
"name" VARCHAR NOT NULL, | ||
"llmId" VARCHAR(14) NOT NULL, | ||
"description" TEXT NOT NULL, | ||
"defUrl" VARCHAR NOT NULL, | ||
"gitId" VARCHAR NOT NULL, | ||
"gitOwner" VARCHAR NOT NULL, | ||
"scorecardId" VARCHAR(14) NOT NULL, | ||
"createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP, | ||
"createdBy" TEXT NOT NULL, | ||
"updatedAt" TIMESTAMP(3) NOT NULL, | ||
"updatedBy" TEXT NOT NULL, | ||
|
||
CONSTRAINT "aiWorkflow_pkey" PRIMARY KEY ("id") | ||
); | ||
|
||
-- CreateTable | ||
CREATE TABLE "aiWorkflowRun" ( | ||
"id" VARCHAR(14) NOT NULL DEFAULT nanoid(), | ||
"workflowId" VARCHAR(14) NOT NULL, | ||
"submissionId" VARCHAR(14) NOT NULL, | ||
"startedAt" TIMESTAMP(3), | ||
"completedAt" TIMESTAMP(3), | ||
"gitRunId" VARCHAR NOT NULL, | ||
"score" DOUBLE PRECISION, | ||
"status" VARCHAR NOT NULL, | ||
|
||
CONSTRAINT "aiWorkflowRun_pkey" PRIMARY KEY ("id") | ||
); | ||
|
||
-- CreateTable | ||
CREATE TABLE "aiWorkflowRunItem" ( | ||
"id" VARCHAR(14) NOT NULL DEFAULT nanoid(), | ||
"workflowRunId" VARCHAR(14) NOT NULL, | ||
"scorecardQuestionId" VARCHAR(14) NOT NULL, | ||
"content" TEXT NOT NULL, | ||
"upVotes" INTEGER NOT NULL DEFAULT 0, | ||
"downVotes" INTEGER NOT NULL DEFAULT 0, | ||
"questionScore" DOUBLE PRECISION, | ||
"createdAt" TIMESTAMP(3) NOT NULL, | ||
"createdBy" TEXT NOT NULL, | ||
|
||
CONSTRAINT "aiWorkflowRunItem_pkey" PRIMARY KEY ("id") | ||
); | ||
|
||
-- CreateTable | ||
CREATE TABLE "aiWorkflowRunItemComment" ( | ||
"id" VARCHAR(14) NOT NULL DEFAULT nanoid(), | ||
"workflowRunItemId" VARCHAR(14) NOT NULL, | ||
"userId" TEXT NOT NULL, | ||
"content" TEXT NOT NULL, | ||
"parentId" VARCHAR(14), | ||
"createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP, | ||
"createdBy" TEXT NOT NULL, | ||
"updatedAt" TIMESTAMP(3) NOT NULL, | ||
"updatedBy" TEXT NOT NULL, | ||
|
||
CONSTRAINT "aiWorkflowRunItemComment_pkey" PRIMARY KEY ("id") | ||
); | ||
|
||
-- CreateIndex | ||
CREATE UNIQUE INDEX "llmProvider_name_key" ON "llmProvider"("name"); | ||
|
||
-- CreateIndex | ||
CREATE UNIQUE INDEX "llmModel_name_key" ON "llmModel"("name"); | ||
|
||
-- CreateIndex | ||
CREATE UNIQUE INDEX "aiWorkflow_name_key" ON "aiWorkflow"("name"); | ||
|
||
-- AddForeignKey | ||
ALTER TABLE "llmModel" ADD CONSTRAINT "llmModel_providerId_fkey" FOREIGN KEY ("providerId") REFERENCES "llmProvider"("id") ON DELETE RESTRICT ON UPDATE CASCADE; | ||
|
||
-- AddForeignKey | ||
ALTER TABLE "aiWorkflow" ADD CONSTRAINT "aiWorkflow_llmId_fkey" FOREIGN KEY ("llmId") REFERENCES "llmModel"("id") ON DELETE RESTRICT ON UPDATE CASCADE; | ||
|
||
-- AddForeignKey | ||
ALTER TABLE "aiWorkflow" ADD CONSTRAINT "aiWorkflow_scorecardId_fkey" FOREIGN KEY ("scorecardId") REFERENCES "scorecard"("id") ON DELETE RESTRICT ON UPDATE CASCADE; | ||
|
||
-- AddForeignKey | ||
ALTER TABLE "aiWorkflowRun" ADD CONSTRAINT "aiWorkflowRun_workflowId_fkey" FOREIGN KEY ("workflowId") REFERENCES "aiWorkflow"("id") ON DELETE RESTRICT ON UPDATE CASCADE; | ||
|
||
-- AddForeignKey | ||
ALTER TABLE "aiWorkflowRun" ADD CONSTRAINT "aiWorkflowRun_submissionId_fkey" FOREIGN KEY ("submissionId") REFERENCES "submission"("id") ON DELETE RESTRICT ON UPDATE CASCADE; | ||
|
||
-- AddForeignKey | ||
ALTER TABLE "aiWorkflowRunItem" ADD CONSTRAINT "aiWorkflowRunItem_workflowRunId_fkey" FOREIGN KEY ("workflowRunId") REFERENCES "aiWorkflowRun"("id") ON DELETE RESTRICT ON UPDATE CASCADE; | ||
|
||
-- AddForeignKey | ||
ALTER TABLE "aiWorkflowRunItem" ADD CONSTRAINT "aiWorkflowRunItem_scorecardQuestionId_fkey" FOREIGN KEY ("scorecardQuestionId") REFERENCES "scorecardQuestion"("id") ON DELETE RESTRICT ON UPDATE CASCADE; | ||
|
||
-- AddForeignKey | ||
ALTER TABLE "aiWorkflowRunItemComment" ADD CONSTRAINT "aiWorkflowRunItemComment_workflowRunItemId_fkey" FOREIGN KEY ("workflowRunItemId") REFERENCES "aiWorkflowRunItem"("id") ON DELETE RESTRICT ON UPDATE CASCADE; | ||
|
||
-- AddForeignKey | ||
ALTER TABLE "aiWorkflowRunItemComment" ADD CONSTRAINT "aiWorkflowRunItemComment_parentId_fkey" FOREIGN KEY ("parentId") REFERENCES "aiWorkflowRunItemComment"("id") ON DELETE SET NULL ON UPDATE CASCADE; |
There was a problem hiding this comment.
Choose a reason for hiding this comment.
The reason will be displayed to describe this comment to others. Learn more.
The log statement uses the variable `file`, but `file` is not defined within the current scope. Ensure that `file` is defined or passed to the function if needed.