182 changes: 181 additions & 1 deletion prisma/migrate.ts
@@ -43,6 +43,9 @@ const modelMappingKeys = [
'review',
'review_item',
'review_item_comment',
'llm_provider',
'llm_model',
'ai_workflow',
];
const subModelMappingKeys = {
review_item_comment: ['reviewItemComment', 'appeal', 'appealResponse'],
@@ -102,6 +105,9 @@ const reviewItemCommentAppealResponseIdMap = readIdMap(
);
const uploadIdMap = readIdMap('uploadIdMap');
const submissionIdMap = readIdMap('submissionIdMap');
const llmProviderIdMap = readIdMap('llmProviderIdMap');
const llmModelIdMap = readIdMap('llmModelIdMap');
const aiWorkflowIdMap = readIdMap('aiWorkflowIdMap');
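Reviewer note: the three new readIdMap calls follow the file's existing checkpointing pattern, rehydrating each legacy-to-new id mapping from a JSON file under .tmp so a re-run reuses previously generated ids. A minimal sketch of that pattern, assuming the persisted format is an array of [legacyId, newId] pairs (readIdMap itself is defined outside this diff):

import * as fs from 'fs';

// Hypothetical sketch of the helper these calls rely on: hydrate a Map
// from .tmp/<name>.json, or start empty on a fresh run.
function readIdMapSketch(name: string): Map<string, string> {
  const file = `.tmp/${name}.json`;
  if (!fs.existsSync(file)) return new Map<string, string>();
  const pairs: [string, string][] = JSON.parse(fs.readFileSync(file, 'utf8'));
  return new Map(pairs);
}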

// read resourceSubmissionSet
const rsSetFile = '.tmp/resourceSubmissionSet.json';
@@ -808,7 +814,6 @@ async function processType(type: string, subtype?: string) {
case 'scorecard': {
console.log(`[${type}][${file}] Processing file`);
const processedData = jsonData[key]
.filter((sc) => !scorecardIdMap.has(sc.scorecard_id))
.map((sc) => {
const id = nanoid(14);
scorecardIdMap.set(sc.scorecard_id, id);
@@ -1342,6 +1347,178 @@ async function processType(type: string, subtype?: string) {
}
break;
}
case 'llm_provider': {
console.log(`[${type}][${subtype}][${file}] Processing file`);
const idToLegacyIdMap = {};
const processedData = jsonData[key]
.map((c) => {
const id = nanoid(14);
llmProviderIdMap.set(
c.llm_provider_id,
id,
);
idToLegacyIdMap[id] = c.llm_provider_id;
return {
id: id,
name: c.name,
createdAt: new Date(c.create_date),
createdBy: c.create_user,
};
});

const totalBatches = Math.ceil(processedData.length / batchSize);
for (let i = 0; i < processedData.length; i += batchSize) {
const batchIndex = i / batchSize + 1;
console.log(
`[${type}][${subtype}][${file}] Processing batch ${batchIndex}/${totalBatches}`,
);
const batch = processedData.slice(i, i + batchSize);
await prisma.llmProvider
.createMany({
data: batch,
})
.catch(async () => {
console.error(
`[${type}][${subtype}][${file}] An error occurred, retrying individually`,
);
for (const item of batch) {
await prisma.llmProvider
.create({
data: item,
})
.catch((err) => {
llmProviderIdMap.delete(
idToLegacyIdMap[item.id],
);
console.error(
`[${type}][${subtype}][${file}] Error code: ${err.code}, LegacyId: ${idToLegacyIdMap[item.id]}`,
);
});
}
});
}
break;
}
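Reviewer note: this case reuses the file's batch-insert strategy: createMany for speed, then, because createMany is all-or-nothing per batch, a row-by-row retry on failure so one bad record does not sink its whole batch, with the id-map entry rolled back for rows that still fail. A hedged generalization of that pattern, with hypothetical names (the real code inlines it per model):

// Hypothetical generalization of the createMany-then-retry pattern above.
async function insertWithFallback<T extends { id: string }>(
  createMany: (args: { data: T[] }) => Promise<unknown>,
  createOne: (args: { data: T }) => Promise<unknown>,
  rows: T[],
  batchSize: number,
  onRowFailure: (row: T, err: unknown) => void,
): Promise<void> {
  for (let i = 0; i < rows.length; i += batchSize) {
    const batch = rows.slice(i, i + batchSize);
    try {
      await createMany({ data: batch }); // fast path: one multi-row INSERT
    } catch {
      // The batch insert is atomic, so salvage what we can row by row.
      for (const row of batch) {
        try {
          await createOne({ data: row });
        } catch (err) {
          onRowFailure(row, err); // e.g. drop the row's id-map entry
        }
      }
    }
  }
}

// Usage (hypothetical): wrap the Prisma delegate methods in arrow functions
// so their `this` binding is preserved:
// await insertWithFallback(
//   (a) => prisma.llmProvider.createMany(a),
//   (a) => prisma.llmProvider.create(a),
//   processedData, batchSize,
//   (row) => llmProviderIdMap.delete(idToLegacyIdMap[row.id]),
// );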
case 'llm_model': {
console.log(`[${type}][${subtype}][${file}] Processing file`);
const idToLegacyIdMap = {};
const processedData = jsonData[key]
.map((c) => {
const id = nanoid(14);
llmModelIdMap.set(
c.llm_model_id,
id,
);
idToLegacyIdMap[id] = c.llm_model_id;
return {
id: id,
providerId: llmProviderIdMap.get(c.provider_id),
name: c.name,
description: c.description,
icon: c.icon,
url: c.url,
createdAt: new Date(c.create_date),
createdBy: c.create_user,
};
});

const totalBatches = Math.ceil(processedData.length / batchSize);
for (let i = 0; i < processedData.length; i += batchSize) {
const batchIndex = i / batchSize + 1;
console.log(
`[${type}][${subtype}][${file}] Processing batch ${batchIndex}/${totalBatches}`,
);
const batch = processedData.slice(i, i + batchSize);
await prisma.llmModel
.createMany({
data: batch,
})
.catch(async () => {
console.error(
`[${type}][${subtype}][${file}] An error occurred, retrying individually`,
);
for (const item of batch) {
await prisma.llmModel
.create({
data: item,
})
.catch((err) => {
llmModelIdMap.delete(
idToLegacyIdMap[item.id],
);
console.error(
`[${type}][${subtype}][${file}] Error code: ${err.code}, LegacyId: ${idToLegacyIdMap[item.id]}`,
);
});
}
});
}
break;
}
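Reviewer note: providerId is filled from llmProviderIdMap.get(c.provider_id), which returns undefined when the provider row was skipped or failed earlier; since llmModel.providerId is a NOT NULL foreign key in the new migration, such rows only surface as insert errors later. A hedged guard that would skip them up front (hypothetical helper, not in the PR):

// Hypothetical pre-insert guard: resolve the mapped provider id or flag the row.
function resolveProviderId(
  providerIdMap: Map<string, string>,
  legacyProviderId: string,
  legacyModelId: string,
): string | undefined {
  const id = providerIdMap.get(legacyProviderId);
  if (id === undefined) {
    console.warn(
      `Skipping llm_model ${legacyModelId}: provider ${legacyProviderId} was not migrated`,
    );
  }
  return id;
}

The map callback would then return null for unresolved rows and filter them out before batching.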
case 'ai_workflow': {
console.log(`[${type}][${subtype}][${file}] Processing file`);
const idToLegacyIdMap = {};
const processedData = jsonData[key]
.map((c) => {
const id = nanoid(14);
aiWorkflowIdMap.set(
c.ai_workflow_id,
id,
);
idToLegacyIdMap[id] = c.ai_workflow_id;
return {
id: id,
llmId: llmModelIdMap.get(c.llm_id),
name: c.name,
description: c.description,
defUrl: c.def_url,
gitId: c.git_id,
gitOwner: c.git_owner,
scorecardId: scorecardIdMap.get(c.scorecard_id),
createdAt: new Date(c.create_date),
createdBy: c.create_user,
updatedAt: new Date(c.modify_date),
updatedBy: c.modify_user,
};
});

const totalBatches = Math.ceil(processedData.length / batchSize);
for (let i = 0; i < processedData.length; i += batchSize) {
const batchIndex = i / batchSize + 1;
console.log(
`[${type}][${subtype}][${file}] Processing batch ${batchIndex}/${totalBatches}`,
);
const batch = processedData.slice(i, i + batchSize);
await prisma.aiWorkflow
.createMany({
data: batch,
})
.catch(async () => {
console.error(
`[${type}][${subtype}][${file}] An error occurred, retrying individually`,
);
for (const item of batch) {
await prisma.aiWorkflow
.create({
data: item,
})
.catch((err) => {
aiWorkflowIdMap.delete(
idToLegacyIdMap[item.id],
);
console.error(
`[${type}][${subtype}][${file}] Error code: ${err.code}, LegacyId: ${idToLegacyIdMap[item.id]}`,
);
});
}
});
}
break;
}
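Reviewer note: ai_workflow rows depend on two earlier maps at once (llmModelIdMap and scorecardIdMap), so the processing order providers → models → workflows, after scorecards, is load-bearing. A small hedged pre-flight check in the same spirit, assuming the legacy column names used above:

// Hypothetical shape of a legacy ai_workflow row, per the fields read above.
interface LegacyAiWorkflow {
  ai_workflow_id: string;
  llm_id: string;
  scorecard_id: string;
}

// Both llmId and scorecardId are NOT NULL foreign keys in the new
// aiWorkflow table, so a row is only insertable if both lookups resolve.
function workflowFksResolvable(
  row: LegacyAiWorkflow,
  llmModelIdMap: Map<string, string>,
  scorecardIdMap: Map<string, string>,
): boolean {
  return llmModelIdMap.has(row.llm_id) && scorecardIdMap.has(row.scorecard_id);
}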
default:
console.warn(`No processor defined for type: ${type}`);
return;
@@ -1509,6 +1686,9 @@ migrate()
},
{ key: 'uploadIdMap', value: uploadIdMap },
{ key: 'submissionIdMap', value: submissionIdMap },
{ key: 'llmProviderIdMap', value: llmProviderIdMap },
{ key: 'llmModelIdMap', value: llmModelIdMap },
{ key: 'aiWorkflowIdMap', value: aiWorkflowIdMap },
].forEach((f) => {
if (!fs.existsSync('.tmp')) {
fs.mkdirSync('.tmp');
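Reviewer note: the three new entries above feed the same persistence loop the other id maps use, writing each Map back to .tmp so the next run's readIdMap calls pick them up. A minimal sketch of the write side, assuming the pair-array format from the read sketch earlier (the actual serialization lives in the unexpanded part of this diff):

import * as fs from 'fs';

// Hypothetical counterpart to readIdMapSketch: persist a Map as JSON pairs.
function writeIdMapSketch(key: string, value: Map<string, string>): void {
  if (!fs.existsSync('.tmp')) fs.mkdirSync('.tmp');
  fs.writeFileSync(`.tmp/${key}.json`, JSON.stringify(Array.from(value.entries())));
}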
121 changes: 121 additions & 0 deletions prisma/migrations/20250829225539_ai_workflows/migration.sql
@@ -0,0 +1,121 @@
-- CreateTable
CREATE TABLE "llmProvider" (
"id" VARCHAR(14) NOT NULL DEFAULT nanoid(),
"name" VARCHAR NOT NULL,
"createdAt" TIMESTAMP(3) NOT NULL,
"createdBy" TEXT NOT NULL,

CONSTRAINT "llmProvider_pkey" PRIMARY KEY ("id")
);

-- CreateTable
CREATE TABLE "llmModel" (
"id" VARCHAR(14) NOT NULL DEFAULT nanoid(),
"providerId" VARCHAR(14) NOT NULL,
"name" VARCHAR NOT NULL,
"description" TEXT NOT NULL,
"icon" VARCHAR,
"url" VARCHAR,
"createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
"createdBy" TEXT NOT NULL,

CONSTRAINT "llmModel_pkey" PRIMARY KEY ("id")
);

-- CreateTable
CREATE TABLE "aiWorkflow" (
"id" VARCHAR(14) NOT NULL DEFAULT nanoid(),
"name" VARCHAR NOT NULL,
"llmId" VARCHAR(14) NOT NULL,
"description" TEXT NOT NULL,
"defUrl" VARCHAR NOT NULL,
"gitId" VARCHAR NOT NULL,
"gitOwner" VARCHAR NOT NULL,
"scorecardId" VARCHAR(14) NOT NULL,
"createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
"createdBy" TEXT NOT NULL,
"updatedAt" TIMESTAMP(3) NOT NULL,
"updatedBy" TEXT NOT NULL,

CONSTRAINT "aiWorkflow_pkey" PRIMARY KEY ("id")
);

-- CreateTable
CREATE TABLE "aiWorkflowRun" (
"id" VARCHAR(14) NOT NULL DEFAULT nanoid(),
"workflowId" VARCHAR(14) NOT NULL,
"submissionId" VARCHAR(14) NOT NULL,
"startedAt" TIMESTAMP(3),
"completedAt" TIMESTAMP(3),
"gitRunId" VARCHAR NOT NULL,
"score" DOUBLE PRECISION,
"status" VARCHAR NOT NULL,

CONSTRAINT "aiWorkflowRun_pkey" PRIMARY KEY ("id")
);

-- CreateTable
CREATE TABLE "aiWorkflowRunItem" (
"id" VARCHAR(14) NOT NULL DEFAULT nanoid(),
"workflowRunId" VARCHAR(14) NOT NULL,
"scorecardQuestionId" VARCHAR(14) NOT NULL,
"content" TEXT NOT NULL,
"upVotes" INTEGER NOT NULL DEFAULT 0,
"downVotes" INTEGER NOT NULL DEFAULT 0,
"questionScore" DOUBLE PRECISION,
"createdAt" TIMESTAMP(3) NOT NULL,
"createdBy" TEXT NOT NULL,

CONSTRAINT "aiWorkflowRunItem_pkey" PRIMARY KEY ("id")
);

-- CreateTable
CREATE TABLE "aiWorkflowRunItemComment" (
"id" VARCHAR(14) NOT NULL DEFAULT nanoid(),
"workflowRunItemId" VARCHAR(14) NOT NULL,
"userId" TEXT NOT NULL,
"content" TEXT NOT NULL,
"parentId" VARCHAR(14),
"createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
"createdBy" TEXT NOT NULL,
"updatedAt" TIMESTAMP(3) NOT NULL,
"updatedBy" TEXT NOT NULL,

CONSTRAINT "aiWorkflowRunItemComment_pkey" PRIMARY KEY ("id")
);

-- CreateIndex
CREATE UNIQUE INDEX "llmProvider_name_key" ON "llmProvider"("name");

-- CreateIndex
CREATE UNIQUE INDEX "llmModel_name_key" ON "llmModel"("name");

-- CreateIndex
CREATE UNIQUE INDEX "aiWorkflow_name_key" ON "aiWorkflow"("name");

-- AddForeignKey
ALTER TABLE "llmModel" ADD CONSTRAINT "llmModel_providerId_fkey" FOREIGN KEY ("providerId") REFERENCES "llmProvider"("id") ON DELETE RESTRICT ON UPDATE CASCADE;

-- AddForeignKey
ALTER TABLE "aiWorkflow" ADD CONSTRAINT "aiWorkflow_llmId_fkey" FOREIGN KEY ("llmId") REFERENCES "llmModel"("id") ON DELETE RESTRICT ON UPDATE CASCADE;

-- AddForeignKey
ALTER TABLE "aiWorkflow" ADD CONSTRAINT "aiWorkflow_scorecardId_fkey" FOREIGN KEY ("scorecardId") REFERENCES "scorecard"("id") ON DELETE RESTRICT ON UPDATE CASCADE;

-- AddForeignKey
ALTER TABLE "aiWorkflowRun" ADD CONSTRAINT "aiWorkflowRun_workflowId_fkey" FOREIGN KEY ("workflowId") REFERENCES "aiWorkflow"("id") ON DELETE RESTRICT ON UPDATE CASCADE;

-- AddForeignKey
ALTER TABLE "aiWorkflowRun" ADD CONSTRAINT "aiWorkflowRun_submissionId_fkey" FOREIGN KEY ("submissionId") REFERENCES "submission"("id") ON DELETE RESTRICT ON UPDATE CASCADE;

-- AddForeignKey
ALTER TABLE "aiWorkflowRunItem" ADD CONSTRAINT "aiWorkflowRunItem_workflowRunId_fkey" FOREIGN KEY ("workflowRunId") REFERENCES "aiWorkflowRun"("id") ON DELETE RESTRICT ON UPDATE CASCADE;

-- AddForeignKey
ALTER TABLE "aiWorkflowRunItem" ADD CONSTRAINT "aiWorkflowRunItem_scorecardQuestionId_fkey" FOREIGN KEY ("scorecardQuestionId") REFERENCES "scorecardQuestion"("id") ON DELETE RESTRICT ON UPDATE CASCADE;

-- AddForeignKey
ALTER TABLE "aiWorkflowRunItemComment" ADD CONSTRAINT "aiWorkflowRunItemComment_workflowRunItemId_fkey" FOREIGN KEY ("workflowRunItemId") REFERENCES "aiWorkflowRunItem"("id") ON DELETE RESTRICT ON UPDATE CASCADE;

-- AddForeignKey
ALTER TABLE "aiWorkflowRunItemComment" ADD CONSTRAINT "aiWorkflowRunItemComment_parentId_fkey" FOREIGN KEY ("parentId") REFERENCES "aiWorkflowRunItemComment"("id") ON DELETE SET NULL ON UPDATE CASCADE;