Skip to content

Commit f80c435

Browse files
authored
Merge pull request #33 from topcoder-platform/pm-1788
feat(PM-1788): Prisma models for AI workflow
2 parents 4dc16e6 + 8972d58 commit f80c435

File tree

3 files changed

+513
-125
lines changed

3 files changed

+513
-125
lines changed

prisma/migrate.ts

Lines changed: 180 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -43,6 +43,9 @@ const modelMappingKeys = [
4343
'review',
4444
'review_item',
4545
'review_item_comment',
46+
'llm_provider',
47+
'llm_model',
48+
'ai_workflow'
4649
];
4750
const subModelMappingKeys = {
4851
review_item_comment: ['reviewItemComment', 'appeal', 'appealResponse'],
@@ -102,6 +105,9 @@ const reviewItemCommentAppealResponseIdMap = readIdMap(
102105
);
103106
const uploadIdMap = readIdMap('uploadIdMap');
104107
const submissionIdMap = readIdMap('submissionIdMap');
108+
const llmProviderIdMap = readIdMap('llmProviderIdMap');
109+
const llmModelIdMap = readIdMap('llmModelIdMap');
110+
const aiWorkflowIdMap = readIdMap('aiWorkflowIdMap');
105111

106112
// read resourceSubmissionSet
107113
const rsSetFile = '.tmp/resourceSubmissionSet.json';
@@ -808,7 +814,6 @@ async function processType(type: string, subtype?: string) {
808814
case 'scorecard': {
809815
console.log(`[${type}][${file}] Processing file`);
810816
const processedData = jsonData[key]
811-
.filter((sc) => !scorecardIdMap.has(sc.scorecard_id))
812817
.map((sc) => {
813818
const id = nanoid(14);
814819
scorecardIdMap.set(sc.scorecard_id, id);
@@ -1342,6 +1347,177 @@ async function processType(type: string, subtype?: string) {
13421347
}
13431348
break;
13441349
}
1350+
case 'llm_provider': {
  console.log(`[${type}][${subtype}][${file}] Processing file`);
  // Reverse lookup (new nanoid -> legacy llm_provider_id) so a failed
  // insert can be logged by legacy id and rolled out of llmProviderIdMap.
  const legacyIdOf = {};
  const rows = jsonData[key].map((src) => {
    const newId = nanoid(14);
    llmProviderIdMap.set(src.llm_provider_id, newId);
    legacyIdOf[newId] = src.llm_provider_id;
    return {
      id: newId,
      name: src.name,
      createdAt: new Date(src.create_date),
      createdBy: src.create_user,
    };
  });

  // Insert in batches; on a batch failure fall back to row-by-row inserts
  // so one bad row does not lose the whole batch.
  const totalBatches = Math.ceil(rows.length / batchSize);
  for (let offset = 0; offset < rows.length; offset += batchSize) {
    const batchIndex = offset / batchSize + 1;
    console.log(
      `[${type}][${subtype}][${file}] Processing batch ${batchIndex}/${totalBatches}`,
    );
    const chunk = rows.slice(offset, offset + batchSize);
    try {
      await prisma.llmProvider.createMany({ data: chunk });
    } catch {
      console.error(
        `[${type}][${subtype}][${file}] An error occurred, retrying individually`,
      );
      for (const row of chunk) {
        await prisma.llmProvider
          .create({ data: row })
          .catch((err) => {
            // Undo the id mapping for the row that could not be migrated.
            llmProviderIdMap.delete(legacyIdOf[row.id]);
            console.error(
              `[${type}][${subtype}][${file}] Error code: ${err.code}, LegacyId: ${legacyIdOf[row.id]}`,
            );
          });
      }
    }
  }
  break;
}
1402+
case 'llm_model': {
  console.log(`[${type}][${subtype}][${file}] Processing file`);
  // new nanoid -> legacy llm_model_id, used to report and unmap failed rows.
  const idToLegacyIdMap = {};
  const processedData = jsonData[key]
    .map((c) => {
      const id = nanoid(14);
      llmModelIdMap.set(
        c.llm_model_id,
        id,
      );
      idToLegacyIdMap[id] = c.llm_model_id;
      return {
        id: id,
        // NOTE(review): undefined when the provider was not migrated; such a
        // row then fails the insert and is logged individually below.
        providerId: llmProviderIdMap.get(c.provider_id),
        name: c.name,
        description: c.description,
        icon: c.icon,
        url: c.url,
        createdAt: new Date(c.create_date),
        createdBy: c.create_user,
      };
    });

  // Batched inserts with per-row fallback on batch failure.
  const totalBatches = Math.ceil(processedData.length / batchSize);
  for (let i = 0; i < processedData.length; i += batchSize) {
    const batchIndex = i / batchSize + 1;
    console.log(
      `[${type}][${subtype}][${file}] Processing batch ${batchIndex}/${totalBatches}`,
    );
    const batch = processedData.slice(i, i + batchSize);
    await prisma.llmModel
      .createMany({
        data: batch,
      })
      .catch(async () => {
        console.error(
          `[${type}][${subtype}][${file}] An error occurred, retrying individually`,
        );
        for (const item of batch) {
          await prisma.llmModel
            .create({
              data: item,
            })
            .catch((err) => {
              // Undo the id mapping for the row that could not be migrated.
              llmModelIdMap.delete(
                idToLegacyIdMap[item.id],
              );
              console.error(
                `[${type}][${subtype}][${file}] Error code: ${err.code}, LegacyId: ${idToLegacyIdMap[item.id]}`,
              );
            });
        }
      });
  }
  break;
}
1461+
case 'ai_workflow': {
  console.log(`[${type}][${subtype}][${file}] Processing file`);
  // new nanoid -> legacy ai_workflow_id, for error logging and map rollback.
  const legacyByNewId = {};
  const workflows = jsonData[key].map((w) => {
    const newId = nanoid(14);
    aiWorkflowIdMap.set(w.ai_workflow_id, newId);
    legacyByNewId[newId] = w.ai_workflow_id;
    return {
      id: newId,
      // NOTE(review): both lookups yield undefined when the referenced llm
      // model / scorecard was not migrated; such rows fail on insert and are
      // logged individually below — confirm that is the intended behavior.
      llmId: llmModelIdMap.get(w.llm_id),
      name: w.name,
      description: w.description,
      defUrl: w.def_url,
      gitId: w.git_id,
      gitOwner: w.git_owner,
      scorecardId: scorecardIdMap.get(w.scorecard_id),
      createdAt: new Date(w.create_date),
      createdBy: w.create_user,
      updatedAt: new Date(w.modify_date),
      updatedBy: w.modify_user,
    };
  });

  // Insert in batches; retry row-by-row when a whole batch fails so a single
  // bad row does not abort the rest of the batch.
  const totalBatches = Math.ceil(workflows.length / batchSize);
  for (let start = 0; start < workflows.length; start += batchSize) {
    const batchIndex = start / batchSize + 1;
    console.log(
      `[${type}][${subtype}][${file}] Processing batch ${batchIndex}/${totalBatches}`,
    );
    const slice = workflows.slice(start, start + batchSize);
    try {
      await prisma.aiWorkflow.createMany({ data: slice });
    } catch {
      console.error(
        `[${type}][${subtype}][${file}] An error occurred, retrying individually`,
      );
      for (const wf of slice) {
        await prisma.aiWorkflow
          .create({ data: wf })
          .catch((err) => {
            // Undo the id mapping for the row that could not be migrated.
            aiWorkflowIdMap.delete(legacyByNewId[wf.id]);
            console.error(
              `[${type}][${subtype}][${file}] Error code: ${err.code}, LegacyId: ${legacyByNewId[wf.id]}`,
            );
          });
      }
    }
  }
  break;
}
13451521
default:
13461522
console.warn(`No processor defined for type: ${type}`);
13471523
return;
@@ -1509,6 +1685,9 @@ migrate()
15091685
},
15101686
{ key: 'uploadIdMap', value: uploadIdMap },
15111687
{ key: 'submissionIdMap', value: submissionIdMap },
1688+
{ key: 'llmProviderIdMap', value: llmProviderIdMap },
1689+
{ key: 'llmModelIdMap', value: llmModelIdMap },
1690+
{ key: 'aiWorkflowIdMap', value: aiWorkflowIdMap }
15121691
].forEach((f) => {
15131692
if (!fs.existsSync('.tmp')) {
15141693
fs.mkdirSync('.tmp');
Lines changed: 121 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,121 @@
1+
-- Prisma migration: tables for the AI workflow feature (PM-1788).
-- Providers -> models -> workflows -> runs -> run items -> item comments.
-- All primary keys are 14-char nanoids, matching the app-side nanoid(14).

-- CreateTable
-- Catalogue of LLM vendors; `name` is unique (see index below).
-- NOTE(review): `createdAt` has no DEFAULT here, unlike llmModel — confirm
-- this mirrors the Prisma schema intentionally.
CREATE TABLE "llmProvider" (
    "id" VARCHAR(14) NOT NULL DEFAULT nanoid(),
    "name" VARCHAR NOT NULL,
    "createdAt" TIMESTAMP(3) NOT NULL,
    "createdBy" TEXT NOT NULL,

    CONSTRAINT "llmProvider_pkey" PRIMARY KEY ("id")
);

-- CreateTable
-- A concrete model offered by a provider; `name` is globally unique.
CREATE TABLE "llmModel" (
    "id" VARCHAR(14) NOT NULL DEFAULT nanoid(),
    "providerId" VARCHAR(14) NOT NULL,
    "name" VARCHAR NOT NULL,
    "description" TEXT NOT NULL,
    "icon" VARCHAR,
    "url" VARCHAR,
    "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "createdBy" TEXT NOT NULL,

    CONSTRAINT "llmModel_pkey" PRIMARY KEY ("id")
);

-- CreateTable
-- A workflow definition tied to one LLM model and one scorecard.
CREATE TABLE "aiWorkflow" (
    "id" VARCHAR(14) NOT NULL DEFAULT nanoid(),
    "name" VARCHAR NOT NULL,
    "llmId" VARCHAR(14) NOT NULL,
    "description" TEXT NOT NULL,
    "defUrl" VARCHAR NOT NULL,
    "gitId" VARCHAR NOT NULL,
    "gitOwner" VARCHAR NOT NULL,
    "scorecardId" VARCHAR(14) NOT NULL,
    "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "createdBy" TEXT NOT NULL,
    "updatedAt" TIMESTAMP(3) NOT NULL,
    "updatedBy" TEXT NOT NULL,

    CONSTRAINT "aiWorkflow_pkey" PRIMARY KEY ("id")
);

-- CreateTable
-- One execution of a workflow against a submission; timestamps and score
-- are nullable (presumably unset until the run starts/finishes — verify).
CREATE TABLE "aiWorkflowRun" (
    "id" VARCHAR(14) NOT NULL DEFAULT nanoid(),
    "workflowId" VARCHAR(14) NOT NULL,
    "submissionId" VARCHAR(14) NOT NULL,
    "startedAt" TIMESTAMP(3),
    "completedAt" TIMESTAMP(3),
    "gitRunId" VARCHAR NOT NULL,
    "score" DOUBLE PRECISION,
    "status" VARCHAR NOT NULL,

    CONSTRAINT "aiWorkflowRun_pkey" PRIMARY KEY ("id")
);

-- CreateTable
-- Per-scorecard-question output of a run, with vote counters.
CREATE TABLE "aiWorkflowRunItem" (
    "id" VARCHAR(14) NOT NULL DEFAULT nanoid(),
    "workflowRunId" VARCHAR(14) NOT NULL,
    "scorecardQuestionId" VARCHAR(14) NOT NULL,
    "content" TEXT NOT NULL,
    "upVotes" INTEGER NOT NULL DEFAULT 0,
    "downVotes" INTEGER NOT NULL DEFAULT 0,
    "questionScore" DOUBLE PRECISION,
    "createdAt" TIMESTAMP(3) NOT NULL,
    "createdBy" TEXT NOT NULL,

    CONSTRAINT "aiWorkflowRunItem_pkey" PRIMARY KEY ("id")
);

-- CreateTable
-- Threaded user comments on a run item; `parentId` is a self-reference
-- (nullable for top-level comments).
CREATE TABLE "aiWorkflowRunItemComment" (
    "id" VARCHAR(14) NOT NULL DEFAULT nanoid(),
    "workflowRunItemId" VARCHAR(14) NOT NULL,
    "userId" TEXT NOT NULL,
    "content" TEXT NOT NULL,
    "parentId" VARCHAR(14),
    "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "createdBy" TEXT NOT NULL,
    "updatedAt" TIMESTAMP(3) NOT NULL,
    "updatedBy" TEXT NOT NULL,

    CONSTRAINT "aiWorkflowRunItemComment_pkey" PRIMARY KEY ("id")
);

-- CreateIndex
CREATE UNIQUE INDEX "llmProvider_name_key" ON "llmProvider"("name");

-- CreateIndex
CREATE UNIQUE INDEX "llmModel_name_key" ON "llmModel"("name");

-- CreateIndex
CREATE UNIQUE INDEX "aiWorkflow_name_key" ON "aiWorkflow"("name");

-- AddForeignKey
ALTER TABLE "llmModel" ADD CONSTRAINT "llmModel_providerId_fkey" FOREIGN KEY ("providerId") REFERENCES "llmProvider"("id") ON DELETE RESTRICT ON UPDATE CASCADE;

-- AddForeignKey
ALTER TABLE "aiWorkflow" ADD CONSTRAINT "aiWorkflow_llmId_fkey" FOREIGN KEY ("llmId") REFERENCES "llmModel"("id") ON DELETE RESTRICT ON UPDATE CASCADE;

-- AddForeignKey
ALTER TABLE "aiWorkflow" ADD CONSTRAINT "aiWorkflow_scorecardId_fkey" FOREIGN KEY ("scorecardId") REFERENCES "scorecard"("id") ON DELETE RESTRICT ON UPDATE CASCADE;

-- AddForeignKey
ALTER TABLE "aiWorkflowRun" ADD CONSTRAINT "aiWorkflowRun_workflowId_fkey" FOREIGN KEY ("workflowId") REFERENCES "aiWorkflow"("id") ON DELETE RESTRICT ON UPDATE CASCADE;

-- AddForeignKey
ALTER TABLE "aiWorkflowRun" ADD CONSTRAINT "aiWorkflowRun_submissionId_fkey" FOREIGN KEY ("submissionId") REFERENCES "submission"("id") ON DELETE RESTRICT ON UPDATE CASCADE;

-- AddForeignKey
ALTER TABLE "aiWorkflowRunItem" ADD CONSTRAINT "aiWorkflowRunItem_workflowRunId_fkey" FOREIGN KEY ("workflowRunId") REFERENCES "aiWorkflowRun"("id") ON DELETE RESTRICT ON UPDATE CASCADE;

-- AddForeignKey
ALTER TABLE "aiWorkflowRunItem" ADD CONSTRAINT "aiWorkflowRunItem_scorecardQuestionId_fkey" FOREIGN KEY ("scorecardQuestionId") REFERENCES "scorecardQuestion"("id") ON DELETE RESTRICT ON UPDATE CASCADE;

-- AddForeignKey
ALTER TABLE "aiWorkflowRunItemComment" ADD CONSTRAINT "aiWorkflowRunItemComment_workflowRunItemId_fkey" FOREIGN KEY ("workflowRunItemId") REFERENCES "aiWorkflowRunItem"("id") ON DELETE RESTRICT ON UPDATE CASCADE;

-- AddForeignKey
-- Self-referencing FK: deleting a parent comment nulls out children's
-- parentId rather than cascading the delete.
ALTER TABLE "aiWorkflowRunItemComment" ADD CONSTRAINT "aiWorkflowRunItemComment_parentId_fkey" FOREIGN KEY ("parentId") REFERENCES "aiWorkflowRunItemComment"("id") ON DELETE SET NULL ON UPDATE CASCADE;

0 commit comments

Comments
 (0)