-
Notifications
You must be signed in to change notification settings - Fork 48
Policy upload to Filebase/IPFS #639
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Changes from 6 commits
fb912bb
564159b
15c8b09
fd1ba33
7c30100
3f25c4a
d4ef498
fd9679c
1bc7176
3f8ad18
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,117 @@ | ||
import { deployments, getNamedAccounts, getChainId, ethers } from "hardhat"; | ||
import { PolicyRegistry } from "../typechain-types"; | ||
import fs from "fs"; | ||
const AWS = require("aws-sdk"); | ||
const { v4: uuidv4 } = require("uuid"); | ||
const id = uuidv4(); | ||
const currentDate = new Date(); | ||
const formattedDate = `${currentDate.getFullYear()}/${(currentDate.getMonth() + 1) | ||
alcercu marked this conversation as resolved.
Show resolved
Hide resolved
|
||
.toString() | ||
.padStart(2, "0")}/${currentDate.getDate().toString().padStart(2, "0")}`; | ||
const S3_PATH = formattedDate + "/" + id + "/"; | ||
|
||
const s3 = new AWS.S3({ | ||
endpoint: "https://s3.filebase.com", | ||
region: "us-east-1", | ||
signatureVersion: "v4", | ||
accessKeyId: process.env.FILEBASE_ACCESS_KEY, | ||
secretAccessKey: process.env.FILEBASE_SECRET_KEY, | ||
}); | ||
// Chains this script may deploy to. This is deliberately a NUMERIC enum:
// numeric TS enums get a reverse mapping (chainId -> name), and main() relies
// on that both to validate the chain id (`!HomeChains[chainId]`) and to print
// a human-readable chain name. Do not convert to a string union.
enum HomeChains {
  ARBITRUM_ONE = 42161,
  ARBITRUM_RINKEBY = 421611,
  ARBITRUM_GOERLI = 421613,
  HARDHAT = 31337,
}
async function main(filePath: string) { | ||
let courtsV1; | ||
fs.readFile(filePath, "utf8", (err, jsonString) => { | ||
if (err) { | ||
console.log("File read failed:", err); | ||
return; | ||
} | ||
const json = JSON.parse(jsonString); | ||
courtsV1 = json.map((courtDetails) => ({ | ||
...courtDetails, | ||
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. What's the format of
as it will already be assigned. There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. removed extra step of destructuring. |
||
name: courtDetails.name, | ||
description: courtDetails.description, | ||
summary: courtDetails.summary, | ||
court: courtDetails.court, | ||
uri: courtDetails.uri, | ||
})); | ||
}); | ||
|
||
// fallback to hardhat node signers on local network | ||
// const governor = (await getNamedAccounts()).governor ?? (await ethers.getSigners())[0].address; | ||
const governor = (await ethers.getSigners())[0]; | ||
const chainId = Number(await getChainId()); | ||
if (!HomeChains[chainId]) { | ||
console.error(`Aborting: script is not compatible with ${chainId}`); | ||
return; | ||
} else { | ||
console.log("deploying to %s with deployer %s", HomeChains[chainId], governor); | ||
} | ||
|
||
//--------uncomment once configuration is set in deployments------ | ||
alcercu marked this conversation as resolved.
Show resolved
Hide resolved
|
||
// const policyRegistryDeployment = await deployments.get("PolicyRegistry"); | ||
const policyRegistry = (await ethers.getContractAt( | ||
"PolicyRegistry", | ||
"0xAF0F49Fe110b48bd512F00d51D141F023c9a9106" // arbitrumgoerli contract address | ||
alcercu marked this conversation as resolved.
Show resolved
Hide resolved
|
||
// policyRegistryDeployment.address | ||
)) as PolicyRegistry; | ||
for (const courtObject of courtsV1) { | ||
var courtV2 = courtObject.court + 1; | ||
var filename = courtObject.name.replace(" ", "-").concat(".json"); | ||
const data = { name: courtObject.name, description: courtObject.description, summary: courtObject.summary }; | ||
let response = await uploadFormDataToIPFS(data, filename); | ||
console.log(response); | ||
|
||
if (response && response.statusCode === 200) { | ||
try { | ||
console.log(courtV2, courtObject.name); | ||
const data = await JSON.parse(response.body); | ||
const cid = "/ipfs/" + data.message.Metadata.cid; | ||
alcercu marked this conversation as resolved.
Show resolved
Hide resolved
|
||
console.log(cid, "cid"); | ||
await policyRegistry.connect(governor).setPolicy(courtV2, courtObject.name, cid); | ||
} catch (error) { | ||
console.log(error); | ||
} | ||
} | ||
} | ||
} | ||
|
||
const uploadFormDataToIPFS = async (data, filename) => { | ||
alcercu marked this conversation as resolved.
Show resolved
Hide resolved
|
||
try { | ||
const params = { | ||
Bucket: process.env.FILEBASE_BUCKET_NAME, | ||
Key: S3_PATH + filename, | ||
ContentType: "application/json", | ||
Body: Buffer.from(JSON.stringify(data)), | ||
}; | ||
const request = await s3.upload(params).promise(); | ||
|
||
const head_params = { | ||
Bucket: process.env.FILEBASE_BUCKET_NAME, | ||
Key: request.key, | ||
}; | ||
const head = await s3.headObject(head_params).promise(); | ||
|
||
return { | ||
statusCode: 200, | ||
body: JSON.stringify({ message: head }), | ||
}; | ||
} catch (error) { | ||
console.log(error); | ||
|
||
return { | ||
statusCode: 500, | ||
body: JSON.stringify({ message: error }), | ||
}; | ||
} | ||
}; | ||
main("./config/policies.v1.mainnet.json") | ||
.then(() => process.exit(0)) | ||
.catch((error) => { | ||
console.error(error); | ||
process.exit(1); | ||
}); |
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1,31 +1,152 @@ | ||
import { Handler } from "@netlify/functions"; | ||
import fetch from "node-fetch"; | ||
|
||
const ESTUARI_API_KEY = process.env["ESTUARY_API_KEY"]; | ||
const ESTUARI_URL = process.env["ESTUARY_GATEWAY"]; | ||
|
||
export const handler: Handler = async (event, context) => { | ||
context.callbackWaitsForEmptyEventLoop = false; | ||
if (event.body) { | ||
const newHeaders = event.headers; | ||
delete newHeaders.host; | ||
const response = await fetch(ESTUARI_URL, { | ||
method: "POST", | ||
headers: { | ||
Authorization: `Bearer ${ESTUARI_API_KEY}`, | ||
...newHeaders, | ||
}, | ||
body: Buffer.from(event.body, "base64"), | ||
}); | ||
import { Handler, HandlerEvent } from "@netlify/functions"; | ||
import AWS from "aws-sdk"; | ||
import { v4 as uuidv4 } from "uuid"; | ||
import amqp from "amqplib"; | ||
import busboy from "busboy"; | ||
|
||
// All required configuration, read once at module load. validEnvVariables()
// checks that none of these are undefined before the handler does any work.
const envVariables = {
  accessKey: process.env.FILEBASE_ACCESS_KEY,
  secretKey: process.env.FILEBASE_SECRET_KEY,
  bucketName: process.env.FILEBASE_BUCKET_NAME,
  rabbitMQURL: process.env.RABBITMQ_URL,
};
|
||
const parsedResponse = await response.json(); | ||
// S3 client pointed at Filebase's S3-compatible endpoint; region and
// signature version are fixed by Filebase, credentials come from env vars.
const s3 = new AWS.S3({
  endpoint: "https://s3.filebase.com",
  region: "us-east-1",
  signatureVersion: "v4",
  accessKeyId: envVariables.accessKey,
  secretAccessKey: envVariables.secretKey,
});
|
||
export const handler: Handler = async function (event) { | ||
if (!validEnvVariables()) { | ||
return { | ||
statusCode: 500, | ||
body: JSON.stringify({ message: "Env variables missing" }), | ||
}; | ||
} | ||
if (!event.body) { | ||
return { | ||
statusCode: response.status, | ||
body: JSON.stringify(parsedResponse), | ||
statusCode: 500, | ||
body: JSON.stringify({ message: "Invalid body format" }), | ||
}; | ||
} | ||
|
||
const file = parseMultiPartData(event.body, event.headers); | ||
const s3Key = await uploadToS3(file["name"], file["parts"]); | ||
const cid = await getCID(s3Key); | ||
console.log(cid); | ||
await rabbitMQUpload(cid); | ||
|
||
return { | ||
statusCode: 500, | ||
body: JSON.stringify({ message: "Invalid body format" }), | ||
statusCode: 200, | ||
body: JSON.stringify({ message: cid }), | ||
}; | ||
}; | ||
|
||
// In-memory representation of one uploaded file: its name plus the raw
// data chunks in arrival order.
interface IFile {
  name: string;
  parts: Buffer[];
}

/**
 * Parses the base64-encoded multipart/form-data request body with busboy and
 * collects the file's name and data chunks.
 *
 * NOTE(review): busboy is stream-based; this returns synchronously and relies
 * on the "file"/"data" events firing during bb.write() for a fully buffered
 * body. If busboy defers those events, the returned object could still be
 * empty — TODO confirm, or convert to a Promise resolved on busboy "close".
 */
const parseMultiPartData = (
  body: string,
  headers: HandlerEvent["headers"]
): IFile => {
  const file: IFile = {
    name: "",
    parts: [],
  };

  const bb = busboy({ headers });

  // Only file parts are collected; ordinary form fields are ignored.
  // If multiple files are sent, the last one's name wins and all chunks
  // are appended to the same parts array.
  bb.on("file", (_, filestream, metadata) => {
    file.name = metadata.filename;
    filestream.on("data", (data) => {
      file.parts.push(data);
    });
  });

  // Netlify delivers the raw request body base64-encoded.
  bb.write(Buffer.from(body, "base64"));

  return file;
};
|
||
const validEnvVariables = (): boolean => { | ||
return Object.values(envVariables).reduce( | ||
(acc, current) => acc && typeof current !== "undefined", | ||
true | ||
); | ||
}; | ||
|
||
// One completed part of a multipart upload, in the shape S3's
// CompleteMultipartUpload expects (ETag returned by uploadPart + 1-based index).
interface IUploadedPart {
  ETag: string;
  PartNumber: number;
}
|
||
const uploadToS3 = async (name: string, parts: Buffer[]) => { | ||
const multipartInfo: AWS.S3.CreateMultipartUploadRequest = { | ||
Bucket: envVariables.bucketName!, | ||
Key: generateS3Path() + name, | ||
}; | ||
const uploadID = await s3 | ||
.createMultipartUpload(multipartInfo) | ||
.promise() | ||
.then((result) => result.UploadId); | ||
const uploadedParts: IUploadedPart[] = []; | ||
for (const [i, part] of parts.entries()) { | ||
const partNumber = i + 1; | ||
const partInfo: AWS.S3.UploadPartRequest = { | ||
...multipartInfo, | ||
UploadId: uploadID!, | ||
Body: part, | ||
PartNumber: partNumber, | ||
}; | ||
const test = await s3.uploadPart(partInfo).promise(); | ||
uploadedParts.push({ | ||
ETag: test.ETag!, | ||
PartNumber: partNumber, | ||
}); | ||
} | ||
const completeMultipartUploadParams: AWS.S3.CompleteMultipartUploadRequest = { | ||
...multipartInfo, | ||
MultipartUpload: { | ||
Parts: uploadedParts, | ||
}, | ||
UploadId: uploadID!, | ||
}; | ||
await s3.completeMultipartUpload(completeMultipartUploadParams).promise(); | ||
|
||
return multipartInfo.Key; | ||
}; | ||
|
||
const getCID = async (key: string) => { | ||
const headParams: AWS.S3.HeadObjectRequest = { | ||
Bucket: envVariables.bucketName!, | ||
Key: key, | ||
}; | ||
const head = await s3.headObject(headParams).promise(); | ||
|
||
return head.Metadata?.cid; | ||
}; | ||
|
||
const generateS3Path = (): string => { | ||
const currentDate = new Date(); | ||
const formattedDate = currentDate | ||
.toISOString() | ||
.slice(0, 10) | ||
.replace(/-/g, "/"); | ||
const id = uuidv4(); | ||
return `${formattedDate}/${id}/`; | ||
}; | ||
|
||
const rabbitMQUpload = async (cid: any) => { | ||
const conn = await amqp.connect(envVariables.rabbitMQURL!); | ||
const channel = await conn.createChannel(); | ||
const exchange = "filebase"; | ||
await channel.assertExchange(exchange, "fanout", { durable: true }); | ||
channel.publish(exchange, "", Buffer.from(cid)); | ||
await channel.close(); | ||
await conn.close(); | ||
}; |
Uh oh!
There was an error while loading. Please reload this page.