From c81586c1952098ab72da51c38ad0720bcdc01d6a Mon Sep 17 00:00:00 2001 From: LoveL Date: Thu, 21 Mar 2024 12:16:33 +0800 Subject: [PATCH] =?UTF-8?q?fix:=20=F0=9F=90=9B=20Optimize=20Proof=20Submis?= =?UTF-8?q?sion?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ✅ Closes: #19 --- .gitignore | 2 +- .vscode/launch.json | 3 +- package-lock.json | 36 +- package.json | 2 +- src/cmd.ts | 99 +++- src/dataset/metadata/repo/index.ts | 355 +++++++------ src/dataset/proof/repo/index.ts | 631 +++++++++++++---------- src/dataset/proof/types/index.ts | 10 +- src/finance/repo/index.ts | 333 ++++++------ src/shared/constant.ts | 1 + src/shared/utils/utils.ts | 44 ++ testdata/challenges.proofs | 54 +- testdata/dataset.proof | 12 +- testdata/datasetMetadata.json | 6 +- testdata/datasetReplicaRequirements.json | 6 +- testdata/mappingFiles.proof | 9 + 16 files changed, 904 insertions(+), 699 deletions(-) create mode 100644 testdata/mappingFiles.proof diff --git a/.gitignore b/.gitignore index bd69123..145f6df 100644 --- a/.gitignore +++ b/.gitignore @@ -6,4 +6,4 @@ shed-linux shed-macos shed-win.exe .env -yarn.lock +*.lock diff --git a/.vscode/launch.json b/.vscode/launch.json index bfa46d3..594c4f6 100644 --- a/.vscode/launch.json +++ b/.vscode/launch.json @@ -9,7 +9,8 @@ "type": "node", "request": "launch", "runtimeArgs": ["-r", "ts-node/register"], - "args": ["${workspaceFolder}/src/index.ts", "getEscrowRequirement", "--datasetId", "1", "--type", "5"], + "args": ["${workspaceFolder}/src/index.ts", "updateDatasetTimeoutParameters", "-i", "5", "-p", "200000", "-a", "200000"], + //"args": ["${workspaceFolder}/src/index.ts", "deposit", "-i", "5", "-m", "0", "-o", "0x09C6DEE9DB5e7dF2b18283c0CFCf714fEDB692d7", "-a", "10000000000"], "cwd": "${workspaceFolder}", "sourceMaps": true } diff --git a/package-lock.json b/package-lock.json index 5c40a22..8e3032c 100644 --- a/package-lock.json +++ b/package-lock.json @@ -17,7 +17,7 @@ "shed": "dist/index.js" }, "devDependencies": { - "@dataswapjs/dataswapjs": "^0.25.0", + "@dataswapjs/dataswapjs": "^0.25.1", "@types/mocha": "^10.0.6", "@types/node": "^20.11.26", "@types/yargs": "^17.0.32", @@ -302,16 +302,16 @@ } }, "node_modules/@dataswapjs/dataswapjs": { - "version": "0.25.0", - "resolved": "https://registry.npmjs.org/@dataswapjs/dataswapjs/-/dataswapjs-0.25.0.tgz", - "integrity": "sha512-DfTloN9ypu1qGcQ0o4maDRUWxwhVRgRDg/JOP7hhz16JroAxo0UI22Q1ZzVbAFkcZjSMkDrJ/m31y+XVIRlJGw==", + "version": "0.25.1", + "resolved": "https://registry.npmjs.org/@dataswapjs/dataswapjs/-/dataswapjs-0.25.1.tgz", + "integrity": "sha512-jrYOoqNL6BFXSrt51qY31K30RaEdA8k1U0/L6Xdp/TIz5U8AfZp+oDFGx7OpwXT9ssyTUcLnckaBhZzp7KJ6ew==", "dev": true, "hasInstallScript": true, "dependencies": { "@dataswapcore/contracts": "^0.8.0", "@glif/filecoin-address": "^2.0.43", "@unipackage/datastore": "^2.0.0", - "@unipackage/filecoin": "^2.2.0", + "@unipackage/filecoin": "^2.2.1", "@unipackage/utils": "^1.5.0", "dotenv": "^16.3.1", "rfc4648": "^1.5.3" @@ -4731,9 +4731,9 @@ } }, "node_modules/@unipackage/filecoin": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/@unipackage/filecoin/-/filecoin-2.2.0.tgz", - "integrity": "sha512-BGx6D/hGIbZW91AiB1BpbygcIuj8wrLWsHf5sJvErvJ44V2gYpX9WBLqgvJFJWpFNx1IP2wQudXXCjuXQSNqSg==", + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/@unipackage/filecoin/-/filecoin-2.2.1.tgz", + "integrity": "sha512-ix2dKVBZawdV5jhqGAmpI1FEI04qg/erWObQoCS2Kj6FDSNs4XMpnAhL4dTh7dg77MOnICaVLM2hXYCtwg176w==", "dev": true, 
"dependencies": { "@glif/filecoin-actor-utils": "^2.0.62", @@ -4743,15 +4743,15 @@ "@types/cbor": "^6.0.0", "@unipackage/datastore": "^2.0.0", "@unipackage/ddd": "^1.1.0", - "@unipackage/net": "^2.5.3", + "@unipackage/net": "^2.5.4", "cbor": "^9.0.1", "mongoose": "^7.6.3" } }, "node_modules/@unipackage/net": { - "version": "2.5.3", - "resolved": "https://registry.npmjs.org/@unipackage/net/-/net-2.5.3.tgz", - "integrity": "sha512-GEmK1gwEEVKz1qgS6EKv03xjc40c27LyHz6qbKpuVvnZ83T8JB9pxwWJIJ0FQdPftxHxyjb+/XEkXL0uqrGbKQ==", + "version": "2.5.4", + "resolved": "https://registry.npmjs.org/@unipackage/net/-/net-2.5.4.tgz", + "integrity": "sha512-vM8EbSiAwUry0gAUQI4F1Nid9ssdNcpSNnEUeSR+Ef3UAShKFf7+dXBvsfEwON5K3gRfiNQGuJ868cj2VLzVbg==", "dev": true, "dependencies": { "@dataswapcore/abi": "^0.1.2", @@ -8974,9 +8974,9 @@ } }, "node_modules/fp-ts": { - "version": "2.16.3", - "resolved": "https://registry.npmjs.org/fp-ts/-/fp-ts-2.16.3.tgz", - "integrity": "sha512-REm0sOecd4inACbAiFeOxpyOgB+f0OqhmZBBVOc5Ku1HqdroDU5uxYu9mybKj+be4DQ5L2YI6LuosjAfmuJCBQ==", + "version": "2.16.4", + "resolved": "https://registry.npmjs.org/fp-ts/-/fp-ts-2.16.4.tgz", + "integrity": "sha512-EkV/l6oHaf/w/DlVc5UiqLibqTV1S+idiDdcWQ+UjnLLflL9pZG28ebJfPLor8ifoL8NgEFDIo9fOvHyiSCrJQ==", "dev": true }, "node_modules/fresh": { @@ -12338,9 +12338,9 @@ } }, "node_modules/mongoose": { - "version": "7.6.9", - "resolved": "https://registry.npmjs.org/mongoose/-/mongoose-7.6.9.tgz", - "integrity": "sha512-3lR1fA/gS1E9Bn0woFqIysnnjCFDYtVo3yY+rGsVg1Q7kHX+gUTgAHTEKXrkwKxk2gHFdUfAsLt/Zjrdf6+nZA==", + "version": "7.6.10", + "resolved": "https://registry.npmjs.org/mongoose/-/mongoose-7.6.10.tgz", + "integrity": "sha512-vfvGxXwFk6rZVRaMC+8pgXj1uOR2RafZpgaA3fO6ygDJN7dXnBQ3ehuacwaVD+U3hmZetqHimORJhvLEpdRl1w==", "dev": true, "dependencies": { "bson": "^5.5.0", diff --git a/package.json b/package.json index fdfa4a8..4eb0283 100644 --- a/package.json +++ b/package.json @@ -38,7 +38,7 @@ }, "homepage": "https://github.com/dataswap/shed#readme", "devDependencies": { - "@dataswapjs/dataswapjs": "^0.25.0", + "@dataswapjs/dataswapjs": "^0.25.1", "@types/mocha": "^10.0.6", "@types/node": "^20.11.26", "@types/yargs": "^17.0.32", diff --git a/src/cmd.ts b/src/cmd.ts index 6c6ffd8..9420f30 100644 --- a/src/cmd.ts +++ b/src/cmd.ts @@ -19,16 +19,9 @@ ********************************************************************************/ import yargs from "yargs" -import { - submitDatasetMetadata, - updateDatasetTimeoutParameters, - submitDatasetReplicaRequirements, -} from "./dataset/metadata/repo" -import { - submitDatasetProof, - submitDatasetChallengeProofs, -} from "./dataset/proof/repo" -import { getEscrowRequirement, deposit } from "./finance/repo" +import { DatasetMetadatas } from "./dataset/metadata/repo" +import { DatasetProofs } from "./dataset/proof/repo" +import { Finance } from "./finance/repo" import { Context } from "./shared/context" @@ -112,6 +105,30 @@ const argv = yargs type: "number", }, }) + .command("completeEscrow", "completeEscrow", { + datasetId: { + description: "Dataset Id", + alias: "i", + demandOption: true, + type: "number", + }, + }) + .command("submitDatasetProofCompleted", "Submit dataset proof completed", { + datasetId: { + description: "Dataset Id", + alias: "i", + demandOption: true, + type: "number", + }, + }) + .command("auditorStake", "auditor stake amount", { + datasetId: { + description: "Dataset Id", + alias: "i", + demandOption: true, + type: "number", + }, + }) .command( "submitDatasetChallengeProofs", "Submit dataset challenge 
proofs", @@ -162,6 +179,15 @@ const argv = yargs type: "string", }, }) + .command("getDatasetState", "Get dataset state", { + datasetId: { + description: + "dataset state: None = 0,MetadataSubmitted=1,RequirementSubmitted=2,WaitEscrow=3,ProofSubmitted=4,Approved=5,Rejected=6", + alias: "i", + demandOption: true, + type: "number", + }, + }) .command("getEscrowRequirement", "Get escrow requirement", { datasetId: { description: "Dataset Id", @@ -169,10 +195,16 @@ const argv = yargs type: "number", }, size: { - description: "Data size", + description: + "Data size, when DatacapCollateralRequirment and DatacapChunkLandRequirment", alias: "s", type: "number", }, + replicasCount: { + description: "Replicas count, when DatacapCollateralRequirment", + alias: "r", + type: "number", + }, type: { description: "escrow type:(DatacapCollateralRequirment=0; DatacapChunkLandRequirment=1; ChallengeCommissionRequirment=2; ChallengeAuditCollateralRequirment=3; ProofAuditCollateralRequirment=4; DisputeAuditCollateralRequirment=5)", @@ -192,29 +224,29 @@ const argv = yargs export async function run(context: Context) { switch (argv._[0]) { case "submitDatasetMetadata": - await submitDatasetMetadata({ + await new DatasetMetadatas().submitDatasetMetadata({ context, path: String(argv.path), }) break case "updateDatasetTimeoutParameters": console.log( - await updateDatasetTimeoutParameters({ + await new DatasetMetadatas().updateDatasetTimeoutParameters({ context, datasetId: Number(argv.datasetId), - proofBlockCount: argv.proofBlockCount as bigint, - auditBlockCount: argv.auditBlockCount as bigint, + proofBlockCount: BigInt(String(argv.proofBlockCount)), + auditBlockCount: BigInt(String(argv.auditBlockCount)), }) ) break case "submitDatasetReplicaRequirements": - await submitDatasetReplicaRequirements({ + await new DatasetMetadatas().submitDatasetReplicaRequirements({ context, path: String(argv.path), }) break case "submitDatasetProof": - await submitDatasetProof({ + await new DatasetProofs().submitDatasetProof({ context, datasetId: Number(argv.datasetId), dataType: Number(argv.dataType), @@ -223,31 +255,56 @@ export async function run(context: Context) { chunk: Number(argv.chunk), }) break + case "completeEscrow": + await new DatasetProofs().completeEscrow({ + context, + datasetId: Number(argv.datasetId), + }) + break + case "submitDatasetProofCompleted": + await new DatasetProofs().submitDatasetProofCompleted({ + context, + datasetId: Number(argv.datasetId), + }) + break + case "auditorStake": + await new DatasetProofs().auditorStake({ + context, + datasetId: Number(argv.datasetId), + }) + break case "submitDatasetChallengeProofs": - await submitDatasetChallengeProofs({ + await new DatasetProofs().submitDatasetChallengeProofs({ context, datasetId: Number(argv.datasetId), path: String(argv.path), }) break case "deposit": - await deposit({ + await new Finance().deposit({ context, datasetId: Number(argv.datasetId), matchingId: Number(argv.matchingId), owner: String(argv.owner), token: String(argv.token), - amount: argv.amount as bigint, + amount: BigInt(String(argv.amount)), + }) + break + case "getDatasetState": + await new DatasetMetadatas().getDatasetState({ + context, + datasetId: Number(argv.datasetId), }) break case "getEscrowRequirement": console.log( "amount: ", - await getEscrowRequirement({ + await new Finance().getEscrowRequirement({ context, datasetId: Number(argv.datasetId), size: Number(argv.size), type: Number(argv.type), + replicasCount: Number(argv.replicasCount), }) ) break diff --git 
a/src/dataset/metadata/repo/index.ts b/src/dataset/metadata/repo/index.ts
index 3fbe0ef..f2c8b68 100644
--- a/src/dataset/metadata/repo/index.ts
+++ b/src/dataset/metadata/repo/index.ts
@@ -20,112 +20,133 @@
 import fs from "fs"
 
 import { DatasetState } from "@dataswapjs/dataswapjs"
-import { handleEvmError } from "../../../shared/utils/utils"
-import { chainSuccessInterval } from "../../../shared/constant"
+import { handleEvmError, logMethodCall } from "../../../shared/utils/utils"
+import { chainSuccessInterval, blockPeriod } from "../../../shared/constant"
 import { DatasetMetadata, DatasetReplicaRequirements } from "../types"
 
 import { Context } from "../../../shared/context"
 
 /**
- * Submits dataset metadata to the blockchain.
- * @param options - The options object containing the context and file path.
- * @returns A promise indicating whether the submission was successful.
+ * Represents a collection of dataset metadata related operations.
  */
-export async function submitDatasetMetadata(options: {
-    context: Context
-    path: string
-}): Promise<void> {
-    console.log(
-        "Start submitDatasetMetadata:",
-        "network:",
-        options.context.network,
-        "path:",
-        options.path
-    )
-
-    const datasetMetadata = JSON.parse(
-        fs.readFileSync(options.path).toString()
-    ) as DatasetMetadata
-
-    let datasetId
-    if (
-        await handleEvmError(
-            options.context.evm.datasetMetadata.hasDatasetMetadata(
-                datasetMetadata.accessMethod
+export class DatasetMetadatas {
+    /**
+     * Submits dataset metadata to the blockchain.
+     * @param options - The options object containing the context and file path.
+     * @returns A promise resolving to the dataset ID and the effective timeout parameters.
+     */
+    @logMethodCall(["context"])
+    async submitDatasetMetadata(options: {
+        context: Context
+        path: string
+    }): Promise<{
+        datasetId: number
+        proofBlockCount: bigint
+        auditBlockCount: bigint
+    }> {
+        const datasetMetadata = JSON.parse(
+            fs.readFileSync(options.path).toString()
+        ) as DatasetMetadata
+
+        let datasetId
+        if (
+            await handleEvmError(
+                options.context.evm.datasetMetadata.hasDatasetMetadata(
+                    datasetMetadata.accessMethod
+                )
             )
-        )
-    ) {
-        console.log("Dataset metadata had submited")
-        datasetId = await handleEvmError(
-            options.context.evm.datasetMetadata.getDatasetIdForAccessMethod(
-                datasetMetadata.accessMethod
+        ) {
+            console.log("Dataset metadata has already been submitted")
+            datasetId = Number(
+                await handleEvmError(
+                    options.context.evm.datasetMetadata.getDatasetIdForAccessMethod(
+                        datasetMetadata.accessMethod
+                    )
+                )
             )
-        )
-    } else {
+        } else {
+            options.context.evm.datasetMetadata
+                .getWallet()
+                .add(process.env.storageClientPrivateKey!)
+            const tx = await handleEvmError(
+                options.context.evm.datasetMetadata.submitDatasetMetadata(
+                    datasetMetadata.client,
+                    datasetMetadata.title,
+                    datasetMetadata.industry,
+                    datasetMetadata.name,
+                    datasetMetadata.description,
+                    datasetMetadata.source,
+                    datasetMetadata.accessMethod,
+                    datasetMetadata.sizeInBytes,
+                    datasetMetadata.isPublic,
+                    datasetMetadata.version
+                )
+            )
+
+            // Get transaction receipt and event arguments
+            const receipt =
+                await options.context.evm.datasetMetadata.getTransactionReceipt(
+                    tx.hash
+                )
+
+            const ret = options.context.evm.datasetMetadata.getEvmEventArgs(
+                receipt!,
+                "DatasetMetadataSubmitted"
+            )
+
+            datasetId = Number(ret.data.datasetId)
+        }
+
+        const datasetTimeoutParameters =
+            await this.updateDatasetTimeoutParameters({
+                context: options.context,
+                datasetId,
+                proofBlockCount: datasetMetadata.proofBlockCount,
+                auditBlockCount: datasetMetadata.auditBlockCount,
+            })
+
+        return {
+            datasetId,
+            proofBlockCount: datasetTimeoutParameters.proofBlockCount,
+            auditBlockCount: datasetTimeoutParameters.auditBlockCount,
+        }
+    }
+
+    /**
+     * Updates the dataset timeout parameters on the blockchain.
+     * @param options - The options object containing the context, dataset ID, and timeout parameters.
+     * @returns A promise resolving to the update state and the effective timeout parameters.
+     */
+    @logMethodCall(["context"])
+    async updateDatasetTimeoutParameters(options: {
+        context: Context
+        datasetId: number
+        proofBlockCount: bigint
+        auditBlockCount: bigint
+    }): Promise<{
+        state: boolean
+        proofBlockCount: bigint
+        auditBlockCount: bigint
+    }> {
         options.context.evm.datasetMetadata
             .getWallet()
             .add(process.env.storageClientPrivateKey!)
-        datasetId = await handleEvmError(
-            options.context.evm.datasetMetadata.submitDatasetMetadata(
-                datasetMetadata.client,
-                datasetMetadata.title,
-                datasetMetadata.industry,
-                datasetMetadata.name,
-                datasetMetadata.description,
-                datasetMetadata.source,
-                datasetMetadata.accessMethod,
-                datasetMetadata.sizeInBytes,
-                datasetMetadata.isPublic,
-                datasetMetadata.version
-            )
+
+        const minProofBlockCount = await handleEvmError(
+            options.context.evm.filplus.datasetRuleMinProofTimeout()
+        )
+        const minAuditBlockCount = await handleEvmError(
+            options.context.evm.filplus.datasetRuleMinAuditTimeout()
         )
-    }
 
-    const datasetTimeoutParameters = await updateDatasetTimeoutParameters({
-        context: options.context,
-        datasetId,
-        proofBlockCount: datasetMetadata.proofBlockCount,
-        auditBlockCount: datasetMetadata.auditBlockCount,
-    })
-
-    console.log({
-        datasetId,
-        proofBlockCount: datasetTimeoutParameters.proofBlockCount,
-        auditBlockCount: datasetTimeoutParameters.auditBlockCount,
-    })
-}
+        options.proofBlockCount =
+            options.proofBlockCount > minProofBlockCount
+                ? options.proofBlockCount
+                : minProofBlockCount + BigInt(1)
+        options.auditBlockCount =
+            options.auditBlockCount > minAuditBlockCount
+                ? options.auditBlockCount
+                : minAuditBlockCount + BigInt(1)
 
-/**
- * Update dataset timeout parameters to the blockchain.
- * @param options - The options object containing the context and file path.
- * @returns A promise indicating whether the submission was successful.
- */ -export async function updateDatasetTimeoutParameters(options: { - context: Context - datasetId: number - proofBlockCount: bigint - auditBlockCount: bigint -}): Promise<{ - state: boolean - proofBlockCount: bigint - auditBlockCount: bigint -}> { - console.log( - "Start updateDatasetTimeoutParameters:", - "network:", - options.context.network, - "datasetId", - options.datasetId, - "proofBlockCount:", - options.proofBlockCount, - "auditBlockCount", - options.auditBlockCount - ) - - options.context.evm.datasetMetadata - .getWallet() - .add(process.env.storageClientPrivateKey!) - - if (await isDatasetTimeoutParametersValid(options)) { const tx = await handleEvmError( options.context.evm.datasetMetadata.updateDatasetTimeoutParameters( options.datasetId, @@ -134,93 +155,85 @@ export async function updateDatasetTimeoutParameters(options: { ) ) // Wait chain success interval - await handleEvmError( - options.context.evm.datasetMetadata.waitForBlockHeight( - tx.height + chainSuccessInterval + await options.context.evm.datasetMetadata.waitForBlockHeight( + tx.blockNumber + chainSuccessInterval, + blockPeriod + ) + + const datasetTimeoutParameters = await handleEvmError( + options.context.evm.datasetMetadata.getDatasetTimeoutParameters( + options.datasetId ) ) + const state = + options.proofBlockCount == + datasetTimeoutParameters.proofBlockCount && + options.auditBlockCount == datasetTimeoutParameters.auditBlockCount + ? true + : false + return { + state, + proofBlockCount: datasetTimeoutParameters.proofBlockCount, + auditBlockCount: datasetTimeoutParameters.auditBlockCount, + } } - const datasetTimeoutParameters = await handleEvmError( - options.context.evm.datasetMetadata.getDatasetTimeoutParameters( - options.datasetId + /** + * Submits dataset replica requirements to the blockchain. + * @param options - The options object containing the context and file path. + * @returns A promise indicating whether the submission was successful. + */ + @logMethodCall(["context"]) + async submitDatasetReplicaRequirements(options: { + context: Context + path: string + }): Promise { + const datasetReplicaRequirements = JSON.parse( + fs.readFileSync(options.path).toString() + ) as DatasetReplicaRequirements + + const state = await handleEvmError( + options.context.evm.datasetMetadata.getDatasetState( + datasetReplicaRequirements.datasetId + ) ) - ) - const state = - options.proofBlockCount == datasetTimeoutParameters.proofBlockCount && - options.auditBlockCount == datasetTimeoutParameters.auditBlockCount - ? true - : false - return { - state, - proofBlockCount: datasetTimeoutParameters.proofBlockCount, - auditBlockCount: datasetTimeoutParameters.auditBlockCount, - } -} + if (state != DatasetState.MetadataSubmitted) { + console.log("Dataset state is not MetadataSubmitted, do nothing~") + return true + } -/** - * Submits dataset replica requirements to the blockchain. - * @param options - The options object containing the context and file path. - * @returns A promise indicating whether the submission was successful. 
- */
-export async function submitDatasetReplicaRequirements(options: {
-    context: Context
-    path: string
-}): Promise<boolean> {
-    console.log(
-        "Start submitDatasetReplicaRequirements:",
-        "network:",
-        options.context.network,
-        "path:",
-        options.path
-    )
-
-    const datasetReplicaRequirements = JSON.parse(
-        fs.readFileSync(options.path).toString()
-    ) as DatasetReplicaRequirements
-
-    const state = await handleEvmError(
-        options.context.evm.datasetMetadata.getDatasetState(
-            datasetReplicaRequirements.datasetId
+        options.context.evm.datasetRequirement
+            .getWallet()
+            .add(process.env.storageClientPrivateKey!)
+        await handleEvmError(
+            options.context.evm.datasetRequirement.submitDatasetReplicaRequirements(
+                datasetReplicaRequirements.datasetId,
+                datasetReplicaRequirements.dataPreparers,
+                datasetReplicaRequirements.storageProviders,
+                datasetReplicaRequirements.regions,
+                datasetReplicaRequirements.countrys,
+                datasetReplicaRequirements.citys,
+                datasetReplicaRequirements.amount
+            )
         )
-    )
-    if (state != DatasetState.MetadataSubmitted) {
-        console.log("Dataset state is not MetadataSubmitted, do nothing~")
+        return true
     }
 
-    options.context.evm.datasetRequirement
-        .getWallet()
-        .add(process.env.storageClientPrivateKey!)
-    await handleEvmError(
-        options.context.evm.datasetRequirement.submitDatasetReplicaRequirements(
-            datasetReplicaRequirements.datasetId,
-            datasetReplicaRequirements.dataPreparers,
-            datasetReplicaRequirements.storageProviders,
-            datasetReplicaRequirements.regions,
-            datasetReplicaRequirements.countrys,
-            datasetReplicaRequirements.citys,
-            datasetReplicaRequirements.amount
+    /**
+     * Gets the dataset state from the blockchain.
+     * @param options - The options object containing the context and dataset ID.
+     * @returns A promise resolving to the dataset state.
+     */
+    @logMethodCall(["context"])
+    async getDatasetState(options: {
+        context: Context
+        datasetId: number
+    }): Promise<DatasetState> {
+        return await handleEvmError(
+            options.context.evm.datasetMetadata.getDatasetState(
+                options.datasetId
+            )
         )
-    )
-
-    return true
-}
-
-async function isDatasetTimeoutParametersValid(options: {
-    context: Context
-    proofBlockCount: bigint
-    auditBlockCount: bigint
-}): Promise<boolean> {
-    const minProofBlockCount = await handleEvmError(
-        options.context.evm.filplus.datasetRuleMinProofTimeout()
-    )
-    const minAuditBlockCount = await handleEvmError(
-        options.context.evm.filplus.datasetRuleMinAuditTimeout()
-    )
-
-    return options.proofBlockCount >= minProofBlockCount &&
-        options.auditBlockCount >= minAuditBlockCount
-        ? true
-        : false
+    }
 }
diff --git a/src/dataset/proof/repo/index.ts b/src/dataset/proof/repo/index.ts
index 5fff1a4..707427a 100644
--- a/src/dataset/proof/repo/index.ts
+++ b/src/dataset/proof/repo/index.ts
@@ -23,342 +23,415 @@ import fs from "fs"
 
 import {
     chainSuccessInterval,
+    blockPeriod,
     defaultEthAddress,
 } from "../../../shared/constant"
-import { handleEvmError, FileLock } from "../../../shared/utils/utils"
+import {
+    handleEvmError,
+    FileLock,
+    logMethodCall,
+} from "../../../shared/utils/utils"
 import { DatasetProof, DatasetProofSubmitInfo } from "../types"
 
 import { Context } from "../../../shared/context"
 
 /**
- * Submits the dataset challenge proof to the blockchain network.
- * @param network The network to submit the dataset proof to.
- * @param datasetId The ID of the dataset.
- * @param path The file path of the dataset proof.
- * @returns A promise that resolves to true if the submission is successful, otherwise false.
+ * Represents a collection of methods for interacting with dataset proofs on the blockchain network. */ -export async function submitDatasetChallengeProofs(options: { - context: Context - datasetId: number - path: string -}): Promise { - const lock = new FileLock(String(options.datasetId) + options.path) - if (!lock.acquireLock()) { - console.log( - "Failed to acquire lock, another process may be using the file" +export class DatasetProofs { + /** + * Submits the dataset proof to the blockchain network. + * @param network The network to submit the dataset proof to. + * @param datasetId The ID of the dataset. + * @param dataType The type of the dataset. + * @param mappingFilesAccessMethod The method to access mapping files. + * @param path The file path of the dataset proof. + * @param chunk The number of dataset proof chunks to submit at a time. + * @returns A promise that resolves to true if the submission is successful, otherwise false. + */ + @logMethodCall(["context"]) + async submitDatasetProof(options: { + context: Context + datasetId: number + dataType: DataType + mappingFilesAccessMethod: string + path: string + chunk: number + }): Promise { + const lock = new FileLock( + String(options.datasetId) + String(options.dataType) ) - } + if (!lock.acquireLock()) { + console.log( + "Failed to acquire lock, another process may be using the file" + ) + } - try { - console.log( - "Start submitDatasetChallengeProofs:", - "network:", - options.context.network, - "datasetId:", - options.datasetId, - "dataType:", - "path:", - options.path - ) + try { + const submitInfo = new DatasetProofSubmitInfo({ + datasetId: options.datasetId, + dataType: options.dataType, + mappingFilesAccessMethod: options.mappingFilesAccessMethod, + chunk: options.chunk, + completed: false, + leafIndex: 0, + leafHashes: [], + leafSizes: [], + }) + + if ( + !(await this.checkSubmissionProofsCriteria( + options.context.evm.datasetProof, + options.datasetId, + options.dataType + )) + ) { + return false + } + + const datasetProof = JSON.parse( + fs.readFileSync(options.path).toString() + ) - const datasetChallengeProof = JSON.parse( - fs.readFileSync(options.path).toString() - ) + options.context.evm.datasetProof + .getWallet() + .add(process.env.datasetPreparerPrivateKey!) + + if ( + !(await this.handlerSubmitDatasetProofRoot({ + context: options.context, + submitInfo, + datasetProof, + })) + ) { + return false + } + + if ( + !(await this.handlerSubmitDatasetProof({ + context: options.context, + submitInfo, + datasetProof, + })) + ) { + return false + } - const criteria = await checkSubmissionChallengeProofsCriteria( - options.context.evm.datasetChallenge, - options.datasetId, - process.env.datasetAuditerAccount!, - datasetChallengeProof.RandomSeed - ) - if (!criteria) { - return false + return true + } finally { + lock.releaseLock() } + } - options.context.evm.datasetChallenge + /** + * Submits the completion of dataset proof for a given dataset ID. + * + * @param options - The options object containing the context and dataset ID. + * @returns A Promise that resolves when the dataset proof completion is submitted successfully. + */ + @logMethodCall(["context"]) + async submitDatasetProofCompleted(options: { + context: Context + datasetId: number + }): Promise { + options.context.evm.datasetProof .getWallet() .add(process.env.datasetAuditerPrivateKey!) 
await handleEvmError( - options.context.evm.datasetChallenge.submitDatasetChallengeProofs( - datasetChallengeProof.DatasetId, - datasetChallengeProof.RandomSeed, - datasetChallengeProof.Leaves, - datasetChallengeProof.Siblings, - datasetChallengeProof.Paths + options.context.evm.datasetProof.submitDatasetProofCompleted( + options.datasetId ) ) - return true - } finally { - lock.releaseLock() } -} -/** - * Submits the dataset proof to the blockchain network. - * @param network The network to submit the dataset proof to. - * @param datasetId The ID of the dataset. - * @param dataType The type of the dataset. - * @param mappingFilesAccessMethod The method to access mapping files. - * @param path The file path of the dataset proof. - * @param chunk The number of dataset proof chunks to submit at a time. - * @returns A promise that resolves to true if the submission is successful, otherwise false. - */ -export async function submitDatasetProof(options: { - context: Context - datasetId: number - dataType: DataType - mappingFilesAccessMethod: string - path: string - chunk: number -}): Promise { - const lock = new FileLock( - String(options.datasetId) + String(options.dataType) - ) - if (!lock.acquireLock()) { - console.log( - "Failed to acquire lock, another process may be using the file" - ) - } + /** + * Submits the dataset challenge proof to the blockchain network. + * @param network The network to submit the dataset proof to. + * @param datasetId The ID of the dataset. + * @param path The file path of the dataset proof. + * @returns A promise that resolves to true if the submission is successful, otherwise false. + */ + @logMethodCall(["context"]) + async submitDatasetChallengeProofs(options: { + context: Context + datasetId: number + path: string + }): Promise { + const lock = new FileLock(String(options.datasetId) + options.path) + if (!lock.acquireLock()) { + console.log( + "Failed to acquire lock, another process may be using the file" + ) + } - try { - console.log( - "Start submitDatasetProof:", - "network:", - options.context.network, - "datasetId:", - options.datasetId, - "dataType:", - options.dataType, - "mappingFilesAccessMethod:", - options.mappingFilesAccessMethod, - "path:", - options.path, - "chunk:", - options.chunk - ) + try { + const datasetChallengeProof = JSON.parse( + fs.readFileSync(options.path).toString() + ) - const submitInfo = new DatasetProofSubmitInfo({ - datasetId: options.datasetId, - dataType: options.dataType, - mappingFilesAccessMethod: options.mappingFilesAccessMethod, - chunk: options.chunk, - completed: false, - leafIndex: 0, - leafHashes: [], - leafSizes: [], - }) + if ( + !(await this.checkSubmissionChallengeProofsCriteria( + options.context.evm.datasetChallenge, + options.datasetId, + process.env.datasetAuditerAccount!, + datasetChallengeProof.RandomSeed + )) + ) { + return false + } + + options.context.evm.datasetChallenge + .getWallet() + .add(process.env.datasetAuditerPrivateKey!) 
+ await handleEvmError( + options.context.evm.datasetChallenge.submitDatasetChallengeProofs( + datasetChallengeProof.DatasetId, + datasetChallengeProof.RandomSeed, + datasetChallengeProof.Leaves, + datasetChallengeProof.Siblings, + datasetChallengeProof.Paths + ) + ) - if ( - !(await checkSubmissionProofsCriteria( - options.context.evm.datasetProof, - options.datasetId, - options.dataType - )) - ) { return true + } finally { + lock.releaseLock() } - - const datasetProof = JSON.parse( - fs.readFileSync(options.path).toString() - ) - - return await handlerSubmitDatasetProof({ - context: options.context, - submitInfo, - datasetProof, - }) - } finally { - lock.releaseLock() } -} -/** - * Checks if the criteria for submitting dataset challenge proofs are met. - * @param datasetChallengeEvm - The Ethereum Virtual Machine instance for dataset challenge proofs. - * @param datasetId - The ID of the dataset. - * @param auditor - The auditor's address who submits the challenge proof. - * @param randomSeed - The random seed used in generating the challenge. - * @returns A Promise that resolves to a boolean indicating whether the criteria are met (true) or not (false). - */ -async function checkSubmissionChallengeProofsCriteria( - datasetChallengeEvm: any, - datasetId: number, - auditor: string, - randomSeed: bigint -): Promise { - if ( + /** + * Auditor stake to the blockchain network. + * @param network The network to submit the dataset proof to. + * @param datasetId The ID of the dataset. + * @returns A promise that resolves to true if the submission is successful, otherwise false. + */ + @logMethodCall(["context"]) + async auditorStake(options: { + context: Context + datasetId: number + }): Promise { + options.context.evm.datasetChallenge + .getWallet() + .add(process.env.datasetAuditerPrivateKey!) await handleEvmError( - datasetChallengeEvm.isDatasetChallengeProofDuplicate( - datasetId, - auditor, - randomSeed + options.context.evm.datasetChallenge.auditorStake( + options.datasetId, + BigInt("1000000000000000000") ) ) - ) { - console.log("Dataset challenge proof had submited, do nothing~") - return false + return true } - if ( - !(await handleEvmError( - datasetChallengeEvm.isWinner(datasetId, auditor) - )) - ) { - console.log( - "Can't submit the dataset challenge proof(not the winner), do nothing~" + /** + * Complete escrow to the blockchain network. + * @param network The network to submit the dataset proof to. + * @param datasetId The ID of the dataset. + * @returns A promise that resolves to true if the submission is successful, otherwise false. + */ + @logMethodCall(["context"]) + async completeEscrow(options: { + context: Context + datasetId: number + }): Promise { + options.context.evm.datasetProof + .getWallet() + .add(process.env.datasetAuditerPrivateKey!) + await handleEvmError( + options.context.evm.datasetProof.completeEscrow(options.datasetId) ) - return false + return true } - return true -} + /** + * Handles the submission of the dataset proof root to the Ethereum Virtual Machine (EVM). + * @param submitInfo - Information about the dataset proof submission. + * @param datasetProof - The dataset proof data. + * @returns A promise resolving to true if the submission is successful, otherwise false. 
+ */ + private async handlerSubmitDatasetProofRoot(options: { + context: Context + submitInfo: DatasetProofSubmitInfo + datasetProof: DatasetProof + }): Promise { + const root = "" + /// TODO: Add getDatasetProofRoot function, https://github.com/dataswap/core/issues/354 + /*await handleEvmError( + options.context.evm.datasetProof.getDatasetProofRoot( + options.submitInfo.datasetId, + options.submitInfo.dataType + ) + )*/ + + if (root == "") { + console.log("Root is null, start submitDatasetProofRoot~") + + const tx = await handleEvmError( + options.context.evm.datasetProof.submitDatasetProofRoot( + options.submitInfo.datasetId, + options.submitInfo.dataType, + options.submitInfo.mappingFilesAccessMethod, + options.datasetProof.Root + ) + ) -/** - * Checks if the criteria for submitting dataset proofs are met. - * @param datasetProofEvm - The Ethereum Virtual Machine instance for dataset proofs. - * @param datasetId - The ID of the dataset. - * @param dataType - The type of the dataset. - * @returns A Promise that resolves to a boolean indicating whether the criteria are met (true) or not (false). - */ -async function checkSubmissionProofsCriteria( - datasetProofEvm: any, - datasetId: number, - dataType: DataType -): Promise { - if ( - await handleEvmError( - datasetProofEvm.isDatasetProofallCompleted(datasetId, dataType) - ) - ) { - console.log("All dataset proof had completed, do nothing~") - return false - } + // Wait chain success interval + await options.context.evm.datasetMetadata.waitForBlockHeight( + tx.blockNumber + chainSuccessInterval, + blockPeriod + ) - return true -} + const submitter = await handleEvmError( + options.context.evm.datasetProof.getDatasetProofSubmitter( + options.submitInfo.datasetId + ) + ) + if (submitter == defaultEthAddress) { + console.error("submitDatasetProofRoot fail") + return false + } + console.log("submitDatasetProofRoot success") + } else { + console.log("submitDatasetProofRoot had submited") + } -/** - * Handles the submission of the dataset proof. - * @param submitInfo Information about the dataset proof submission. - * @param datasetProof The dataset proof data. - * @returns A promise that resolves to true if the submission is successful, otherwise false. - */ -async function handlerSubmitDatasetProof(options: { - context: Context - submitInfo: DatasetProofSubmitInfo - datasetProof: DatasetProof -}): Promise { - options.context.evm.datasetProof - .getWallet() - .add(process.env.datasetPreparerPrivateKey!) - - if (!handlerSubmitDatasetProofRoot(options)) { - return false + return true } - const index = await handleEvmError( - options.context.evm.datasetProof.getDatasetProofCount( - options.submitInfo.datasetId, - options.submitInfo.dataType - ) - ) - options.submitInfo.updateleafIndex(index) - - while (!options.submitInfo.completed) { - options.submitInfo.updateDatasetProof(options.datasetProof) - console.log( - "Start submitDatasetProof, leafIndex:", - options.submitInfo.leafIndex - ) - const tx = await handleEvmError( - options.context.evm.datasetProof.submitDatasetProof( - options.submitInfo.datasetId, - options.submitInfo.dataType, - options.submitInfo.leafHashes, - options.submitInfo.leafIndex, - options.submitInfo.leafSizes, - options.submitInfo.completed + /** + * Handles the submission of the dataset proof. + * @param submitInfo Information about the dataset proof submission. + * @param datasetProof The dataset proof data. + * @returns A promise that resolves to true if the submission is successful, otherwise false. 
+ */ + private async handlerSubmitDatasetProof(options: { + context: Context + submitInfo: DatasetProofSubmitInfo + datasetProof: DatasetProof + }): Promise { + const index = Number( + await handleEvmError( + options.context.evm.datasetProof.getDatasetProofCount( + options.submitInfo.datasetId, + options.submitInfo.dataType + ) ) ) + options.submitInfo.updateLeafIndex(index) + + while (!options.submitInfo.completed) { + options.submitInfo.updateDatasetProof(options.datasetProof) + console.log( + "Start submitDatasetProof", + "submitInfo", + options.submitInfo + ) + const tx = await handleEvmError( + options.context.evm.datasetProof.submitDatasetProof( + options.submitInfo.datasetId, + options.submitInfo.dataType, + options.submitInfo.leafHashes, + options.submitInfo.leafIndex, + options.submitInfo.leafSizes, + options.submitInfo.completed + ) + ) - // Wait chain success interval - await handleEvmError( - options.context.evm.datasetProof.waitForBlockHeight( - tx.height + chainSuccessInterval + // Wait chain success interval + await options.context.evm.datasetMetadata.waitForBlockHeight( + tx.blockNumber + chainSuccessInterval, + blockPeriod ) - ) - const index = await handleEvmError( - options.context.evm.datasetProof.getDatasetProofCount( - options.submitInfo.datasetId, - options.submitInfo.dataType + const index = Number( + await handleEvmError( + options.context.evm.datasetProof.getDatasetProofCount( + options.submitInfo.datasetId, + options.submitInfo.dataType + ) + ) ) - ) - const current = Math.min( - options.submitInfo.leafIndex + options.submitInfo.chunk, - options.datasetProof.LeafHashes.length - ) - if (index !== current) { - console.log("SubmitDatasetProof fail, leafIndex:", index) - return false + const current = Math.min( + options.submitInfo.leafIndex + options.submitInfo.chunk, + options.datasetProof.LeafHashes.length + ) + if (index !== current) { + console.log("SubmitDatasetProof fail, leafIndex:", index) + return false + } + + console.log( + "SubmitDatasetProof success, leafIndex:", + options.submitInfo.leafIndex + ) + + options.submitInfo.updateLeafIndex(index) } - options.submitInfo.updateleafIndex(index) - console.log("SubmitDatasetProof success, leafIndex:", index) - console.log("submitInfo", options.submitInfo) + return true } - return true -} - -/** - * Handles the submission of the dataset proof root to the Ethereum Virtual Machine (EVM). - * @param submitInfo - Information about the dataset proof submission. - * @param datasetProof - The dataset proof data. - * @returns A promise resolving to true if the submission is successful, otherwise false. - */ -async function handlerSubmitDatasetProofRoot(options: { - context: Context - submitInfo: DatasetProofSubmitInfo - datasetProof: DatasetProof -}): Promise { - const submitter = await handleEvmError( - options.context.evm.datasetProof.getDatasetProofSubmitter( - options.submitInfo.datasetId - ) - ) - if (submitter == defaultEthAddress) { - console.log("Submitter is null, start submitDatasetProofRoot~") - - const tx = await handleEvmError( - options.context.evm.datasetProof.submitDatasetProofRoot( - options.submitInfo.datasetId, - options.submitInfo.dataType, - options.submitInfo.mappingFilesAccessMethod, - options.datasetProof.Root + /** + * Checks if the criteria for submitting dataset challenge proofs are met. + * @param datasetChallengeEvm - The Ethereum Virtual Machine instance for dataset challenge proofs. + * @param datasetId - The ID of the dataset. 
+     * @param auditor - The auditor's address who submits the challenge proof.
+     * @param randomSeed - The random seed used in generating the challenge.
+     * @returns A Promise that resolves to a boolean indicating whether the criteria are met (true) or not (false).
+     */
+    private async checkSubmissionChallengeProofsCriteria(
+        datasetChallengeEvm: any,
+        datasetId: number,
+        auditor: string,
+        randomSeed: bigint
+    ): Promise<boolean> {
+        if (
+            await handleEvmError(
+                datasetChallengeEvm.isDatasetChallengeProofDuplicate(
+                    datasetId,
+                    auditor,
+                    randomSeed
+                )
             )
-        )
+        ) {
+            console.log("Dataset challenge proof has already been submitted, do nothing~")
+            return false
+        }
 
-        // Wait chain success interval
-        await handleEvmError(
-            options.context.evm.datasetProof.waitForBlockHeight(
-                tx.height + chainSuccessInterval
+        if (
+            !(await handleEvmError(
+                datasetChallengeEvm.isWinner(datasetId, auditor)
+            ))
+        ) {
+            console.log(
+                "Can't submit the dataset challenge proof (not the winner), do nothing~"
             )
-        )
+            return false
+        }
+
+        return true
+    }
 
-        const submitter = await handleEvmError(
-            options.context.evm.datasetProof.getDatasetProofSubmitter(
-                options.submitInfo.datasetId
+    /**
+     * Checks if the criteria for submitting dataset proofs are met.
+     * @param datasetProofEvm - The Ethereum Virtual Machine instance for dataset proofs.
+     * @param datasetId - The ID of the dataset.
+     * @param dataType - The type of the dataset.
+     * @returns A Promise that resolves to a boolean indicating whether the criteria are met (true) or not (false).
+     */
+    private async checkSubmissionProofsCriteria(
+        datasetProofEvm: any,
+        datasetId: number,
+        dataType: DataType
+    ): Promise<boolean> {
+        if (
+            await handleEvmError(
+                datasetProofEvm.isDatasetProofallCompleted(datasetId, dataType)
             )
-        )
-        if (submitter == defaultEthAddress) {
-            console.error("submitDatasetProofRoot fail")
+        ) {
+            console.log("All dataset proofs have been completed, do nothing~")
             return false
         }
-        console.log("submitDatasetProofRoot success")
-    } else {
-        console.log("submitDatasetProofRoot had submited")
-    }
-    return true
+        return true
+    }
 }
diff --git a/src/dataset/proof/types/index.ts b/src/dataset/proof/types/index.ts
index 9b92e77..35b4a9a 100644
--- a/src/dataset/proof/types/index.ts
+++ b/src/dataset/proof/types/index.ts
@@ -56,10 +56,18 @@ export class DatasetProofSubmitInfo extends Entity<DatasetProofSubmitInfo> {
         })
     }
 
-    updateleafIndex(leafIndex: number) {
+    /**
+     * Updates the leaf index for dataset proof.
+     * @param leafIndex - The new leaf index to be set.
+     */
+    updateLeafIndex(leafIndex: number) {
         this.leafIndex = leafIndex
     }
 
+    /**
+     * Updates the dataset proof based on the provided dataset proof object.
+     * @param datasetProof - The dataset proof object to update from.
+     */
     updateDatasetProof(datasetProof: DatasetProof) {
         if (this.leafIndex + this.chunk >= datasetProof.LeafHashes.length) {
             this.leafHashes = datasetProof.LeafHashes.slice(
diff --git a/src/finance/repo/index.ts b/src/finance/repo/index.ts
index 9064099..358f35c 100644
--- a/src/finance/repo/index.ts
+++ b/src/finance/repo/index.ts
@@ -19,194 +19,181 @@
 ********************************************************************************/
 
 import { Context } from "../../shared/context"
-import { handleEvmError } from "../../shared/utils/utils"
+import { handleEvmError, logMethodCall } from "../../shared/utils/utils"
 import { Types } from "../types"
 
 /**
- * Deposits funds into a smart contract for a specific dataset and matching ID.
- * @param options - The options object containing the context, dataset ID, matching ID, owner, and token. - * @returns A promise indicating whether the deposit was successful. + * Represents a collection of methods for interacting with finance on the blockchain network. */ -export async function deposit(options: { - context: Context - datasetId: number - matchingId: number - owner: string - token: string - amount: bigint -}) { - console.log( - "Start deposit:", - "datasetId:", - options.datasetId, - "matchingId:", - options.matchingId, - "owner:", - options.owner, - "token:", - options.token, - "amount:", - options.amount - ) - - options.context.evm.finance.getWallet().add(process.env.depositPrivateKey!) - - await handleEvmError( - options.context.evm.finance.deposit( - options.datasetId, - options.matchingId, - options.owner, - options.token, - { - value: options.amount, - } +export class Finance { + /** + * Deposits funds into a smart contract for a specific dataset and matching ID. + * @param options - The options object containing the context, dataset ID, matching ID, owner, and token. + * @returns A promise indicating whether the deposit was successful. + */ + @logMethodCall(["context"]) + async deposit(options: { + context: Context + datasetId: number + matchingId: number + owner: string + token: string + amount: bigint + }): Promise { + options.context.evm.finance + .getWallet() + .add(process.env.depositPrivateKey!) + + await handleEvmError( + options.context.evm.finance.deposit( + options.datasetId, + options.matchingId, + options.owner, + options.token, + { + value: options.amount, + } + ) ) - ) -} + return true + } -/** - * Retrieves the escrow requirement based on the provided options. - * @param options - The options object containing the context, dataset ID, size, and type. - * @returns A promise that resolves to the escrow requirement as a bigint. - */ -export async function getEscrowRequirement(options: { - context: Context - datasetId: number - size: number - type: number -}): Promise { - console.log( - "Start getEscrowRequirement:", - "datasetId:", - options.datasetId, - "type:", - options.type - ) - - switch (options.type) { - case Types.DatacapCollateralRequirment: - return await datacapCollateralRequirment(options) - - case Types.DatacapChunkLandRequirment: - return await datacapChunkLandRequirment(options) - - case Types.ChallengeCommissionRequirment: - return await challengeCommissionRequirment(options) - - case Types.ChallengeAuditCollateralRequirment: - return await challengeAuditCollateralRequirment(options) - - case Types.ProofAuditCollateralRequirment: - return await proofAuditCollateralRequirment(options) - - case Types.DisputeAuditCollateralRequirment: - return await disputeAuditCollateralRequirment(options) - default: - console.log("Not support type: ", options.type) - return BigInt(0) + /** + * Retrieves the escrow requirement based on the provided options. + * @param options - The options object containing the context, dataset ID, size, and type. + * @returns A promise that resolves to the escrow requirement as a bigint. 
+     */
+    @logMethodCall(["context"])
+    async getEscrowRequirement(options: {
+        context: Context
+        datasetId: number
+        size: number
+        type: number
+        replicasCount: number
+    }): Promise<bigint> {
+        switch (options.type) {
+            case Types.DatacapCollateralRequirment:
+                return await this.datacapCollateralRequirment(options)
+
+            case Types.DatacapChunkLandRequirment:
+                return await this.datacapChunkLandRequirment(options)
+
+            case Types.ChallengeCommissionRequirment:
+                return await this.challengeCommissionRequirment(options)
+
+            case Types.ChallengeAuditCollateralRequirment:
+                return await this.challengeAuditCollateralRequirment(options)
+
+            case Types.ProofAuditCollateralRequirment:
+                return await this.proofAuditCollateralRequirment(options)
+
+            case Types.DisputeAuditCollateralRequirment:
+                return await this.disputeAuditCollateralRequirment(options)
+            default:
+                console.log("Unsupported type: ", options.type)
+                return BigInt(0)
+        }
+    }
 
-/**
- * Retrieves the escrow requirement for datacap collateral.
- * @param options - The options object containing the context, dataset ID, and size.
- * @returns A promise that resolves to the escrow requirement as a bigint.
- */
-async function datacapCollateralRequirment(options: {
-    context: Context
-    datasetId: number
-    size: number
-}): Promise<bigint> {
-    const replicasCount = await handleEvmError(
-        options.context.evm.datasetRequirement.getDatasetReplicasCount(
-            options.datasetId
+    /**
+     * Retrieves the escrow requirement for datacap collateral.
+     * @param options - The options object containing the context, dataset ID, size, and replicas count.
+     * @returns A promise that resolves to the escrow requirement as a bigint.
+     */
+    private async datacapCollateralRequirment(options: {
+        context: Context
+        datasetId: number
+        size: number
+        replicasCount: number
+    }): Promise<bigint> {
+        const price = await handleEvmError(
+            options.context.evm.filplus.getDatacapPricePreByte()
         )
-    )
-    const price = await handleEvmError(
-        options.context.evm.filplus.getDatacapPricePreByte()
-    )
-
-    return BigInt(options.size) * replicasCount * price
-}
+        return BigInt(options.size) * BigInt(options.replicasCount) * price
+    }
 
-/**
- * Retrieves the escrow requirement for datacap chunk land.
- * @param options - The options object containing the context, dataset ID, and size.
- * @returns A promise that resolves to the escrow requirement as a bigint.
- */
-async function datacapChunkLandRequirment(options: {
-    context: Context
-    datasetId: number
-    size: number
-}): Promise<bigint> {
-    const price = await handleEvmError(
-        options.context.evm.filplus.getDatacapChunkLandPricePreByte()
-    )
-    const maxAllocated = await handleEvmError(
-        options.context.evm.filplus.datacapRulesMaxAllocatedSizePerTime()
-    )
-
-    return BigInt(Math.min(options.size, maxAllocated)) * price
-}
+    /**
+     * Retrieves the escrow requirement for datacap chunk land.
+     * @param options - The options object containing the context, dataset ID, and size.
+     * @returns A promise that resolves to the escrow requirement as a bigint.
+     */
+    private async datacapChunkLandRequirment(options: {
+        context: Context
+        datasetId: number
+        size: number
+    }): Promise<bigint> {
+        const price = await handleEvmError(
+            options.context.evm.filplus.getDatacapChunkLandPricePreByte()
+        )
+        const maxAllocated = await handleEvmError(
+            options.context.evm.filplus.datacapRulesMaxAllocatedSizePerTime()
+        )
-/**
- * Retrieves the escrow requirement for challenge commission.
- * @param options - The options object containing the context and dataset ID.
- * @returns A promise that resolves to the escrow requirement as a bigint. - */ -async function challengeCommissionRequirment(options: { - context: Context - datasetId: number -}): Promise { - const submissionCount = await handleEvmError( - options.context.evm.datasetChallenge.getChallengeSubmissionCount( - options.datasetId + return BigInt(Math.min(options.size, maxAllocated)) * price + } + + /** + * Retrieves the escrow requirement for challenge commission. + * @param options - The options object containing the context and dataset ID. + * @returns A promise that resolves to the escrow requirement as a bigint. + */ + private async challengeCommissionRequirment(options: { + context: Context + datasetId: number + }): Promise { + const submissionCount = await handleEvmError( + options.context.evm.datasetChallenge.getChallengeSubmissionCount( + options.datasetId + ) + ) + const submiterCount = await handleEvmError( + options.context.evm.filplus.getChallengeProofsSubmiterCount() + ) + const price = await handleEvmError( + options.context.evm.filplus.getChallengeProofsPricePrePoint() ) - ) - const submiterCount = await handleEvmError( - options.context.evm.filplus.getChallengeProofsSubmiterCount() - ) - const price = await handleEvmError( - options.context.evm.filplus.getChallengeProofsPricePrePoint() - ) - - return BigInt(submissionCount) * submiterCount * price -} -/** - * Retrieves the escrow requirement for challenge audit collateral. - * @param options - The options object containing the context. - * @returns A promise that resolves to the escrow requirement as a bigint. - */ -async function challengeAuditCollateralRequirment(options: { - context: Context -}): Promise { - return await handleEvmError( - options.context.evm.filplus.getChallengeAuditFee() - ) -} + return BigInt(submissionCount) * submiterCount * price + } -/** - * Retrieves the escrow requirement for proof audit collateral. - * @param options - The options object containing the context. - * @returns A promise that resolves to the escrow requirement as a bigint. - */ -async function proofAuditCollateralRequirment(options: { - context: Context -}): Promise { - return await handleEvmError(options.context.evm.filplus.getProofAuditFee()) -} + /** + * Retrieves the escrow requirement for challenge audit collateral. + * @param options - The options object containing the context. + * @returns A promise that resolves to the escrow requirement as a bigint. + */ + private async challengeAuditCollateralRequirment(options: { + context: Context + }): Promise { + return await handleEvmError( + options.context.evm.filplus.getChallengeAuditFee() + ) + } -/** - * Retrieves the escrow requirement for dispute audit collateral. - * @param options - The options object containing the context. - * @returns A promise that resolves to the escrow requirement as a bigint. - */ -async function disputeAuditCollateralRequirment(options: { - context: Context -}): Promise { - return await handleEvmError( - options.context.evm.filplus.getDisputeAuditFee() - ) + /** + * Retrieves the escrow requirement for proof audit collateral. + * @param options - The options object containing the context. + * @returns A promise that resolves to the escrow requirement as a bigint. + */ + private async proofAuditCollateralRequirment(options: { + context: Context + }): Promise { + return await handleEvmError( + options.context.evm.filplus.getProofAuditFee() + ) + } + + /** + * Retrieves the escrow requirement for dispute audit collateral. 
+ * @param options - The options object containing the context. + * @returns A promise that resolves to the escrow requirement as a bigint. + */ + private async disputeAuditCollateralRequirment(options: { + context: Context + }): Promise { + return await handleEvmError( + options.context.evm.filplus.getDisputeAuditFee() + ) + } } diff --git a/src/shared/constant.ts b/src/shared/constant.ts index 40c4b95..75a9769 100644 --- a/src/shared/constant.ts +++ b/src/shared/constant.ts @@ -19,4 +19,5 @@ ********************************************************************************/ export const chainSuccessInterval = 6 // Default 6 blocknumber +export const blockPeriod = 30000 // Default block period 30s export const defaultEthAddress = "0x0000000000000000000000000000000000000000" diff --git a/src/shared/utils/utils.ts b/src/shared/utils/utils.ts index 262113d..5afe1db 100644 --- a/src/shared/utils/utils.ts +++ b/src/shared/utils/utils.ts @@ -72,3 +72,47 @@ export class FileLock { fs.unlinkSync(this.lockFilePath) } } + +/** + * A decorator function for logging method calls and their return values. + * @param propertiesToIgnore - The ignore properties. + * @returns The modified property descriptor with logging functionality. + */ +export function logMethodCall(propertiesToIgnore: string[] = []) { + return function ( + target: any, + propertyKey: string, + descriptor: PropertyDescriptor + ) { + const originalMethod = descriptor.value + descriptor.value = async function (...args: any[]) { + // Function to filter out specified properties from objects + const filterProperties = (arg: any): any => { + if ( + typeof arg === "object" && + arg !== null && + propertiesToIgnore.length > 0 + ) { + const filteredArg: any = {} + for (const prop in arg) { + if (!propertiesToIgnore.includes(prop)) { + filteredArg[prop] = arg[prop] + } + } + return filteredArg + } + return arg + } + + const loggedArgs = args.map(filterProperties) + + console.log(`Calling ${propertyKey} with arguments: `, loggedArgs) + + const result = await originalMethod.apply(this, args) + + console.log(`${propertyKey} returned: `, result) + return result + } + return descriptor + } +} diff --git a/testdata/challenges.proofs b/testdata/challenges.proofs index c6d76eb..925b07e 100644 --- a/testdata/challenges.proofs +++ b/testdata/challenges.proofs @@ -1,31 +1,43 @@ { - "RandomSeed": 12, + "RandomSeed": 6667, "Leaves": [ - "0x013204c9015958888dde0c3883b76c355b8bb4c18a93f383dfa376216bf92217", - "0xcdd6226db0e2e4fce0f7a85dc85abec8e5459cd995c9cda5bdb9050c1f04c009" + "0xf995504c8383cc2b3bb45b4779cdaf1486e79d7ed5fb1b430f6616a47646ac27" ], "Siblings": [ [ - "0x5e84848a0249009c04085ab51984df50e529cbb0e720a809b7e17c8537939a2c", - "0x555284a9266f22bf1c45e1a76b18d959c05d9c8cf71f05131f87690cd8dcda3f", - "0xbc592e7b86d5fc080dd9f3d5ab865ad3f6c6247c0504fafd351a31ee28823a2b", - "0x76c45d1f8f9ffb871622d3ecb845769b3888b3dd3c8a6c89998f0070f3cec307", - "0xa2a434b6373e7ff747a25b56e2e49484d57f313018b39f2ffc8da8fc4a143c15", - "0xb4d95bbb596f4ce35d1a144023e7d42dc99a6d8c63df68b71c66c8eb1ed4241d", - "0x9e431cbd232571b03d0456b79e429ec264cd94c9c4ac1c887717074bc47b521d" - ], - [ - "0x3aa265726f6f747381d82a5825000170122043901c908585d8e8cd803378cb16", - "0x0e1d97f6ba51c3447c922b3d2dcaeb7cd5ce5a1dcca1aed60b880fa54a91933d", - "0xbc592e7b86d5fc080dd9f3d5ab865ad3f6c6247c0504fafd351a31ee28823a2b", - "0x76c45d1f8f9ffb871622d3ecb845769b3888b3dd3c8a6c89998f0070f3cec307", - "0xa2a434b6373e7ff747a25b56e2e49484d57f313018b39f2ffc8da8fc4a143c15", - 
"0xb4d95bbb596f4ce35d1a144023e7d42dc99a6d8c63df68b71c66c8eb1ed4241d", - "0x9e431cbd232571b03d0456b79e429ec264cd94c9c4ac1c887717074bc47b521d" + "0xdb8e0c16a4bc41f606d795f3ecb8291e90064a765cb86370110a715df5cefa17", + "0x2509656b31e0eeefefe70a3bb8353fdcbffe6d11499291b62130ad7e55ad743a", + "0x62fc072222a2594fe6bfdd0f076789f589132f91c3580a00a381f7697f608510", + "0x7ec09b6732e11dd06f1b4752bea135d6a1e58827b65375ee7b1c46703749f92f", + "0x9fff2feacfb706b34da5785e4f750add0de5acc1cac4444f65625e4fa183ca23", + "0x6f2e1de9f76f706ec5001906b5e391b6f06b52a5e352554c38c90762e8622618", + "0xddf6a5dcb1f49fdb506c086059101c6960b154a2c78c44897f728dc08df74b37", + "0x1d9cb27bb5b69b732cded94bf38242fb13d79ff6fcb5f009bed439485be4010e", + "0x880bb43bcf5676b0a55fb9bf666a11ab13ece68c8603c50f06b365b0a2bfd33f", + "0x87a9354ef98fec4708f2770649b705dc72b24474b61259c7d6e366292276540d", + "0xe37e94c257b6f4a5b81d1dd5d0f1100d93b938c6eb04ba33890da5f765244212", + "0xf2be73cc27d9ce61dd6df2aca31546b75ee13e291bd62d58ca93e2bf65700335", + "0x9973a30efe858bc824f73ccd47c8d2878451d863d70c092802c5f0d8842e7734", + "0xc7fbea402946efd3381aa387c1eb35b9cc3b624c30e58f5a74f466533be1dc3e", + "0xeb74f3b134fd90601ed29a2f2f2be13bfad665dc86d41736fab0adc6b0577e12", + "0x7b8076966c3bd02d8f70972c25b8e8224a5f746f21aa8d03192ed4cf3f51ec2b", + "0x04aa6795609f85570b22925669f64b8b03a87cce67b4a5b375ead67711f7cc0a", + "0x16f8d9efc97b0a5f2d67eac6181b7f87f4adf22a27559edf2acaab70040b1532", + "0x942b04cc84e8f146febccc1cc643c323cb90532c06ec1170a996e36d21db7724", + "0xb5301938d777fd8f31df4c9940b3a615560a58222245ed1b171275a3daa22c38", + "0xd3aed6ee065e706d5dffb42a65b70a1319b21e1de9ac35ff20630d24580a8611", + "0xf49bc610254d30a83853d936db3ebb53a171aea891d5ec7d0d323e8e34e35034", + "0x9e5aa57eb08977bbb6848c24ec5eca9b0e0f9d7a2d82f49707a48c228b74893a", + "0x2b9f07a37cde7a31496d82df5a9d082a280d81e129bf7b47966f44b503b3370c", + "0x45cf0b972161c36efba1e89712a2af536efe1846fe262fb8b9de359dee246a25", + "0x83838ab93ce0035b2e685094b4513b393bbd1ddca489c53c1498206215554824", + "0x85021a45c2ffb001d3110eb25256da1e218e7bf93865ee2c754deaa1ae0c811f", + "0x981bf4e340746aca610c75b3ca479a5b4a444161f5b54085d1b037eb51c2e314", + "0x0fe01328e37377c43670414b374c4b91865bacfab1fd6a2f4054a77ab047c12e", + "0x61b1b8aa26d247e1953f9b7a5264c30536b3e4ff8fe2ee622f6901c9b176bb12" ] ], "Paths": [ - 125, - 126 +0x322df671 ] } \ No newline at end of file diff --git a/testdata/dataset.proof b/testdata/dataset.proof index 8edc1c5..da7b5fd 100644 --- a/testdata/dataset.proof +++ b/testdata/dataset.proof @@ -1,11 +1,11 @@ { - "Root": "0x7dd575404d59da492f4a43636347312c0e33b9c8c3499a00474a5c6b222d650d", + "Root": "0x7ea8b1c4a9f6045bdff6ab808a9e38b4a6f267744f2a263dd67978ab0589fb32", "LeafHashes": [ - "0xa86b1ec1e4b65a2ce73fa1f20acc2812291fde7702111ad93c12269948ee8c1b", - "0xe7e39e94f18949d335d609eaa7daa48a2887249e7155cf8c94a87f8ed6283639" + "0x27fd5dc2447936d8a7543bb4905f7b6bfe2808b11b4f394b99cd04f67bebd21e", + "0xc116b440005a3abd22fb54a37b84e5334dc706d2c2f32710b368fbb48166a12b", + "0xd2de6d2a02a289c3dd09042737c3619b2a9f56b4d28c3ae19fc16c66bf5e0920" ], "LeafSizes": [ - 2311, - 15973 + 18899714555, 3221542407, 24054182176 ] -} \ No newline at end of file +} diff --git a/testdata/datasetMetadata.json b/testdata/datasetMetadata.json index 0f9cd1f..3bb6714 100644 --- a/testdata/datasetMetadata.json +++ b/testdata/datasetMetadata.json @@ -1,14 +1,14 @@ { - "client":1, + "client":89321, "title": "string", "industry": "string", "name": "string", "description": "string", "source": "string", - "accessMethod": "string", + 
"accessMethod": "aaxxaaaaaaastringgggggggg", "sizeInBytes": 1, "isPublic": true, "version": 1, "proofBlockCount": 0, "auditBlockCount": 0 -} \ No newline at end of file +} diff --git a/testdata/datasetReplicaRequirements.json b/testdata/datasetReplicaRequirements.json index 73ee730..c0c4c12 100644 --- a/testdata/datasetReplicaRequirements.json +++ b/testdata/datasetReplicaRequirements.json @@ -1,7 +1,7 @@ { "dataPreparers": [ [ - "0x3D08114dD4F65B5DDCc760884249D9d1AE435Dee", + "0x09C6DEE9DB5e7dF2b18283c0CFCf714fEDB692d7", "0xC2390a70E3D6C274c637fD6037967BF4663e07eE", "0x253d87fD415cbE26f530CA401Ce8AA113C32f1f5" ], @@ -95,5 +95,5 @@ ] ], "amount": 0, - "datasetId": 3 -} \ No newline at end of file + "datasetId": 16 +} diff --git a/testdata/mappingFiles.proof b/testdata/mappingFiles.proof new file mode 100644 index 0000000..43ee60f --- /dev/null +++ b/testdata/mappingFiles.proof @@ -0,0 +1,9 @@ +{ + "Root": "0xe883d6ad0b06cf6ea6d8e974075bbeaec8da599e9b99520ec8ab7dbfe9328838", + "LeafHashes": [ + "0xe883d6ad0b06cf6ea6d8e974075bbeaec8da599e9b99520ec8ab7dbfe9328838" + ], + "LeafSizes": [ + 21166653 + ] +}