From 9b77bffeeade48c6e2d9bfd57c00a7ebcf35fc66 Mon Sep 17 00:00:00 2001
From: ndrpp
Date: Thu, 12 Feb 2026 12:22:04 +0200
Subject: [PATCH 1/2] feat(logs): add downloadNodeLogs cli command

---
 src/cli.ts      |  672 ++++++----
 src/commands.ts | 3386 ++++++++++++++++++++++++-----------------------
 2 files changed, 2169 insertions(+), 1889 deletions(-)

diff --git a/src/cli.ts b/src/cli.ts
index 54dcb44..743137a 100644
--- a/src/cli.ts
+++ b/src/cli.ts
@@ -1,14 +1,13 @@
-import { Command } from 'commander';
-import { Commands } from './commands.js';
-import { JsonRpcProvider, Signer, ethers } from 'ethers';
-import chalk from 'chalk';
-import { stdin as input, stdout as output } from 'node:process';
-import { createInterface } from 'readline/promises';
-import { unitsToAmount } from '@oceanprotocol/lib';
-import { toBoolean } from './helpers.js';
+import { Command } from "commander";
+import { Commands } from "./commands.js";
+import { JsonRpcProvider, Signer, ethers } from "ethers";
+import chalk from "chalk";
+import { stdin as input, stdout as output } from "node:process";
+import { createInterface } from "readline/promises";
+import { unitsToAmount } from "@oceanprotocol/lib";
+import { toBoolean } from "./helpers.js";
 
 async function initializeSigner() {
-
   const provider = new JsonRpcProvider(process.env.RPC);
   let signer: Signer;
@@ -23,7 +22,6 @@ async function initializeSigner() {
 }
 
 export async function createCLI() {
-
   if (!process.env.MNEMONIC && !process.env.PRIVATE_KEY) {
     console.error(chalk.red("Have you forgot to set MNEMONIC or PRIVATE_KEY?"));
     process.exit(1);
@@ -41,33 +39,33 @@ export async function createCLI() {
   const program = new Command();
 
   program
-    .name('ocean-cli')
-    .description('CLI tool to interact with Ocean Protocol')
-    .version('2.0.0')
-    .helpOption('-h, --help', 'Display help for command');
+    .name("ocean-cli")
+    .description("CLI tool to interact with Ocean Protocol")
+    .version("2.0.0")
+    .helpOption("-h, --help", "Display help for command");
 
   // Custom help command to support legacy "h" invocation.
   // Note: We use console.log(program.helpInformation()) to print the full help output.
   program
-    .command('help')
-    .alias('h')
-    .description('Display help for all commands')
+    .command("help")
+    .alias("h")
+    .description("Display help for all commands")
     .action(() => {
       console.log(program.helpInformation());
     });
 
   // getDDO command
   program
-    .command('getDDO')
-    .description('Gets DDO for an asset using the asset did')
-    .argument('<did>', 'The asset DID')
-    .option('-d, --did <did>', 'The asset DID')
+    .command("getDDO")
+    .description("Gets DDO for an asset using the asset did")
+    .argument("<did>", "The asset DID")
+    .option("-d, --did <did>", "The asset DID")
     .action(async (did, options) => {
       const assetDid = options.did || did;
       if (!assetDid) {
-        console.error(chalk.red('DID is required'));
+        console.error(chalk.red("DID is required"));
         // process.exit(1);
-        return
+        return;
       }
       const { signer, chainId } = await initializeSigner();
       const commands = new Commands(signer, chainId);
@@ -76,17 +74,17 @@ export async function createCLI() {
 
   // publish command
   program
-    .command('publish')
-    .description('Publishes a new asset with access service or compute service')
-    .argument('<metadataFile>', 'Path to metadata file')
-    .option('-f, --file <metadataFile>', 'Path to metadata file')
-    .option('-e, --encrypt [boolean]', 'Encrypt DDO', true)
+    .command("publish")
+    .description("Publishes a new asset with access service or compute service")
+    .argument("<metadataFile>", "Path to metadata file")
+    .option("-f, --file <metadataFile>", "Path to metadata file")
+    .option("-e, --encrypt [boolean]", "Encrypt DDO", true)
     .action(async (metadataFile, options) => {
       const file = options.file || metadataFile;
       if (!file) {
-        console.error(chalk.red('Metadata file is required'));
+        console.error(chalk.red("Metadata file is required"));
         // process.exit(1);
-        return
+        return;
       }
       const { signer, chainId } = await initializeSigner();
       const commands = new Commands(signer, chainId);
@@ -95,17 +93,17 @@ export async function createCLI() {
 
   // publishAlgo command
   program
-    .command('publishAlgo')
-    .description('Publishes a new algorithm')
-    .argument('<metadataFile>', 'Path to metadata file')
-    .option('-f, --file <metadataFile>', 'Path to metadata file')
-    .option('-e, --encrypt [boolean]', 'Encrypt DDO', true)
+    .command("publishAlgo")
+    .description("Publishes a new algorithm")
+    .argument("<metadataFile>", "Path to metadata file")
+    .option("-f, --file <metadataFile>", "Path to metadata file")
+    .option("-e, --encrypt [boolean]", "Encrypt DDO", true)
    .action(async (metadataFile, options) => {
      const file = options.file || metadataFile;
      if (!file) {
-        console.error(chalk.red('Metadata file is required'));
+        console.error(chalk.red("Metadata file is required"));
        // process.exit(1);
-        return
+        return;
      }
      const { signer, chainId } = await initializeSigner();
      const commands = new Commands(signer, chainId);
@@ -114,21 +112,21 @@ export async function createCLI() {
 
   // editAsset command (alias "edit" for backwards compatibility)
   program
-    .command('editAsset')
-    .alias('edit')
-    .description('Updates DDO using the metadata items in the file')
-    .argument('<datasetDid>', 'Dataset DID')
-    .argument('<metadataFile>', 'Updated metadata file')
-    .option('-d, --did <datasetDid>', 'Dataset DID')
-    .option('-f, --file <metadataFile>', 'Updated metadata file')
-    .option('-e, --encrypt [boolean]', 'Encrypt DDO', true)
+    .command("editAsset")
+    .alias("edit")
+    .description("Updates DDO using the metadata items in the file")
+    .argument("<datasetDid>", "Dataset DID")
+    .argument("<metadataFile>", "Updated metadata file")
+    .option("-d, --did <datasetDid>", "Dataset DID")
+    .option("-f, --file <metadataFile>", "Updated metadata file")
+    .option("-e, --encrypt [boolean]", "Encrypt DDO", true)
     .action(async (datasetDid, metadataFile, options) => {
       const dsDid = options.did || datasetDid;
       const file = options.file || metadataFile;
       if (!dsDid || !file) {
-        console.error(chalk.red('Dataset DID and metadata file are required'));
+        console.error(chalk.red("Dataset DID and metadata file are required"));
         // process.exit(1);
-        return
+        return;
       }
       const { signer, chainId } = await initializeSigner();
       const commands = new Commands(signer, chainId);
@@ -137,19 +135,19 @@ export async function createCLI() {
 
   // download command
   program
-    .command('download')
-    .description('Downloads an asset into specified folder')
-    .argument('<did>', 'The asset DID')
-    .argument('[folder]', 'Destination folder', '.')
-    .option('-d, --did <did>', 'The asset DID')
-    .option('-f, --folder [folder]', 'Destination folder', '.')
+    .command("download")
+    .description("Downloads an asset into specified folder")
+    .argument("<did>", "The asset DID")
+    .argument("[folder]", "Destination folder", ".")
+    .option("-d, --did <did>", "The asset DID")
+    .option("-f, --folder [folder]", "Destination folder", ".")
     .action(async (did, folder, options) => {
       const assetDid = options.did || did;
-      const destFolder = options.folder || folder || '.';
+      const destFolder = options.folder || folder || ".";
       if (!assetDid) {
-        console.error(chalk.red('DID is required'));
+        console.error(chalk.red("DID is required"));
         // process.exit(1);
-        return
+        return;
       }
       const { signer, chainId } = await initializeSigner();
       const commands = new Commands(signer, chainId);
@@ -158,20 +156,20 @@ export async function createCLI() {
 
   // allowAlgo command
   program
-    .command('allowAlgo')
-    .description('Approves an algorithm to run on a dataset')
-    .argument('<datasetDid>', 'Dataset DID')
-    .argument('<algoDid>', 'Algorithm DID')
-    .option('-d, --dataset <datasetDid>', 'Dataset DID')
-    .option('-a, --algo <algoDid>', 'Algorithm DID')
-    .option('-e, --encrypt [boolean]', 'Encrypt DDO', true)
+    .command("allowAlgo")
+    .description("Approves an algorithm to run on a dataset")
+    .argument("<datasetDid>", "Dataset DID")
+    .argument("<algoDid>", "Algorithm DID")
+    .option("-d, --dataset <datasetDid>", "Dataset DID")
+    .option("-a, --algo <algoDid>", "Algorithm DID")
+    .option("-e, --encrypt [boolean]", "Encrypt DDO", true)
     .action(async (datasetDid, algoDid, options) => {
       const dsDid = options.dataset || datasetDid;
       const aDid = options.algo || algoDid;
       if (!dsDid || !aDid) {
-        console.error(chalk.red('Dataset DID and Algorithm DID are required'));
+        console.error(chalk.red("Dataset DID and Algorithm DID are required"));
         // process.exit(1);
-        return
+        return;
       }
       const { signer, chainId } = await initializeSigner();
       const commands = new Commands(signer, chainId);
@@ -180,89 +178,137 @@ export async function createCLI() {
 
   // startCompute command
   program
-    .command('startCompute')
-    .description('Starts a compute job')
-    .argument('<datasetDids>', 'Dataset DIDs (comma-separated) OR (empty array for none)')
-    .argument('<algoDid>', 'Algorithm DID')
-    .argument('<computeEnvId>', 'Compute environment ID')
-    .argument('<maxJobDuration>', 'maxJobDuration for compute job')
-    .argument('<paymentToken>', 'Payment token for compute')
-    .argument('<resources>', 'Resources of compute environment stringified')
-    .option('-d, --datasets <datasetDids>', 'Dataset DIDs (comma-separated) OR (empty array for none)')
-    .option('-a, --algo <algoDid>', 'Algorithm DID')
-    .option('-e, --env <computeEnvId>', 'Compute environment ID')
-    .option('--maxJobDuration <maxJobDuration>', 'Compute maxJobDuration')
-    .option('-t, --token <paymentToken>', 'Compute payment token')
-    .option('--resources <resources>', 'Compute resources')
-    .option('--accept [boolean]', 'Auto-confirm payment for compute job (true/false)', toBoolean)
-    .action(async (datasetDids, algoDid, computeEnvId, maxJobDuration, paymentToken, resources, options) => {
-      const dsDids = options.datasets || datasetDids;
-      const aDid = options.algo || algoDid;
-      const envId = options.env || computeEnvId;
-      const jobDuration = options.maxJobDuration || maxJobDuration;
-      const token = options.token || paymentToken;
-      const res = options.resources || resources;
-      if (!dsDids || !aDid || !envId || !jobDuration || !token || !res) {
-        console.error(chalk.red('Missing required arguments'));
-        // process.exit(1);
-        return
-      }
-      const { signer, chainId } = await initializeSigner();
-      const commands = new Commands(signer, chainId);
-
-      const initArgs = [null, dsDids, aDid, envId, jobDuration, token, res];
-      const initResp = await commands.initializeCompute(initArgs);
-
-      if (!initResp) {
-        console.error(chalk.red('Initialization failed. Aborting.'));
-        return;
-      }
+    .command("startCompute")
+    .description("Starts a compute job")
+    .argument(
+      "<datasetDids>",
+      "Dataset DIDs (comma-separated) OR (empty array for none)"
+    )
+    .argument("<algoDid>", "Algorithm DID")
+    .argument("<computeEnvId>", "Compute environment ID")
+    .argument("<maxJobDuration>", "maxJobDuration for compute job")
+    .argument("<paymentToken>", "Payment token for compute")
+    .argument("<resources>", "Resources of compute environment stringified")
+    .option(
+      "-d, --datasets <datasetDids>",
+      "Dataset DIDs (comma-separated) OR (empty array for none)"
+    )
+    .option("-a, --algo <algoDid>", "Algorithm DID")
+    .option("-e, --env <computeEnvId>", "Compute environment ID")
+    .option("--maxJobDuration <maxJobDuration>", "Compute maxJobDuration")
+    .option("-t, --token <paymentToken>", "Compute payment token")
+    .option("--resources <resources>", "Compute resources")
+    .option(
+      "--accept [boolean]",
+      "Auto-confirm payment for compute job (true/false)",
+      toBoolean
+    )
+    .action(
+      async (
+        datasetDids,
+        algoDid,
+        computeEnvId,
+        maxJobDuration,
+        paymentToken,
+        resources,
+        options
+      ) => {
+        const dsDids = options.datasets || datasetDids;
+        const aDid = options.algo || algoDid;
+        const envId = options.env || computeEnvId;
+        const jobDuration = options.maxJobDuration || maxJobDuration;
+        const token = options.token || paymentToken;
+        const res = options.resources || resources;
+        if (!dsDids || !aDid || !envId || !jobDuration || !token || !res) {
+          console.error(chalk.red("Missing required arguments"));
+          // process.exit(1);
+          return;
+        }
+        const { signer, chainId } = await initializeSigner();
+        const commands = new Commands(signer, chainId);
-      console.log(chalk.yellow('\n--- Payment Details ---'));
-      console.log(JSON.stringify(initResp, null, 2));
-      const amount = await unitsToAmount(signer, initResp.payment.token, initResp.payment.amount.toString());
+        const initArgs = [null, dsDids, aDid, envId, jobDuration, token, res];
+        const initResp = await commands.initializeCompute(initArgs);
-      const proceed = options.accept;
-      if (!proceed) {
-        if (!process.stdin.isTTY) {
-          console.error(chalk.red('Cannot prompt for confirmation (non-TTY). Use "--accept true" to skip.'));
-          process.exit(1);
-        }
-        const rl = createInterface({ input, output });
-        const confirmation = await rl.question(`\nProceed with payment for starting compute job at price ${amount} in tokens from address ${initResp.payment.token}? (y/n): `);
-        rl.close();
-        if (confirmation.toLowerCase() !== 'y' && confirmation.toLowerCase() !== 'yes') {
-          console.log(chalk.red('Compute job canceled by user.'));
-          return;
-        }
+        if (!initResp) {
+          console.error(chalk.red("Initialization failed. Aborting."));
+          return;
+        }
-      } else {
-        console.log(chalk.cyan('Auto-confirm enabled with --yes flag.'));
-      }
+        console.log(chalk.yellow("\n--- Payment Details ---"));
+        console.log(JSON.stringify(initResp, null, 2));
+        const amount = await unitsToAmount(
+          signer,
+          initResp.payment.token,
+          initResp.payment.amount.toString()
+        );
+
+        const proceed = options.accept;
+        if (!proceed) {
+          if (!process.stdin.isTTY) {
+            console.error(
+              chalk.red(
+                'Cannot prompt for confirmation (non-TTY). Use "--accept true" to skip.'
+              )
+            );
+            process.exit(1);
+          }
+          const rl = createInterface({ input, output });
+          const confirmation = await rl.question(
+            `\nProceed with payment for starting compute job at price ${amount} in tokens from address ${initResp.payment.token}? (y/n): `
+          );
+          rl.close();
+          if (
+            confirmation.toLowerCase() !== "y" &&
+            confirmation.toLowerCase() !== "yes"
+          ) {
+            console.log(chalk.red("Compute job canceled by user."));
+            return;
+          }
+        } else {
+          console.log(chalk.cyan("Auto-confirm enabled with --accept flag."));
+        }
-      const computeArgs = [null, dsDids, aDid, envId, JSON.stringify(initResp), jobDuration, token, res];
-
-      await commands.computeStart(computeArgs);
-      console.log(chalk.green('Compute job started successfully.'));
-    });
+
+        const computeArgs = [
+          null,
+          dsDids,
+          aDid,
+          envId,
+          JSON.stringify(initResp),
+          jobDuration,
+          token,
+          res,
+        ];
+
+        await commands.computeStart(computeArgs);
+        console.log(chalk.green("Compute job started successfully."));
+      }
+    );
 
   // startFreeCompute command
   program
-    .command('startFreeCompute')
-    .description('Starts a FREE compute job')
-    .argument('<datasetDids>', 'Dataset DIDs (comma-separated) OR (empty array for none)')
-    .argument('<algoDid>', 'Algorithm DID')
-    .argument('<computeEnvId>', 'Compute environment ID')
-    .option('-d, --datasets <datasetDids>', 'Dataset DIDs (comma-separated) OR (empty array for none)')
-    .option('-a, --algo <algoDid>', 'Algorithm DID')
-    .option('-e, --env <computeEnvId>', 'Compute environment ID')
+    .command("startFreeCompute")
+    .description("Starts a FREE compute job")
+    .argument(
+      "<datasetDids>",
+      "Dataset DIDs (comma-separated) OR (empty array for none)"
+    )
+    .argument("<algoDid>", "Algorithm DID")
+    .argument("<computeEnvId>", "Compute environment ID")
+    .option(
+      "-d, --datasets <datasetDids>",
+      "Dataset DIDs (comma-separated) OR (empty array for none)"
+    )
+    .option("-a, --algo <algoDid>", "Algorithm DID")
+    .option("-e, --env <computeEnvId>", "Compute environment ID")
     .action(async (datasetDids, algoDid, computeEnvId, options) => {
       const dsDids = options.datasets || datasetDids;
       const aDid = options.algo || algoDid;
       const envId = options.env || computeEnvId;
       if (!dsDids || !aDid || !envId) {
-        console.error(chalk.red('Missing required arguments'));
+        console.error(chalk.red("Missing required arguments"));
         // process.exit(1);
-        return
+        return;
       }
       const { signer, chainId } = await initializeSigner();
       const commands = new Commands(signer, chainId);
@@ -271,9 +317,9 @@ export async function createCLI() {
 
   // getComputeEnvironments command
   program
-    .command('getComputeEnvironments')
-    .alias('getC2DEnvs')
-    .description('Gets the existing compute environments')
+    .command("getComputeEnvironments")
+    .alias("getC2DEnvs")
+    .description("Gets the existing compute environments")
     .action(async () => {
       const { signer, chainId } = await initializeSigner();
       const commands = new Commands(signer, chainId);
@@ -282,35 +328,35 @@ export async function createCLI() {
 
   // startFreeCompute command
   program
-    .command('computeStreamableLogs')
-    .description('Gets the existing compute streamable logs')
-    .argument('<jobId>', 'Job ID')
-    .option('-j, --job <jobId>', 'Job ID')
+    .command("computeStreamableLogs")
+    .description("Gets the existing compute streamable logs")
+    .argument("<jobId>", "Job ID")
+    .option("-j, --job <jobId>", "Job ID")
     .action(async (jobId, options) => {
       const { signer, chainId } = await initializeSigner();
       const commands = new Commands(signer, chainId);
-      const args = jobId || options.job
+      const args = jobId || options.job;
       await commands.computeStreamableLogs([args]);
     });
 
   // stopCompute command
   program
-    .command('stopCompute')
-    .description('Stops a compute job')
-    .argument('<datasetDid>', 'Dataset DID')
-    .argument('<jobId>', 'Job ID')
-    .argument('<agreementId>', 'Agreement ID')
-    .option('-d, --dataset <datasetDid>', 'Dataset DID')
-    .option('-j, --job <jobId>', 'Job ID')
-    .option('-a, --agreement [agreementId]', 'Agreement ID')
+    .command("stopCompute")
+    .description("Stops a compute job")
+    .argument("<datasetDid>", "Dataset DID")
+    .argument("<jobId>", "Job ID")
+    .argument("<agreementId>", "Agreement ID")
+    .option("-d, --dataset <datasetDid>", "Dataset DID")
+    .option("-j, --job <jobId>", "Job ID")
+    .option("-a, --agreement [agreementId]", "Agreement ID")
     .action(async (datasetDid, jobId, agreementId, options) => {
       const dsDid = options.dataset || datasetDid;
       const jId = options.job || jobId;
       const agrId = options.agreement || agreementId;
       if (!dsDid || !jId) {
-        console.error(chalk.red('Dataset DID and Job ID are required'));
+        console.error(chalk.red("Dataset DID and Job ID are required"));
         // process.exit(1);
-        return
+        return;
       }
       const { signer, chainId } = await initializeSigner();
       const commands = new Commands(signer, chainId);
@@ -321,22 +367,22 @@ export async function createCLI() {
 
   // getJobStatus command
   program
-    .command('getJobStatus')
-    .description('Displays the compute job status')
-    .argument('<datasetDid>', 'Dataset DID')
-    .argument('<jobId>', 'Job ID')
-    .argument('<agreementId>', 'Agreement ID')
-    .option('-d, --dataset <datasetDid>', 'Dataset DID')
-    .option('-j, --job <jobId>', 'Job ID')
-    .option('-a, --agreement [agreementId]', 'Agreement ID')
+    .command("getJobStatus")
+    .description("Displays the compute job status")
+    .argument("<datasetDid>", "Dataset DID")
+    .argument("<jobId>", "Job ID")
+    .argument("<agreementId>", "Agreement ID")
+    .option("-d, --dataset <datasetDid>", "Dataset DID")
+    .option("-j, --job <jobId>", "Job ID")
+    .option("-a, --agreement [agreementId]", "Agreement ID")
     .action(async (datasetDid, jobId, agreementId, options) => {
       const dsDid = options.dataset || datasetDid;
       const jId = options.job || jobId;
       const agrId = options.agreement || agreementId;
       if (!dsDid || !jId) {
-        console.error(chalk.red('Dataset DID and Job ID are required'));
+        console.error(chalk.red("Dataset DID and Job ID are required"));
         // process.exit(1);
-        return
+        return;
       }
       const { signer, chainId } = await initializeSigner();
       const commands = new Commands(signer, chainId);
@@ -347,21 +393,26 @@ export async function createCLI() {
 
   // downloadJobResults command
   program
-    .command('downloadJobResults')
-    .description('Downloads compute job results')
-    .argument('<jobId>', 'Job ID')
-    .argument('<resultIndex>', 'Result index', parseInt)
-    .argument('[destinationFolder]', 'Destination folder', '.')
+    .command("downloadJobResults")
+    .description("Downloads compute job results")
+    .argument("<jobId>", "Job ID")
+    .argument("<resultIndex>", "Result index", parseInt)
+    .argument("[destinationFolder]", "Destination folder", ".")
     .action(async (jobId, resultIndex, destinationFolder) => {
       const { signer, chainId } = await initializeSigner();
       const commands = new Commands(signer, chainId);
-      await commands.downloadJobResults([null, jobId, resultIndex, destinationFolder]);
+      await commands.downloadJobResults([
+        null,
+        jobId,
+        resultIndex,
+        destinationFolder,
+      ]);
     });
 
   // mintOcean command
   program
-    .command('mintOcean')
-    .description('Mints Ocean tokens')
+    .command("mintOcean")
+    .description("Mints Ocean tokens")
     .action(async () => {
       const { signer, chainId } = await initializeSigner();
       const commands = new Commands(signer, chainId);
@@ -370,21 +421,20 @@ export async function createCLI() {
 
   // Generate new auth token
   program
-    .command('generateAuthToken')
-    .description('Generate new auth token')
+    .command("generateAuthToken")
+    .description("Generate new auth token")
     .action(async () => {
       const { signer, chainId } = await initializeSigner();
       const commands = new Commands(signer, chainId);
       await commands.generateAuthToken();
     });
-
   // Invalidate auth token
   program
-    .command('invalidateAuthToken')
-    .description('Invalidate auth token')
-    .argument('<token>', 'Auth token')
-    .option('-t, --token <token>', 'Auth token')
+    .command("invalidateAuthToken")
+    .description("Invalidate auth token")
+    .argument("<token>", "Auth token")
+    .option("-t, --token <token>", "Auth token")
     .action(async (token, options) => {
       const { signer, chainId } = await initializeSigner();
       const commands = new Commands(signer, chainId);
@@ -393,32 +443,37 @@ export async function createCLI() {
 
   // Escrow deposit command
   program
-    .command('depositEscrow')
-    .description('Deposit tokens into the escrow contract')
-    .argument('<token>', 'Address of the token to deposit')
-    .argument('<amount>', 'Amount of tokens to deposit')
-    .option('-t, --token <token>', 'Address of the token to deposit')
-    .option('-a, --amount <amount>', 'Amount of tokens to deposit')
+    .command("depositEscrow")
+    .description("Deposit tokens into the escrow contract")
+    .argument("<token>", "Address of the token to deposit")
+    .argument("<amount>", "Amount of tokens to deposit")
+    .option("-t, --token <token>", "Address of the token to deposit")
+    .option("-a, --amount <amount>", "Amount of tokens to deposit")
     .action(async (token, amount, options) => {
       const { signer, chainId } = await initializeSigner();
       const commands = new Commands(signer, chainId);
       const tokenAddress = options.token || token;
       const amountToDeposit = options.amount || amount;
-      const success = await commands.depositToEscrow(signer, tokenAddress, amountToDeposit, chainId);
+      const success = await commands.depositToEscrow(
+        signer,
+        tokenAddress,
+        amountToDeposit,
+        chainId
+      );
       if (!success) {
-        console.log(chalk.red('Deposit failed'));
+        console.log(chalk.red("Deposit failed"));
         return;
       }
-      console.log(chalk.green('Deposit successful'));
+      console.log(chalk.green("Deposit successful"));
     });
 
   // Check escrow deposited balance
   program
-    .command('getUserFundsEscrow')
-    .description('Get deposited token amount in escrow for user')
-    .argument('<token>', 'Address of the token to check')
-    .option('-t, --token <token>', 'Address of the token to check')
+    .command("getUserFundsEscrow")
+    .description("Get deposited token amount in escrow for user")
+    .argument("<token>", "Address of the token to check")
+    .option("-t, --token <token>", "Address of the token to check")
     .action(async (token, options) => {
       const { signer, chainId } = await initializeSigner();
       const commands = new Commands(signer, chainId);
@@ -427,12 +482,12 @@ export async function createCLI() {
 
   // Withdraw from escrow
   program
-    .command('withdrawFromEscrow')
-    .description('Withdraw tokens from escrow')
-    .argument('<token>', 'Address of the token to check')
-    .argument('<amount>', 'Amount of tokens to withdraw')
-    .option('-t, --token <token>', 'Address of the token to check')
-    .option('-a, --amount <amount>', 'Amount of tokens to withdraw')
+    .command("withdrawFromEscrow")
+    .description("Withdraw tokens from escrow")
+    .argument("<token>", "Address of the token to check")
+    .argument("<amount>", "Amount of tokens to withdraw")
+    .option("-t, --token <token>", "Address of the token to check")
+    .option("-a, --amount <amount>", "Amount of tokens to withdraw")
     .action(async (token, amount, options) => {
       const { signer, chainId } = await initializeSigner();
       const commands = new Commands(signer, chainId);
@@ -441,67 +496,104 @@ export async function createCLI() {
 
   // Escrow authorization command
   program
-    .command('authorizeEscrow')
-    .description('Authorize a payee to lock and claim funds from escrow')
-    .argument('<token>', 'Address of the token to authorize')
-    .argument('<payee>', 'Address of the payee to authorize')
-    .argument('<maxLockedAmount>', 'Maximum amount that can be locked by payee')
-    .argument('<maxLockSeconds>', 'Maximum lock duration in seconds')
-    .argument('<maxLockCounts>', 'Maximum number of locks allowed')
-    .option('-t, --token <token>', 'Address of the token to authorize')
-    .option('-p, --payee <payee>', 'Address of the payee to authorize')
-    .option('-m, --maxLockedAmount <maxLockedAmount>', 'Maximum amount that can be locked by payee')
-    .option('-s, --maxLockSeconds <maxLockSeconds>', 'Maximum lock duration in seconds')
-    .option('-c, --maxLockCounts <maxLockCounts>', 'Maximum number of locks allowed')
-    .action(async (token, payee, maxLockedAmount, maxLockSeconds, maxLockCounts, options) => {
-      const { signer, chainId } = await initializeSigner();
-      const commands = new Commands(signer, chainId);
-      const tokenAddress = options.token || token;
-      const payeeAddress = options.payee || payee;
-      const maxLockedAmountValue = options.maxLockedAmount || maxLockedAmount;
-      const maxLockSecondsValue = options.maxLockSeconds || maxLockSeconds;
-      const maxLockCountsValue = options.maxLockCounts || maxLockCounts;
-
-      const success = await commands.authorizeEscrowPayee(
-        tokenAddress,
-        payeeAddress,
-        maxLockedAmountValue,
-        maxLockSecondsValue,
-        maxLockCountsValue,
-      );
+    .command("authorizeEscrow")
+    .description("Authorize a payee to lock and claim funds from escrow")
+    .argument("<token>", "Address of the token to authorize")
+    .argument("<payee>", "Address of the payee to authorize")
+    .argument("<maxLockedAmount>", "Maximum amount that can be locked by payee")
+    .argument("<maxLockSeconds>", "Maximum lock duration in seconds")
+    .argument("<maxLockCounts>", "Maximum number of locks allowed")
+    .option("-t, --token <token>", "Address of the token to authorize")
+    .option("-p, --payee <payee>", "Address of the payee to authorize")
+    .option(
+      "-m, --maxLockedAmount <maxLockedAmount>",
+      "Maximum amount that can be locked by payee"
+    )
+    .option(
+      "-s, --maxLockSeconds <maxLockSeconds>",
+      "Maximum lock duration in seconds"
+    )
+    .option(
+      "-c, --maxLockCounts <maxLockCounts>",
+      "Maximum number of locks allowed"
+    )
+    .action(
+      async (
+        token,
+        payee,
+        maxLockedAmount,
+        maxLockSeconds,
+        maxLockCounts,
+        options
+      ) => {
+        const { signer, chainId } = await initializeSigner();
+        const commands = new Commands(signer, chainId);
+        const tokenAddress = options.token || token;
+        const payeeAddress = options.payee || payee;
+        const maxLockedAmountValue = options.maxLockedAmount || maxLockedAmount;
+        const maxLockSecondsValue = options.maxLockSeconds || maxLockSeconds;
+        const maxLockCountsValue = options.maxLockCounts || maxLockCounts;
+
+        const success = await commands.authorizeEscrowPayee(
+          tokenAddress,
+          payeeAddress,
+          maxLockedAmountValue,
+          maxLockSecondsValue,
+          maxLockCountsValue
+        );
+
+        if (!success) {
+          console.log(chalk.red("Authorization failed"));
+          return;
+        }
-      if (!success) {
-        console.log(chalk.red('Authorization failed'));
-        return;
+        console.log(chalk.green("Authorization successful"));
+      }
-      }
-
-      console.log(chalk.green('Authorization successful'));
-    });
+    );
 
   program
-    .command('getAuthorizationsEscrow')
-    .description('Get authorizations for escrow')
-    .argument('<token>', 'Address of the token to check')
-    .argument('<payee>', 'Address of the payee to check')
-    .option('-t, --token <token>', 'Address of the token to check')
-    .option('-p, --payee <payee>', 'Address of the payee to check')
+    .command("getAuthorizationsEscrow")
+    .description("Get authorizations for escrow")
+    .argument("<token>", "Address of the token to check")
+    .argument("<payee>", "Address of the payee to check")
+    .option("-t, --token <token>", "Address of the token to check")
+    .option("-p, --payee <payee>", "Address of the payee to check")
     .action(async (token, payee, options) => {
       const { signer, chainId } = await initializeSigner();
       const commands = new Commands(signer, chainId);
-      await commands.getAuthorizationsEscrow(token || options.token, payee || options.payee);
+      await commands.getAuthorizationsEscrow(
+        token || options.token,
+        payee || options.payee
+      );
     });
 
   program
-    .command('createAccessList')
-    .description('Create a new access list contract')
-    .argument('<name>', 'Name for the access list')
-    .argument('<symbol>', 'Symbol for the access list')
-    .argument('[transferable]', 'Whether tokens are transferable (true/false)', 'false')
-    .argument('[initialUsers]', 'Comma-separated list of initial user addresses', '')
-    .option('-n, --name <name>', 'Name for the access list')
-    .option('-s, --symbol <symbol>', 'Symbol for the access list')
-    .option('-t, --transferable [transferable]', 'Whether tokens are transferable (true/false)', 'false')
-    .option('-u, --users [initialUsers]', 'Comma-separated list of initial user addresses', '')
+    .command("createAccessList")
+    .description("Create a new access list contract")
+    .argument("<name>", "Name for the access list")
+    .argument("<symbol>", "Symbol for the access list")
+    .argument(
+      "[transferable]",
+      "Whether tokens are transferable (true/false)",
+      "false"
+    )
+    .argument(
+      "[initialUsers]",
+      "Comma-separated list of initial user addresses",
+      ""
+    )
+    .option("-n, --name <name>", "Name for the access list")
+    .option("-s, --symbol <symbol>", "Symbol for the access list")
+    .option(
+      "-t, --transferable [transferable]",
+      "Whether tokens are transferable (true/false)",
+      "false"
+    )
+    .option(
+      "-u, --users [initialUsers]",
+      "Comma-separated list of initial user addresses",
+      ""
+    )
     .action(async (name, symbol, transferable, initialUsers, options) => {
       const { signer, chainId } = await initializeSigner();
       const commands = new Commands(signer, chainId);
@@ -509,58 +601,100 @@ export async function createCLI() {
         options.name || name,
         options.symbol || symbol,
         options.transferable || transferable,
-        options.users || initialUsers
+        options.users || initialUsers,
       ]);
     });
 
   program
-    .command('addToAccessList')
-    .description('Add user(s) to an access list')
-    .argument('<accessListAddress>', 'Address of the access list contract')
-    .argument('<users>', 'Comma-separated list of user addresses to add')
-    .option('-a, --address <accessListAddress>', 'Address of the access list contract')
-    .option('-u, --users <users>', 'Comma-separated list of user addresses to add')
+    .command("addToAccessList")
+    .description("Add user(s) to an access list")
+    .argument("<accessListAddress>", "Address of the access list contract")
+    .argument("<users>", "Comma-separated list of user addresses to add")
+    .option(
+      "-a, --address <accessListAddress>",
+      "Address of the access list contract"
+    )
+    .option(
+      "-u, --users <users>",
+      "Comma-separated list of user addresses to add"
+    )
     .action(async (accessListAddress, users, options) => {
       const { signer, chainId } = await initializeSigner();
       const commands = new Commands(signer, chainId);
       await commands.addToAccessList([
         options.address || accessListAddress,
-        options.users || users
+        options.users || users,
       ]);
     });
 
   program
-    .command('checkAccessList')
-    .description('Check if user(s) are on an access list')
-    .argument('<accessListAddress>', 'Address of the access list contract')
-    .argument('<users>', 'Comma-separated list of user addresses to check')
-    .option('-a, --address <accessListAddress>', 'Address of the access list contract')
-    .option('-u, --users <users>', 'Comma-separated list of user addresses to check')
+    .command("checkAccessList")
+    .description("Check if user(s) are on an access list")
+    .argument("<accessListAddress>", "Address of the access list contract")
+    .argument("<users>", "Comma-separated list of user addresses to check")
+    .option(
+      "-a, --address <accessListAddress>",
+      "Address of the access list contract"
+    )
+    .option(
+      "-u, --users <users>",
+      "Comma-separated list of user addresses to check"
+    )
    .action(async (accessListAddress, users, options) => {
       const { signer, chainId } = await initializeSigner();
       const commands = new Commands(signer, chainId);
       await commands.checkAccessList([
         options.address || accessListAddress,
-        options.users || users
+        options.users || users,
       ]);
     });
 
   program
-    .command('removeFromAccessList')
-    .description('Remove user(s) from an access list')
-    .argument('<accessListAddress>', 'Address of the access list contract')
-    .argument('<users>', 'Comma-separated list of user addresses to remove')
-    .option('-a, --address <accessListAddress>', 'Address of the access list contract')
-    .option('-u, --users <users>', 'Comma-separated list of user addresses to remove')
+    .command("removeFromAccessList")
+    .description("Remove user(s) from an access list")
+    .argument("<accessListAddress>", "Address of the access list contract")
+    .argument("<users>", "Comma-separated list of user addresses to remove")
+    .option(
+      "-a, --address <accessListAddress>",
+      "Address of the access list contract"
+    )
+    .option(
+      "-u, --users <users>",
+      "Comma-separated list of user addresses to remove"
+    )
     .action(async (accessListAddress, users, options) => {
       const { signer, chainId } = await initializeSigner();
       const commands = new Commands(signer, chainId);
       await commands.removeFromAccessList([
         options.address || accessListAddress,
-        options.users || users
+        options.users || users,
       ]);
     });
 
+  program
+    .command("downloadNodeLogs")
+    .description("Download logs from a node as an admin")
+    .argument("<output>", "Output directory to save the logs")
+    .argument(
+      "[last]",
+      "Period of time to get logs from now (in hours). Use either last or from-to"
+    )
+    .argument("[from]", "Start time (epoch ms) to get logs from")
+    .argument("[to]", "End time (epoch ms) to get logs to")
+    .option("-o, --output <output>", "Output directory to save the logs")
+    .option("-l, --last [last]", "Period of time to get logs from now (in hours)")
+    .option("-f, --from [from]", "Start time (epoch ms) to get logs from")
+    .option("-t, --to [to]", "End time (epoch ms) to get logs to")
+    .action(async (output, last, from, to, options) => {
+      const { signer, chainId } = await initializeSigner();
+      const commands = new Commands(signer, chainId);
+      await commands.downloadNodeLogs([
+        options.output || output,
+        options.last || last,
+        options.from || from,
+        options.to || to,
+      ]);
+    });
+
   return program;
-}
\ No newline at end of file
+}
diff --git a/src/commands.ts b/src/commands.ts
index 112e8e2..a633d99 100644
--- a/src/commands.ts
+++ b/src/commands.ts
@@ -1,1632 +1,1778 @@
 import fs from "fs";
 import util from "util";
 import {
-  createAssetUtil,
-  handleComputeOrder,
-  updateAssetMetadata,
-  downloadFile,
-  isOrderable,
-  getMetadataURI,
-  getIndexingWaitSettings,
-  IndexerWaitParams,
-  fixAndParseProviderFees,
-  getConfigByChainId
+  createAssetUtil,
+  handleComputeOrder,
+  updateAssetMetadata,
+  downloadFile,
+  isOrderable,
+  getMetadataURI,
+  getIndexingWaitSettings,
+  IndexerWaitParams,
+  fixAndParseProviderFees,
+  getConfigByChainId,
 } from "./helpers.js";
 import {
-  Aquarius,
-  ComputeAlgorithm,
-  ComputeJob,
-  ComputeOutput,
-  Config,
-  ConfigHelper,
-  Datatoken,
-  ProviderInstance,
-  amountToUnits,
-  getHash,
-  orderAsset,
-  sendTx,
-  unitsToAmount,
-  EscrowContract,
-  getTokenDecimals,
-  AccesslistFactory,
-  AccessListContract
+  Aquarius,
+  ComputeAlgorithm,
+  ComputeJob,
+  ComputeOutput,
+  Config,
+  ConfigHelper,
+  Datatoken,
+  ProviderInstance,
+  amountToUnits,
+  getHash,
+  orderAsset,
+  sendTx,
+  unitsToAmount,
+  EscrowContract,
+  getTokenDecimals,
+  AccesslistFactory,
+  AccessListContract,
 } from "@oceanprotocol/lib";
-import { Asset } from '@oceanprotocol/ddo-js';
+import { Asset } from "@oceanprotocol/ddo-js";
 import { Signer, ethers, getAddress } from "ethers";
 import { interactiveFlow } from "./interactiveFlow.js";
 import { publishAsset } from "./publishAsset.js";
-import chalk from 'chalk';
+import chalk from "chalk";
 
 export class Commands {
-  public signer: Signer;
-  public config: Config;
-  public aquarius: Aquarius;
-  public oceanNodeUrl: string;
-  // optional settings for indexing wait time
-  private indexingParams: IndexerWaitParams;
-
-  constructor(signer: Signer, network: string | number, config?: Config) {
-    this.signer = signer;
-    this.config = config || new ConfigHelper().getConfig(network);
-    this.oceanNodeUrl = process.env.NODE_URL;
-    this.indexingParams = getIndexingWaitSettings();
-    console.log("Using Ocean Node URL :", this.oceanNodeUrl);
-    this.config.nodeUri = this.oceanNodeUrl;
-    this.aquarius = new Aquarius(this.oceanNodeUrl);
-  }
-
-  public async start() {
-    console.log("Starting the interactive CLI flow...\n\n");
-    const data = await interactiveFlow(this.oceanNodeUrl); // Collect data via CLI
-    await publishAsset(this.aquarius, data, this.signer, this.config); // Publish asset with collected data
-  }
-
-  // utils
-  public async sleep(ms: number) {
-    return new Promise((resolve) => {
-      setTimeout(resolve, ms);
-    });
-  }
-
-  // commands
-  public async publish(args: string[]) {
-    console.log("start publishing");
-    let asset;
-    try {
-      asset = JSON.parse(fs.readFileSync(args[1], "utf8"));
-    } catch (e) {
console.error("Cannot read metadata from " + args[1]); - console.error(e); - return; - } - const encryptDDO = args[2] === "false" ? false : true; - try { - // add some more checks - const urlAssetId = await createAssetUtil( - asset.indexedMetadata.nft.name, - asset.indexedMetadata.nft.symbol, - this.signer, - asset.services[0].files, - asset, - this.oceanNodeUrl, - this.config, - this.aquarius, - encryptDDO - ); - console.log("Asset published. ID: " + urlAssetId); - } catch (e) { - console.error("Error when publishing dataset from file: " + args[1]); - console.error(e); - return; - } - } - - public async publishAlgo(args: string[]) { - let algoAsset: Asset; - try { - algoAsset = JSON.parse(fs.readFileSync(args[1], "utf8")); - } catch (e) { - console.error("Cannot read metadata from " + args[1]); - console.error(e); - return; - } - const encryptDDO = args[2] === "false" ? false : true; - // add some more checks - try { - const algoDid = await createAssetUtil( - algoAsset.indexedMetadata.nft.name, - algoAsset.indexedMetadata.nft.symbol, - this.signer, - algoAsset.services[0].files, - algoAsset, - this.oceanNodeUrl, - this.config, - this.aquarius, - encryptDDO - ); - // add some more checks - console.log("Algorithm published. DID: " + algoDid); - } catch (e) { - console.error("Error when publishing dataset from file: " + args[1]); - console.error(e); - return; - } - } - - public async editAsset(args: string[]) { - const asset = await this.aquarius.waitForIndexer( - args[1], - null, - null, - this.indexingParams.retryInterval, - this.indexingParams.maxRetries - ); - if (!asset) { - console.error( - "Error fetching DDO " + args[1] + ". Does this asset exists?" - ); - return; - } - const encryptDDO = args[3] === "false" ? false : true; - let updateJson; - try { - updateJson = JSON.parse(fs.readFileSync(args[2], "utf8")); - } catch (e) { - console.error("Cannot read metadata from " + args[2]); - console.error(e); - return; - } - // Get keys and values - const keys = Object.keys(updateJson); - - for (const key of keys) { - asset[key] = updateJson[key]; - } - - const updateAssetTx = await updateAssetMetadata( - this.signer, - asset, - this.oceanNodeUrl, - this.aquarius, - encryptDDO - ); - console.log("Asset updated. Tx: " + JSON.stringify(updateAssetTx, null, 2)); - } - - public async getDDO(args: string[]) { - console.log("Resolving Asset with DID: " + args[1]); - const resolvedDDO = await this.aquarius.waitForIndexer( - args[1], - null, - null, - this.indexingParams.retryInterval, - this.indexingParams.maxRetries - ); - if (!resolvedDDO) { - console.error( - "Error fetching Asset with DID: " + - args[1] + - ". Does this asset exists?" - ); - } else console.log(util.inspect(resolvedDDO, false, null, true)); - } - - public async download(args: string[]) { - const dataDdo = await this.aquarius.waitForIndexer( - args[1], - null, - null, - this.indexingParams.retryInterval, - this.indexingParams.maxRetries - ); - if (!dataDdo) { - console.error( - "Error fetching DDO " + args[1] + ". Does this asset exists?" - ); - return; - } - - const datatoken = new Datatoken( - this.signer, - this.config.chainId, - this.config - ); - - const tx = await orderAsset( - dataDdo, - this.signer, - this.config, - datatoken, - this.oceanNodeUrl - ); - - if (!tx) { - console.error( - "Error ordering access for " + args[1] + ". Do you have enough tokens?" 
- ); - return; - } - - const orderTx = await tx.wait(); - - const urlDownloadUrl = await ProviderInstance.getDownloadUrl( - dataDdo.id, - dataDdo.services[0].id, - 0, - orderTx.hash, - this.oceanNodeUrl, - this.signer - ); - try { - const path = args[2] ? args[2] : "."; - const { filename } = await downloadFile(urlDownloadUrl, path); - console.log("File downloaded successfully:", path + "/" + filename); - } catch (e) { - console.log(`Download url dataset failed: ${e}`); - } - } - - public async initializeCompute(args: string[]) { - const inputDatasetsString = args[1]; - let inputDatasets = []; - - if ( - inputDatasetsString.includes("[") && - inputDatasetsString.includes("]") - ) { - const processedInput = inputDatasetsString - .replaceAll("]", "") - .replaceAll("[", ""); - if (processedInput.indexOf(",") > -1) { - inputDatasets = processedInput.split(","); - } - } else { - inputDatasets.push(inputDatasetsString); - } - - const ddos = []; - - for (const dataset in inputDatasets) { - const dataDdo = await this.aquarius.waitForIndexer( - inputDatasets[dataset], - null, - null, - this.indexingParams.retryInterval, - this.indexingParams.maxRetries - ); - if (!dataDdo) { - console.error( - "Error fetching DDO " + dataset[1] + ". Does this asset exists?" - ); - return; - } else { - ddos.push(dataDdo); - } - } - if ( - inputDatasets.length > 0 && - (ddos.length <= 0 || ddos.length != inputDatasets.length) - ) { - console.error("Not all the data ddos are available."); - return; - } - let providerURI = this.oceanNodeUrl; - if (ddos.length > 0) { - providerURI = ddos[0].services[0].serviceEndpoint; - } - - const algoDdo = await this.aquarius.waitForIndexer( - args[2], - null, - null, - this.indexingParams.retryInterval, - this.indexingParams.maxRetries - ); - if (!algoDdo) { - console.error( - "Error fetching DDO " + args[1] + ". Does this asset exists?" - ); - return; - } - - const computeEnvs = await ProviderInstance.getComputeEnvironments( - this.oceanNodeUrl - ); - - if (!computeEnvs || computeEnvs.length < 1) { - console.error( - "Error fetching compute environments. No compute environments available." - ); - return; - } - - const computeEnvID = args[3]; - // NO chainId needed anymore (is not part of ComputeEnvironment spec anymore) - // const chainComputeEnvs = computeEnvs[computeEnvID]; // was algoDdo.chainId - let computeEnv = null; // chainComputeEnvs[0]; - - if (computeEnvID && computeEnvID.length > 1) { - for (const index in computeEnvs) { - if (computeEnvID == computeEnvs[index].id) { - computeEnv = computeEnvs[index]; - break; - } - } - } - if (!computeEnv || !computeEnvID) { - console.error( - "Error fetching compute environment. 
No compute environment matches id: ", - computeEnvID - ); - return; - } - - const algo: ComputeAlgorithm = { - documentId: algoDdo.id, - serviceId: algoDdo.services[0].id, - meta: algoDdo.metadata.algorithm, - }; - - const assets = []; - for (const dataDdo in ddos) { - const canStartCompute = isOrderable( - ddos[dataDdo], - ddos[dataDdo].services[0].id, - algo, - algoDdo - ); - if (!canStartCompute) { - console.error( - "Error Cannot start compute job using the datasets DIDs & algorithm DID provided" - ); - return; - } - assets.push({ - documentId: ddos[dataDdo].id, - serviceId: ddos[dataDdo].services[0].id, - }); - } - const maxJobDuration = Number(args[4]) - if (!maxJobDuration) { - console.error( - "Error initializing Provider for the compute job using dataset DID " + - args[1] + - " and algorithm DID " + - args[2] + - " because maxJobDuration was not provided." - ); - return; - } - if (maxJobDuration < 0) { - console.error( - "Error initializing Provider for the compute job using dataset DID " + - args[1] + - " and algorithm DID " + - args[2] + - " because maxJobDuration is less than 0. It should be in seconds." - ); - return; - } - let supportedMaxJobDuration: number = maxJobDuration; - if (maxJobDuration > computeEnv.maxJobDuration) { - supportedMaxJobDuration = computeEnv.maxJobDuration; - } - const paymentToken = args[5] - if (!paymentToken) { - console.error( - "Error initializing Provider for the compute job using dataset DID " + - args[1] + - " and algorithm DID " + - args[2] + - " because paymentToken was not provided." - ); - return; - } - const { chainId } = await this.signer.provider.getNetwork() - if (!Object.keys(computeEnv.fees).includes(chainId.toString())) { - console.error( - "Error starting paid compute using dataset DID " + - args[1] + - " and algorithm DID " + - args[2] + - " because chainId is not supported by compute environment. " + - args[3] + - ". Supported chain IDs: " + - computeEnv.fees.keys() - ); - return; - } - let found: boolean = false; - for (const fee of computeEnv.fees[chainId.toString()]) { - if (fee.feeToken.toLowerCase() === paymentToken.toLowerCase()) { - found = true; - break; - } - } - if (found === false) { - console.error( - "Error initializing Provider for the compute job using dataset DID " + - args[1] + - " and algorithm DID " + - args[2] + - " because paymentToken is not supported by this environment " + - args[3] - ); - return; - } - const resources = args[6] // resources object should be stringified in cli when calling initializeCompute - if (!resources) { - console.error( - "Error initializing Provider for the compute job using dataset DID " + - args[1] + - " and algorithm DID " + - args[2] + - " because resources for compute were not provided." 
- ); - return; - } - const parsedResources = JSON.parse(resources); - const providerInitializeComputeJob = - await ProviderInstance.initializeCompute( - assets, - algo, - computeEnv.id, - paymentToken, - supportedMaxJobDuration, - providerURI, - this.signer, // V1 was this.signer.getAddress() - parsedResources, - Number(chainId) - ); - if ( - !providerInitializeComputeJob || - "error" in providerInitializeComputeJob.algorithm - ) { - console.error( - "Error initializing Provider for the compute job using dataset DID " + - args[1] + - " and algorithm DID " + - args[2] - ); - return; - } - console.log(chalk.yellow('\n--- Payment Details ---')); - console.log(JSON.stringify(providerInitializeComputeJob, null, 2)); - return providerInitializeComputeJob; - - } - - public async computeStart(args: string[]) { - const inputDatasetsString = args[1]; - let inputDatasets = []; - - if ( - inputDatasetsString.includes("[") && - inputDatasetsString.includes("]") - ) { - const processedInput = inputDatasetsString - .replaceAll("]", "") - .replaceAll("[", ""); - if (processedInput.indexOf(",") > -1) { - inputDatasets = processedInput.split(","); - } - } else { - inputDatasets.push(inputDatasetsString); - } - - const ddos = []; - - for (const dataset in inputDatasets) { - const dataDdo = await this.aquarius.waitForIndexer( - inputDatasets[dataset], - null, - null, - this.indexingParams.retryInterval, - this.indexingParams.maxRetries - ); - if (!dataDdo) { - console.error( - "Error fetching DDO " + dataset[1] + ". Does this asset exists?" - ); - return; - } else { - ddos.push(dataDdo); - } - } - if ( - inputDatasets.length > 0 && - (ddos.length <= 0 || ddos.length != inputDatasets.length) - ) { - console.error("Not all the data ddos are available."); - return; - } - let providerURI = this.oceanNodeUrl; - if (ddos.length > 0) { - providerURI = ddos[0].services[0].serviceEndpoint; - } - const algoDdo = await this.aquarius.waitForIndexer( - args[2], - null, - null, - this.indexingParams.retryInterval, - this.indexingParams.maxRetries - ); - if (!algoDdo) { - console.error( - "Error fetching DDO " + args[1] + ". Does this asset exists?" - ); - return; - } - - const computeEnvs = await ProviderInstance.getComputeEnvironments( - this.oceanNodeUrl - ); - - if (!computeEnvs || computeEnvs.length < 1) { - console.error( - "Error fetching compute environments. No compute environments available." - ); - return; - } - const computeEnvID = args[3]; - // NO chainId needed anymore (is not part of ComputeEnvironment spec anymore) - // const chainComputeEnvs = computeEnvs[computeEnvID]; // was algoDdo.chainId - let computeEnv = null; // chainComputeEnvs[0]; - - if (computeEnvID && computeEnvID.length > 1) { - for (const index in computeEnvs) { - if (computeEnvID == computeEnvs[index].id) { - computeEnv = computeEnvs[index]; - break; - } - } - } - if (!computeEnv || !computeEnvID) { - console.error( - "Error fetching compute environment. 
-        computeEnvID
-      );
-      return;
-    }
-
-    const algo: ComputeAlgorithm = {
-      documentId: algoDdo.id,
-      serviceId: algoDdo.services[0].id,
-      meta: algoDdo.metadata.algorithm,
-    };
-
-    const assets = [];
-    for (const dataDdo in ddos) {
-      const canStartCompute = isOrderable(
-        ddos[dataDdo],
-        ddos[dataDdo].services[0].id,
-        algo,
-        algoDdo
-      );
-      if (!canStartCompute) {
-        console.error(
-          "Error Cannot start compute job using the datasets DIDs & algorithm DID provided"
-        );
-        return;
-      }
-      assets.push({
-        documentId: ddos[dataDdo].id,
-        serviceId: ddos[dataDdo].services[0].id,
-      });
-    }
-    const providerInitializeComputeJob = args[4]; // provider fees + payment
-    const parsedProviderInitializeComputeJob = fixAndParseProviderFees(providerInitializeComputeJob)
-    console.log("Ordering algorithm: ", args[2]);
-    const datatoken = new Datatoken(
-      this.signer,
-      (await this.signer.provider.getNetwork()).chainId.toString(),
-      this.config
-    );
-    algo.transferTxId = await handleComputeOrder(
-      parsedProviderInitializeComputeJob?.algorithm,
-      algoDdo,
-      this.signer,
-      computeEnv.consumerAddress,
-      0,
-      datatoken,
-      this.config,
-      parsedProviderInitializeComputeJob?.algorithm?.providerFee,
-      providerURI
-    );
-    if (!algo.transferTxId) {
-      console.error(
-        "Error ordering compute for algorithm with DID: " +
-          args[2] +
-          ". Do you have enough tokens?"
-      );
-      return;
-    }
-    console.log("Ordering assets: ", args[1]);
-
-    for (let i = 0; i < ddos.length; i++) {
-      assets[i].transferTxId = await handleComputeOrder(
-        parsedProviderInitializeComputeJob?.datasets[i],
-        ddos[i],
-        this.signer,
-        computeEnv.consumerAddress,
-        0,
-        datatoken,
-        this.config,
-        parsedProviderInitializeComputeJob?.datasets[i].providerFee,
-        providerURI
-      );
-      if (!assets[i].transferTxId) {
-        console.error(
-          "Error ordering dataset with DID: " +
-            assets[i] +
-            ". Do you have enough tokens?"
-        );
-        return;
-      }
-    }
-    // payment check
-    const maxJobDuration = Number(args[5])
-    if (!maxJobDuration) {
-      console.error(
-        "Error initializing Provider for the compute job using dataset DID " +
-          args[1] +
-          " and algorithm DID " +
-          args[2] +
-          " because maxJobDuration was not provided."
-      );
-      return;
-    }
-    if (maxJobDuration < 0) {
-      console.error(
-        "Error starting paid compute using dataset DID " +
-          args[1] +
-          " and algorithm DID " +
-          args[2] +
-          " because maxJobDuration is less than 0. It should be in seconds."
-      );
-      return;
-    }
-    let supportedMaxJobDuration: number = maxJobDuration;
-    if (maxJobDuration > computeEnv.maxJobDuration) {
-      supportedMaxJobDuration = computeEnv.maxJobDuration;
-    }
-    const { chainId } = await this.signer.provider.getNetwork()
-    const paymentToken = args[6]
-    if (!paymentToken) {
-      console.error(
-        "Error starting paid compute using dataset DID " +
-          args[1] +
-          " and algorithm DID " +
-          args[2] +
-          " because paymentToken was not provided."
-      );
-      return;
-    }
-    if (!Object.keys(computeEnv.fees).includes(chainId.toString())) {
-      console.error(
-        "Error starting paid compute using dataset DID " +
-          args[1] +
-          " and algorithm DID " +
-          args[2] +
-          " because chainId is not supported by compute environment. " +
-          args[3] +
-          ". Supported chain IDs: " +
-          computeEnv.fees.keys()
-      );
-      return;
-    }
-    let found: boolean = false;
-    for (const fee of computeEnv.fees[chainId.toString()]) {
-      if (fee.feeToken.toLowerCase() === paymentToken.toLowerCase()) {
-        found = true;
-        break;
-      }
-    }
-    if (found === false) {
-      console.error(
-        "Error starting paid compute using dataset DID " +
-          args[1] +
-          " and algorithm DID " +
-          args[2] +
-          " because paymentToken is not supported by this environment " +
-          args[3]
-      );
-      return;
-    }
-    const resources = args[7] // resources object should be stringified in cli when calling initializeCompute
-    if (!resources) {
-      console.error(
-        "Error starting paid compute using dataset DID " +
-          args[1] +
-          " and algorithm DID " +
-          args[2] +
-          " because resources for compute were not provided."
-      );
-      return;
-    }
-
-    const escrow = new EscrowContract(
-      getAddress(parsedProviderInitializeComputeJob.payment.escrowAddress),
-      this.signer
-    )
-    console.log("Verifying payment...");
-    await new Promise(resolve => setTimeout(resolve, 3000))
-
-    const validationEscrow = await escrow.verifyFundsForEscrowPayment(
-      paymentToken,
-      computeEnv.consumerAddress,
-      await unitsToAmount(this.signer, paymentToken, parsedProviderInitializeComputeJob.payment.amount),
-      parsedProviderInitializeComputeJob.payment.amount.toString(),
-      parsedProviderInitializeComputeJob.payment.minLockSeconds.toString(),
-      '10'
-    )
-    if (validationEscrow.isValid === false) {
-      console.error(
-        "Error starting compute job dataset DID " +
-          args[1] +
-          " and algorithm DID " +
-          args[2] +
-          " because escrow funds check failed: "
-          + validationEscrow.message
-      );
-      return;
-    }
-
-    console.log("Starting compute job using provider: ", providerURI);
-
-    const additionalDatasets = assets.length > 1 ? assets.slice(1) : null;
-    if (assets.length > 0) {
-      console.log(
-        "Starting compute job on " +
-          assets[0].documentId +
-          " with additional datasets:" +
-          (!additionalDatasets ? "none" : additionalDatasets[0].documentId)
-      );
-    } else {
-      console.log(
-        "Starting compute job on " +
-          algo.documentId +
-          " with additional datasets:" +
-          (!additionalDatasets ? "none" : additionalDatasets[0].documentId)
-      );
-    }
-    if (additionalDatasets !== null) {
-      console.log(
-        "Adding additional datasets to dataset, according to C2D V2 specs"
-      );
-      assets.push(additionalDatasets);
-    }
-
-    const output: ComputeOutput = {
-      metadataUri: await getMetadataURI(),
-    };
-
-    const computeJobs = await ProviderInstance.computeStart(
-      providerURI,
-      this.signer,
-      computeEnv.id,
-      assets, // assets[0] // only c2d v1,
-      algo,
-      supportedMaxJobDuration,
-      paymentToken,
-      JSON.parse(resources),
-      Number((await this.signer.provider.getNetwork()).chainId),
-      null,
-      null,
-      // additionalDatasets, only c2d v1
-      output
-    );
-
-    console.log("computeJobs: ", computeJobs);
-
-    if (computeJobs && computeJobs[0]) {
-      const { jobId, payment } = computeJobs[0];
-      console.log("Compute started. JobID: " + jobId);
-      console.log("Agreement ID: " + payment.lockTx);
-    } else {
-      console.log("Error while starting the compute job: ", computeJobs);
-    }
-  }
-
-  public async freeComputeStart(args: string[]) {
-    const inputDatasetsString = args[1];
-    let inputDatasets = [];
-
-    if (
-      inputDatasetsString.includes("[") &&
-      inputDatasetsString.includes("]")
-    ) {
-      const processedInput = inputDatasetsString
-        .replaceAll("]", "")
-        .replaceAll("[", "");
-      if (processedInput.indexOf(",") > -1) {
-        inputDatasets = processedInput.split(",");
-      }
-    } else {
-      inputDatasets.push(inputDatasetsString);
-    }
-
-    const ddos = [];
-
-    for (const dataset in inputDatasets) {
-      const dataDdo = await this.aquarius.waitForIndexer(
-        inputDatasets[dataset],
-        null,
-        null,
-        this.indexingParams.retryInterval,
-        this.indexingParams.maxRetries
-      );
-      if (!dataDdo) {
-        console.error(
-          "Error fetching DDO " + dataset[1] + ". Does this asset exists?"
-        );
-        return;
-      } else {
-        ddos.push(dataDdo);
-      }
-    }
-
-    if (
-      inputDatasets.length > 0 &&
-      (ddos.length <= 0 || ddos.length != inputDatasets.length)
-    ) {
-      console.error("Not all the data ddos are available.");
-      return;
-    }
-    let providerURI = this.oceanNodeUrl;
-    if (ddos.length > 0) {
-      providerURI = ddos[0].services[0].serviceEndpoint;
-    }
-
-    const algoDdo = await this.aquarius.waitForIndexer(
-      args[2],
-      null,
-      null,
-      this.indexingParams.retryInterval,
-      this.indexingParams.maxRetries
-    );
-    if (!algoDdo) {
-      console.error(
-        "Error fetching DDO " + args[1] + ". Does this asset exists?"
-      );
-      return;
-    }
-
-    const computeEnvs = await ProviderInstance.getComputeEnvironments(
-      this.oceanNodeUrl
-    );
-
-    if (!computeEnvs || computeEnvs.length < 1) {
-      console.error(
-        "Error fetching compute environments. No compute environments available."
-      );
-      return;
-    }
-
-    const mytime = new Date();
-    const computeMinutes = 5;
-    mytime.setMinutes(mytime.getMinutes() + computeMinutes);
-
-    const computeEnvID = args[3];
-    // NO chainId needed anymore (is not part of ComputeEnvironment spec anymore)
-    // const chainComputeEnvs = computeEnvs[computeEnvID]; // was algoDdo.chainId
-    let computeEnv = null; // chainComputeEnvs[0];
-
-    if (computeEnvID && computeEnvID.length > 1) {
-      for (const env of computeEnvs) {
-        if (computeEnvID == env.id && env.free) {
-          computeEnv = env;
-          break;
-        }
-      }
-    }
-
-    if (!computeEnv || !computeEnvID) {
-      console.error(
-        "Error fetching free compute environment. No free compute environment matches id: ",
-        computeEnvID
-      );
-      return;
-    }
-
-    const algo: ComputeAlgorithm = {
-      documentId: algoDdo.id,
-      serviceId: algoDdo.services[0].id,
-      meta: algoDdo.metadata.algorithm,
-    };
-
-    const assets = [];
-    for (const dataDdo in ddos) {
-      const canStartCompute = isOrderable(
-        ddos[dataDdo],
-        ddos[dataDdo].services[0].id,
-        algo,
-        algoDdo
-      );
-      if (!canStartCompute) {
-        console.error(
-          "Error Cannot start compute job using the datasets DIDs & algorithm DID provided"
-        );
-        return;
-      }
-      assets.push({
-        documentId: ddos[dataDdo].id,
-        serviceId: ddos[dataDdo].services[0].id,
-      });
-    }
-
-    console.log("Starting compute job using provider: ", providerURI);
-    const additionalDatasets = assets.length > 1 ? assets.slice(1) : null;
-    if (assets.length > 0) {
-      console.log(
-        "Starting compute job on " +
-          assets[0].documentId +
-          " with additional datasets:" +
-          (!additionalDatasets ? "none" : additionalDatasets[0].documentId)
-      );
-    } else {
-      console.log(
-        "Starting compute job on " +
-          algo.documentId +
-          " with additional datasets:" +
-          (!additionalDatasets ? "none" : additionalDatasets[0].documentId)
-      );
-    }
-
-    if (additionalDatasets !== null) {
-      console.log(
-        "Adding additional datasets to dataset, according to C2D V2 specs"
-      );
-      assets.push(additionalDatasets);
-    }
-
-    const output: ComputeOutput = {
-      metadataUri: await getMetadataURI(),
-    };
-
-    const computeJobs = await ProviderInstance.freeComputeStart(
-      providerURI,
-      this.signer,
-      computeEnv.id,
-      assets, // assets[0] // only c2d v1,
-      algo,
-      null,
-      null,
-      null,
-      output
-    );
-
-    console.log("compute jobs: ", computeJobs);
-
-    if (computeJobs && computeJobs[0]) {
-      const { jobId } = computeJobs[0];
-      console.log("Compute started. JobID: " + jobId);
-    } else {
-      console.log("Error while starting the compute job: ", computeJobs);
-    }
-  }
-
-  public async computeStop(args: string[]) {
-    const dataDdo = await this.aquarius.waitForIndexer(
-      args[1],
-      null,
-      null,
-      this.indexingParams.retryInterval,
-      this.indexingParams.maxRetries
-    );
-    if (!dataDdo) {
-      console.error(
-        "Error fetching DDO " + args[1] + ". Does this asset exists?"
-      );
-      return;
-    }
-
-    const jobId = args[2];
-    const jobStatus = await ProviderInstance.computeStop(
-      jobId,
-      this.oceanNodeUrl,
-      this.signer,
-    );
-    console.log(jobStatus);
-  }
-
-  public async getComputeEnvironments() {
-    const computeEnvs = await ProviderInstance.getComputeEnvironments(
-      this.oceanNodeUrl
-    );
-
-    if (!computeEnvs || computeEnvs.length < 1) {
-      console.error(
-        "Error fetching compute environments. No compute environments available."
-      );
-      return;
-    }
-
-    console.log("Exiting compute environments: ", JSON.stringify(computeEnvs));
-  }
-
-  public async computeStreamableLogs(args: string[]) {
-    const jobId = args[0];
-    const logsResponse = await ProviderInstance.computeStreamableLogs(
-      this.oceanNodeUrl,
-      this.signer,
-      jobId
-    );
-    console.log("response: ", logsResponse);
-
-    if (!logsResponse) {
-      console.error("Error fetching streamable logs. No logs available.");
-      return;
-    } else {
-      const stream = logsResponse as ReadableStream;
-      console.log("stream: ", stream);
-      const text = await new Response(stream).text();
-      console.log("Streamable Logs: ");
-      console.log(text);
-      // for await (const value of stream) {
-      //   // just print it to the console
-      //   console.log(value);
-      // }
-    }
-    console.log("Exiting computeStreamableLogs: ", logsResponse);
-  }
-
-  public async allowAlgo(args: string[]) {
-    const asset = await this.aquarius.waitForIndexer(
-      args[1],
-      null,
-      null,
-      this.indexingParams.retryInterval,
-      this.indexingParams.maxRetries
-    );
-    if (!asset) {
-      console.error(
-        "Error fetching DDO " + args[1] + ". Does this asset exists?"
-      );
-      return;
-    }
-
-    if (asset.indexedMetadata.nft.owner !== (await this.signer.getAddress())) {
-      console.error(
-        "You are not the owner of this asset, and there for you cannot update it."
-      );
-      return;
-    }
-
-    if (asset.services[0].type !== "compute") {
-      console.error(
-        "Error getting computeService for " +
-          args[1] +
-          ". Does this asset has an computeService?"
-      );
-      return;
-    }
-    const algoAsset = await this.aquarius.waitForIndexer(
-      args[2],
-      null,
-      null,
-      this.indexingParams.retryInterval,
-      this.indexingParams.maxRetries
-    );
-    if (!algoAsset) {
-      console.error(
-        "Error fetching DDO " + args[2] + ". Does this asset exists?"
-      );
-      return;
-    }
-    const encryptDDO = args[3] === "false" ?
false : true; - let filesChecksum; - try { - filesChecksum = await ProviderInstance.checkDidFiles( - algoAsset.id, - algoAsset.services[0].id, - algoAsset.services[0].serviceEndpoint, - true - ); - } catch (e) { - console.error("Error checking algo files: ", e); - return; - } - - const containerChecksum = - algoAsset.metadata.algorithm.container.entrypoint + - algoAsset.metadata.algorithm.container.checksum; - const trustedAlgorithm = { - did: algoAsset.id, - containerSectionChecksum: getHash(containerChecksum), - filesChecksum: filesChecksum?.[0]?.checksum, - }; - asset.services[0].compute.publisherTrustedAlgorithms.push(trustedAlgorithm); - try { - const txid = await updateAssetMetadata( - this.signer, - asset, - this.oceanNodeUrl, - this.aquarius, - encryptDDO - ); - console.log("Successfully updated asset metadata: " + txid); - } catch (e) { - console.error("Error updating asset metadata: ", e); - return; - } - } - - public async disallowAlgo(args: string[]) { - const asset = await this.aquarius.waitForIndexer( - args[1], - null, - null, - this.indexingParams.retryInterval, - this.indexingParams.maxRetries - ); - if (!asset) { - console.error( - "Error fetching DDO " + args[1] + ". Does this asset exists?" - ); - return; - } - if (asset.indexedMetadata.nft.owner !== (await this.signer.getAddress())) { - console.error( - "You are not the owner of this asset, and there for you cannot update it." - ); - return; - } - if (asset.services[0].type !== "compute") { - console.error( - "Error getting computeService for " + - args[1] + - ". Does this asset has an computeService?" - ); - return; - } - if (asset.services[0].compute.publisherTrustedAlgorithms) { - console.error( - " " + args[1] + ". Does this asset has an computeService?" - ); - return; - } - const encryptDDO = args[3] === "false" ? false : true; - const indexToDelete = - asset.services[0].compute.publisherTrustedAlgorithms.findIndex( - (item) => item.did === args[2] - ); - - if (indexToDelete !== -1) { - asset.services[0].compute.publisherTrustedAlgorithms.splice( - indexToDelete, - 1 - ); - } else { - console.error( - " " + - args[2] + - ". is not allowed by the publisher to run on " + - args[1] - ); - return; - } - - const txid = await updateAssetMetadata( - this.signer, - asset, - this.oceanNodeUrl, - this.aquarius, - encryptDDO - ); - console.log("Asset updated " + txid); - } - - public async getJobStatus(args: string[]) { - // args[1] - did (for checking if data asset exists, legacy) - // args[2] - jobId - // args[3] - agreementId - const hasAgreementId = args.length === 4; - - const dataDdo = await this.aquarius.waitForIndexer( - args[1], - null, - null, - this.indexingParams.retryInterval, - this.indexingParams.maxRetries - ); - if (!dataDdo) { - console.error( - "Error fetching DDO " + args[1] + ". Does this asset exists?" - ); - return; - } - const jobId = args[2]; - let agreementId = null; - if (hasAgreementId) { - agreementId = args[3]; - } - - const jobStatus = (await ProviderInstance.computeStatus( - this.oceanNodeUrl, - await this.signer.getAddress(), - jobId, - agreementId - )) as ComputeJob; - console.log(util.inspect(jobStatus, false, null, true)); - } - - public async downloadJobResults(args: string[]) { - const jobResult = await ProviderInstance.getComputeResultUrl( - this.oceanNodeUrl, - this.signer, - args[1], - parseInt(args[2]) - ); - console.log("jobResult ", jobResult); - - try { - const path = args[3] ? 
args[3] : "."; - const { filename } = await downloadFile( - jobResult, - path, - parseInt(args[2]) - ); - console.log("File downloaded successfully:", path + "/" + filename); - } catch (e) { - console.log(`Download url dataset failed: ${e}`); - } - } - - public async mintOceanTokens() { - try { - const config = await getConfigByChainId(Number(this.config.chainId)); - const minAbi = [ - { - constant: false, - inputs: [ - { name: "to", type: "address" }, - { name: "value", type: "uint256" }, - ], - name: "mint", - outputs: [{ name: "", type: "bool" }], - payable: false, - stateMutability: "nonpayable", - type: "function", - }, - ]; - - const tokenContract = new ethers.Contract( - config?.Ocean, - minAbi, - this.signer - ); - const estGasPublisher = await tokenContract.mint.estimateGas( - await this.signer.getAddress(), - await amountToUnits(null, null, "1000", 18) - ); - const tx = await sendTx( - estGasPublisher, - this.signer, - 1, - tokenContract.mint, - await this.signer.getAddress(), - amountToUnits(null, null, "1000", 18) - ); - await tx.wait(); - } catch (error) { - console.error("Error minting Ocean tokens:", error); - } - } - - public async generateAuthToken() { - const authToken = await ProviderInstance.generateAuthToken( - this.signer, - this.oceanNodeUrl, - ); - console.log(`Auth token successfully generated: ${authToken}`); - } - - public async invalidateAuthToken(args: string[]) { - const authToken = args[0]; - const result = await ProviderInstance.invalidateAuthToken( - this.signer, - authToken, - this.oceanNodeUrl, - ); - if (!result.success) { - console.log('Auth token could not be invalidated'); - return; - } - - console.log(`Auth token successfully invalidated`); - } - - public async getEscrowBalance(token: string): Promise { - const config = await getConfigByChainId(Number(this.config.chainId)); - const escrow = new EscrowContract( - getAddress(config.Escrow), - this.signer, - Number(this.config.chainId) - ); - - try { - const balance = await escrow.getUserFunds(await this.signer.getAddress(), token); - const decimals = await getTokenDecimals(this.signer, token); - const available = balance.available; - const amount = await unitsToAmount(this.signer, token, available, decimals); - console.log(`Escrow user funds for token ${token}: ${amount}`); - return Number(amount); - } catch (error) { - console.error("Error getting escrow balance:", error); - } - } - - public async withdrawFromEscrow(token: string, amount: string): Promise { - const config = await getConfigByChainId(Number(this.config.chainId)); - const escrow = new EscrowContract( - getAddress(config.Escrow), - this.signer, - Number(this.config.chainId) - ); - - const balance = await this.getEscrowBalance(token); - if (balance < Number(amount)) { - console.error(`Insufficient balance in escrow for token ${token}`); - return; - } - - const withdrawTx = await escrow.withdraw([token], [amount]); - await withdrawTx.wait(); - console.log(`Successfully withdrawn ${amount} ${token} from escrow`); - } - - public async depositToEscrow(signer: Signer, token: string, amount: string, chainId: number) { - try { - const amountInUnits = await amountToUnits(signer, token, amount, 18); - const config = await getConfigByChainId(chainId); - const escrowAddress = config.Escrow; - - const tokenContract = new ethers.Contract( - token, - ['function approve(address spender, uint256 amount) returns (bool)'], - signer - ); - - const escrow = new EscrowContract( - getAddress(escrowAddress), - signer, - chainId - ); - - console.log('Approving 
token transfer...') - const approveTx = await tokenContract.approve(escrowAddress, amountInUnits); - await approveTx.wait(); - console.log(`Successfully approved ${amount} ${token} to escrow`); - - - console.log('Depositing to escrow...') - const depositTx = await escrow.deposit(token, amount); - await depositTx.wait(); - return true; - - } catch (error) { - console.error("Error depositing to escrow:", error); - return false; - } - } - - public async authorizeEscrowPayee( - token: string, - payee: string, - maxLockedAmount: string, - maxLockSeconds: string, - maxLockCounts: string - ) { - try { - const config = await getConfigByChainId(Number(this.config.chainId)); - const escrowAddress = config.Escrow; - - const escrow = new EscrowContract( - getAddress(escrowAddress), - this.signer - ); - - console.log("Authorizing payee..."); - const authorizeTx = await escrow.authorize( - getAddress(token), - getAddress(payee), - maxLockedAmount, - maxLockSeconds, - maxLockCounts - ); - await authorizeTx.wait(); - console.log(`Successfully authorized payee ${payee} for token ${token}`); - - return true; - } catch (error) { - console.error("Error authorizing payee:", error); - return false; - } - } - - public async getAuthorizationsEscrow(token: string, payee: string) { - const config = await getConfigByChainId(Number(this.config.chainId)); - const payer = await this.signer.getAddress(); - const tokenAddress = getAddress(token); - const payerAddress = getAddress(payer); - const payeeAddress = getAddress(payee); - const decimals = await getTokenDecimals(this.signer, token); - const escrow = new EscrowContract( - getAddress(config.Escrow), - this.signer, - Number(this.config.chainId) - ); - - const authorizations = await escrow.getAuthorizations(tokenAddress, payerAddress, payeeAddress); - const authorization = authorizations[0] - if (!authorization || authorization.length === 0) { - console.log('No authorizations found'); - return; - } - - const currentLockedAmount = await unitsToAmount(this.signer, token, authorization.currentLockedAmount.toString(), decimals); - const maxLockedAmount = await unitsToAmount(this.signer, token, authorization.maxLockedAmount.toString(), decimals); - - console.log('Authorizations found:') - console.log(`- Current Locked Amount: ${Number(currentLockedAmount)}`) - console.log(`- Current Locks: ${authorization.currentLocks}`) - console.log(`- Max locked amount: ${Number(maxLockedAmount)}`) - console.log(`- Max lock seconds: ${authorization.maxLockSeconds}`) - console.log(`- Max lock counts: ${authorization.maxLockCounts}`) - - return authorizations; - } - - public async createAccessList(args: string[]): Promise { - try { - const name = args[0]; - const symbol = args[1]; - const transferable = args[2] === 'true'; - const initialUsers = args[3] ? args[3].split(',').map(u => u.trim()) : []; - - if (!name || !symbol) { - console.error(chalk.red('Name and symbol are required')); - return; - } - - const config = await getConfigByChainId(Number(this.config.chainId)); - if (!config.AccessListFactory) { - console.error(chalk.red('Access list factory not found. 
Check local address.json file')); - return; - } - const accessListFactory = new AccesslistFactory( - config.AccessListFactory, - this.signer, - Number(this.config.chainId) - ); - - const owner = await this.signer.getAddress(); - const tokenURIs = initialUsers.map(() => 'https://oceanprotocol.com/nft/'); - - console.log(chalk.cyan('Creating new access list...')); - console.log(`Name: ${name}`); - console.log(`Symbol: ${symbol}`); - console.log(`Transferable: ${transferable}`); - console.log(`Owner: ${owner}`); - console.log(`Initial users: ${initialUsers.length > 0 ? initialUsers.join(', ') : 'none'}`); - - const accessListAddress = await accessListFactory.deployAccessListContract( - name, - symbol, - tokenURIs, - transferable, - owner, - initialUsers - ); - - console.log(chalk.green(`\nAccess list created successfully!`)); - console.log(chalk.green(`Contract address: ${accessListAddress}`)); - } catch (error) { - console.error(chalk.red('Error creating access list:'), error); - } - } - - public async addToAccessList(args: string[]): Promise { - try { - const accessListAddress = args[0]; - const users = args[1].split(',').map(u => u.trim()); - - if (!accessListAddress || users.length === 0) { - console.error(chalk.red('Access list address and at least one user are required')); - return; - } - - const accessList = new AccessListContract( - accessListAddress, - this.signer, - Number(this.config.chainId) - ); - - console.log(chalk.cyan(`Adding ${users.length} user(s) to access list...`)); - - if (users.length === 1) { - const tx = await accessList.mint(users[0], 'https://oceanprotocol.com/nft/'); - await tx.wait(); - console.log(chalk.green(`Successfully added user ${users[0]} to access list`)); - return; - } - - const tokenURIs = users.map(() => 'https://oceanprotocol.com/nft/'); - const tx = await accessList.batchMint(users, tokenURIs); - await tx.wait(); - console.log(chalk.green(`Successfully added ${users.length} users to access list:`)); - users.forEach(user => console.log(` - ${user}`)); - } catch (error) { - console.error(chalk.red('Error adding users to access list:'), error); - } - } - - - public async checkAccessList(args: string[]): Promise { - try { - const accessListAddress = args[0]; - const users = args[1].split(',').map(u => u.trim()); - - if (!accessListAddress || users.length === 0) { - console.error(chalk.red('Access list address and at least one user are required')); - return; - } - - const accessList = new AccessListContract( - accessListAddress, - this.signer, - Number(this.config.chainId) - ); - - console.log(chalk.cyan(`Checking access list for ${users.length} user(s)...\n`)); - - for (const user of users) { - const balance = await accessList.balance(user); - const hasAccess = Number(balance) > 0; - - if (hasAccess) { - console.log(chalk.green(`✓ ${user}: Has access (balance: ${balance})`)); - } else { - console.log(chalk.red(`✗ ${user}: No access`)); - } - } - } catch (error) { - console.error(chalk.red('Error checking access list:'), error); - } - } - - - public async removeFromAccessList(args: string[]): Promise { - try { - const accessListAddress = args[0]; - const users = args[1].split(',').map(u => u.trim()); - - if (!accessListAddress || users.length === 0) { - console.error(chalk.red('Access list address and at least one user address are required')); - return; - } - - const accessList = new AccessListContract( - accessListAddress, - this.signer, - Number(this.config.chainId) - ); - - console.log(chalk.cyan(`Removing ${users.length} user(s) from access 
list...`)); - for (const user of users) { - const balance = await accessList.balance(user); - - if (Number(balance) === 0) { - console.log(chalk.yellow(`⚠ User ${user} is not on the access list, skipping...`)); - continue; - } - - const balanceNum = Number(balance); - const contract = accessList.contract; - - let removedCount = 0; - for (let index = 0; index < balanceNum; index++) { - try { - const tokenId = await contract.tokenOfOwnerByIndex(user, index); - const tx = await accessList.burn(Number(tokenId)); - await tx.wait(); - - console.log(chalk.green(`✓ Successfully removed user ${user} (token ID: ${tokenId})`)); - removedCount++; - } catch (e: any) { - console.log(chalk.yellow(`⚠ Could not remove token at index ${index} for user ${user}: ${e.message}`)); - } - } - - if (removedCount === 0) { - console.log(chalk.yellow(`⚠ Could not remove any tokens for user ${user}`)); - } else if (removedCount < balanceNum) { - console.log(chalk.yellow(`⚠ Only removed ${removedCount} of ${balanceNum} tokens for user ${user}`)); - } - } - } catch (error) { - console.error(chalk.red('Error removing users from access list:'), error); - } - } + public signer: Signer; + public config: Config; + public aquarius: Aquarius; + public oceanNodeUrl: string; + // optional settings for indexing wait time + private indexingParams: IndexerWaitParams; + + constructor(signer: Signer, network: string | number, config?: Config) { + this.signer = signer; + this.config = config || new ConfigHelper().getConfig(network); + this.oceanNodeUrl = process.env.NODE_URL; + this.indexingParams = getIndexingWaitSettings(); + console.log("Using Ocean Node URL :", this.oceanNodeUrl); + this.config.nodeUri = this.oceanNodeUrl; + this.aquarius = new Aquarius(this.oceanNodeUrl); + } + + public async start() { + console.log("Starting the interactive CLI flow...\n\n"); + const data = await interactiveFlow(this.oceanNodeUrl); // Collect data via CLI + await publishAsset(this.aquarius, data, this.signer, this.config); // Publish asset with collected data + } + + // utils + public async sleep(ms: number) { + return new Promise((resolve) => { + setTimeout(resolve, ms); + }); + } + + // commands + public async publish(args: string[]) { + console.log("start publishing"); + let asset; + try { + asset = JSON.parse(fs.readFileSync(args[1], "utf8")); + } catch (e) { + console.error("Cannot read metadata from " + args[1]); + console.error(e); + return; + } + const encryptDDO = args[2] === "false" ? false : true; + try { + // add some more checks + const urlAssetId = await createAssetUtil( + asset.indexedMetadata.nft.name, + asset.indexedMetadata.nft.symbol, + this.signer, + asset.services[0].files, + asset, + this.oceanNodeUrl, + this.config, + this.aquarius, + encryptDDO + ); + console.log("Asset published. ID: " + urlAssetId); + } catch (e) { + console.error("Error when publishing dataset from file: " + args[1]); + console.error(e); + return; + } + } + + public async publishAlgo(args: string[]) { + let algoAsset: Asset; + try { + algoAsset = JSON.parse(fs.readFileSync(args[1], "utf8")); + } catch (e) { + console.error("Cannot read metadata from " + args[1]); + console.error(e); + return; + } + const encryptDDO = args[2] === "false" ? 
false : true;
+    // add some more checks
+    try {
+      const algoDid = await createAssetUtil(
+        algoAsset.indexedMetadata.nft.name,
+        algoAsset.indexedMetadata.nft.symbol,
+        this.signer,
+        algoAsset.services[0].files,
+        algoAsset,
+        this.oceanNodeUrl,
+        this.config,
+        this.aquarius,
+        encryptDDO
+      );
+      // add some more checks
+      console.log("Algorithm published. DID: " + algoDid);
+    } catch (e) {
+      console.error("Error when publishing algorithm from file: " + args[1]);
+      console.error(e);
+      return;
+    }
+  }
+
+  public async editAsset(args: string[]) {
+    const asset = await this.aquarius.waitForIndexer(
+      args[1],
+      null,
+      null,
+      this.indexingParams.retryInterval,
+      this.indexingParams.maxRetries
+    );
+    if (!asset) {
+      console.error(
+        "Error fetching DDO " + args[1] + ". Does this asset exist?"
+      );
+      return;
+    }
+    const encryptDDO = args[3] === "false" ? false : true;
+    let updateJson;
+    try {
+      updateJson = JSON.parse(fs.readFileSync(args[2], "utf8"));
+    } catch (e) {
+      console.error("Cannot read metadata from " + args[2]);
+      console.error(e);
+      return;
+    }
+    // Get keys and values
+    const keys = Object.keys(updateJson);
+
+    for (const key of keys) {
+      asset[key] = updateJson[key];
+    }
+
+    const updateAssetTx = await updateAssetMetadata(
+      this.signer,
+      asset,
+      this.oceanNodeUrl,
+      this.aquarius,
+      encryptDDO
+    );
+    console.log("Asset updated. Tx: " + JSON.stringify(updateAssetTx, null, 2));
+  }
+
+  public async getDDO(args: string[]) {
+    console.log("Resolving Asset with DID: " + args[1]);
+    const resolvedDDO = await this.aquarius.waitForIndexer(
+      args[1],
+      null,
+      null,
+      this.indexingParams.retryInterval,
+      this.indexingParams.maxRetries
+    );
+    if (!resolvedDDO) {
+      console.error(
+        "Error fetching Asset with DID: " +
+          args[1] +
+          ". Does this asset exist?"
+      );
+    } else console.log(util.inspect(resolvedDDO, false, null, true));
+  }
+
+  public async download(args: string[]) {
+    const dataDdo = await this.aquarius.waitForIndexer(
+      args[1],
+      null,
+      null,
+      this.indexingParams.retryInterval,
+      this.indexingParams.maxRetries
+    );
+    if (!dataDdo) {
+      console.error(
+        "Error fetching DDO " + args[1] + ". Does this asset exist?"
+      );
+      return;
+    }
+
+    const datatoken = new Datatoken(
+      this.signer,
+      this.config.chainId,
+      this.config
+    );
+
+    const tx = await orderAsset(
+      dataDdo,
+      this.signer,
+      this.config,
+      datatoken,
+      this.oceanNodeUrl
+    );
+
+    if (!tx) {
+      console.error(
+        "Error ordering access for " + args[1] + ". Do you have enough tokens?"
+      );
+      return;
+    }
+
+    const orderTx = await tx.wait();
+
+    const urlDownloadUrl = await ProviderInstance.getDownloadUrl(
+      dataDdo.id,
+      dataDdo.services[0].id,
+      0,
+      orderTx.hash,
+      this.oceanNodeUrl,
+      this.signer
+    );
+    try {
+      const path = args[2] ? args[2] : ".";
args[2] : "."; + const { filename } = await downloadFile(urlDownloadUrl, path); + console.log("File downloaded successfully:", path + "/" + filename); + } catch (e) { + console.log(`Download url dataset failed: ${e}`); + } + } + + public async initializeCompute(args: string[]) { + const inputDatasetsString = args[1]; + let inputDatasets = []; + + if ( + inputDatasetsString.includes("[") && + inputDatasetsString.includes("]") + ) { + const processedInput = inputDatasetsString + .replaceAll("]", "") + .replaceAll("[", ""); + if (processedInput.indexOf(",") > -1) { + inputDatasets = processedInput.split(","); + } + } else { + inputDatasets.push(inputDatasetsString); + } + + const ddos = []; + + for (const dataset in inputDatasets) { + const dataDdo = await this.aquarius.waitForIndexer( + inputDatasets[dataset], + null, + null, + this.indexingParams.retryInterval, + this.indexingParams.maxRetries + ); + if (!dataDdo) { + console.error( + "Error fetching DDO " + dataset[1] + ". Does this asset exists?" + ); + return; + } else { + ddos.push(dataDdo); + } + } + if ( + inputDatasets.length > 0 && + (ddos.length <= 0 || ddos.length != inputDatasets.length) + ) { + console.error("Not all the data ddos are available."); + return; + } + let providerURI = this.oceanNodeUrl; + if (ddos.length > 0) { + providerURI = ddos[0].services[0].serviceEndpoint; + } + + const algoDdo = await this.aquarius.waitForIndexer( + args[2], + null, + null, + this.indexingParams.retryInterval, + this.indexingParams.maxRetries + ); + if (!algoDdo) { + console.error( + "Error fetching DDO " + args[1] + ". Does this asset exists?" + ); + return; + } + + const computeEnvs = await ProviderInstance.getComputeEnvironments( + this.oceanNodeUrl + ); + + if (!computeEnvs || computeEnvs.length < 1) { + console.error( + "Error fetching compute environments. No compute environments available." + ); + return; + } + + const computeEnvID = args[3]; + // NO chainId needed anymore (is not part of ComputeEnvironment spec anymore) + // const chainComputeEnvs = computeEnvs[computeEnvID]; // was algoDdo.chainId + let computeEnv = null; // chainComputeEnvs[0]; + + if (computeEnvID && computeEnvID.length > 1) { + for (const index in computeEnvs) { + if (computeEnvID == computeEnvs[index].id) { + computeEnv = computeEnvs[index]; + break; + } + } + } + if (!computeEnv || !computeEnvID) { + console.error( + "Error fetching compute environment. No compute environment matches id: ", + computeEnvID + ); + return; + } + + const algo: ComputeAlgorithm = { + documentId: algoDdo.id, + serviceId: algoDdo.services[0].id, + meta: algoDdo.metadata.algorithm, + }; + + const assets = []; + for (const dataDdo in ddos) { + const canStartCompute = isOrderable( + ddos[dataDdo], + ddos[dataDdo].services[0].id, + algo, + algoDdo + ); + if (!canStartCompute) { + console.error( + "Error Cannot start compute job using the datasets DIDs & algorithm DID provided" + ); + return; + } + assets.push({ + documentId: ddos[dataDdo].id, + serviceId: ddos[dataDdo].services[0].id, + }); + } + const maxJobDuration = Number(args[4]); + if (!maxJobDuration) { + console.error( + "Error initializing Provider for the compute job using dataset DID " + + args[1] + + " and algorithm DID " + + args[2] + + " because maxJobDuration was not provided." + ); + return; + } + if (maxJobDuration < 0) { + console.error( + "Error initializing Provider for the compute job using dataset DID " + + args[1] + + " and algorithm DID " + + args[2] + + " because maxJobDuration is less than 0. 
+    let supportedMaxJobDuration: number = maxJobDuration;
+    if (maxJobDuration > computeEnv.maxJobDuration) {
+      supportedMaxJobDuration = computeEnv.maxJobDuration;
+    }
+    const paymentToken = args[5];
+    if (!paymentToken) {
+      console.error(
+        "Error initializing Provider for the compute job using dataset DID " +
+          args[1] +
+          " and algorithm DID " +
+          args[2] +
+          " because paymentToken was not provided."
+      );
+      return;
+    }
+    const { chainId } = await this.signer.provider.getNetwork();
+    if (!Object.keys(computeEnv.fees).includes(chainId.toString())) {
+      console.error(
+        "Error starting paid compute using dataset DID " +
+          args[1] +
+          " and algorithm DID " +
+          args[2] +
+          " because chainId is not supported by compute environment " +
+          args[3] +
+          ". Supported chain IDs: " +
+          Object.keys(computeEnv.fees).join(", ")
+      );
+      return;
+    }
+    let found: boolean = false;
+    for (const fee of computeEnv.fees[chainId.toString()]) {
+      if (fee.feeToken.toLowerCase() === paymentToken.toLowerCase()) {
+        found = true;
+        break;
+      }
+    }
+    if (found === false) {
+      console.error(
+        "Error initializing Provider for the compute job using dataset DID " +
+          args[1] +
+          " and algorithm DID " +
+          args[2] +
+          " because paymentToken is not supported by this environment " +
+          args[3]
+      );
+      return;
+    }
+    const resources = args[6]; // resources object should be stringified in cli when calling initializeCompute
+    if (!resources) {
+      console.error(
+        "Error initializing Provider for the compute job using dataset DID " +
+          args[1] +
+          " and algorithm DID " +
+          args[2] +
+          " because resources for compute were not provided."
+      );
+      return;
+    }
+    const parsedResources = JSON.parse(resources);
+    const providerInitializeComputeJob =
+      await ProviderInstance.initializeCompute(
+        assets,
+        algo,
+        computeEnv.id,
+        paymentToken,
+        supportedMaxJobDuration,
+        providerURI,
+        this.signer, // V1 was this.signer.getAddress()
+        parsedResources,
+        Number(chainId)
+      );
+    if (
+      !providerInitializeComputeJob ||
+      "error" in providerInitializeComputeJob.algorithm
+    ) {
+      console.error(
+        "Error initializing Provider for the compute job using dataset DID " +
+          args[1] +
+          " and algorithm DID " +
+          args[2]
+      );
+      return;
+    }
+    console.log(chalk.yellow("\n--- Payment Details ---"));
+    console.log(JSON.stringify(providerInitializeComputeJob, null, 2));
+    return providerInitializeComputeJob;
+  }
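+
+  // Typical paid-compute flow (a sketch; positions as parsed in this file,
+  // values illustrative):
+  //   initializeCompute: args[1]=dataset DID(s), args[2]=algo DID, args[3]=envId,
+  //                      args[4]=maxJobDuration, args[5]=paymentToken, args[6]=resources JSON
+  //   computeStart:      args[1]=dataset DID(s), args[2]=algo DID, args[3]=envId,
+  //                      args[4]=the "--- Payment Details ---" JSON printed above,
+  //                      args[5]=maxJobDuration, args[6]=paymentToken, args[7]=resources JSON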
+  public async computeStart(args: string[]) {
+    const inputDatasetsString = args[1];
+    let inputDatasets = [];
+
+    if (
+      inputDatasetsString.includes("[") &&
+      inputDatasetsString.includes("]")
+    ) {
+      const processedInput = inputDatasetsString
+        .replaceAll("]", "")
+        .replaceAll("[", "");
+      if (processedInput.indexOf(",") > -1) {
+        inputDatasets = processedInput.split(",");
+      }
+    } else {
+      inputDatasets.push(inputDatasetsString);
+    }
+
+    const ddos = [];
+
+    for (const dataset in inputDatasets) {
+      const dataDdo = await this.aquarius.waitForIndexer(
+        inputDatasets[dataset],
+        null,
+        null,
+        this.indexingParams.retryInterval,
+        this.indexingParams.maxRetries
+      );
+      if (!dataDdo) {
+        console.error(
+          "Error fetching DDO " +
+            inputDatasets[dataset] +
+            ". Does this asset exist?"
+        );
+        return;
+      } else {
+        ddos.push(dataDdo);
+      }
+    }
+    if (
+      inputDatasets.length > 0 &&
+      (ddos.length <= 0 || ddos.length != inputDatasets.length)
+    ) {
+      console.error("Not all of the dataset DDOs are available.");
+      return;
+    }
+    let providerURI = this.oceanNodeUrl;
+    if (ddos.length > 0) {
+      providerURI = ddos[0].services[0].serviceEndpoint;
+    }
+    const algoDdo = await this.aquarius.waitForIndexer(
+      args[2],
+      null,
+      null,
+      this.indexingParams.retryInterval,
+      this.indexingParams.maxRetries
+    );
+    if (!algoDdo) {
+      console.error(
+        "Error fetching DDO " + args[2] + ". Does this asset exist?"
+      );
+      return;
+    }
+
+    const computeEnvs = await ProviderInstance.getComputeEnvironments(
+      this.oceanNodeUrl
+    );
+
+    if (!computeEnvs || computeEnvs.length < 1) {
+      console.error(
+        "Error fetching compute environments. No compute environments available."
+      );
+      return;
+    }
+    const computeEnvID = args[3];
+    // NO chainId needed anymore (is not part of ComputeEnvironment spec anymore)
+    // const chainComputeEnvs = computeEnvs[computeEnvID]; // was algoDdo.chainId
+    let computeEnv = null; // chainComputeEnvs[0];
+
+    if (computeEnvID && computeEnvID.length > 1) {
+      for (const index in computeEnvs) {
+        if (computeEnvID == computeEnvs[index].id) {
+          computeEnv = computeEnvs[index];
+          break;
+        }
+      }
+    }
+    if (!computeEnv || !computeEnvID) {
+      console.error(
+        "Error fetching compute environment. No compute environment matches id: ",
+        computeEnvID
+      );
+      return;
+    }
+
+    const algo: ComputeAlgorithm = {
+      documentId: algoDdo.id,
+      serviceId: algoDdo.services[0].id,
+      meta: algoDdo.metadata.algorithm,
+    };
+
+    const assets = [];
+    for (const dataDdo in ddos) {
+      const canStartCompute = isOrderable(
+        ddos[dataDdo],
+        ddos[dataDdo].services[0].id,
+        algo,
+        algoDdo
+      );
+      if (!canStartCompute) {
+        console.error(
+          "Error: cannot start a compute job using the dataset DIDs & algorithm DID provided"
+        );
+        return;
+      }
+      assets.push({
+        documentId: ddos[dataDdo].id,
+        serviceId: ddos[dataDdo].services[0].id,
+      });
+    }
+    const providerInitializeComputeJob = args[4]; // provider fees + payment
+    const parsedProviderInitializeComputeJob = fixAndParseProviderFees(
+      providerInitializeComputeJob
+    );
+    console.log("Ordering algorithm: ", args[2]);
+    const datatoken = new Datatoken(
+      this.signer,
+      (await this.signer.provider.getNetwork()).chainId.toString(),
+      this.config
+    );
+    algo.transferTxId = await handleComputeOrder(
+      parsedProviderInitializeComputeJob?.algorithm,
+      algoDdo,
+      this.signer,
+      computeEnv.consumerAddress,
+      0,
+      datatoken,
+      this.config,
+      parsedProviderInitializeComputeJob?.algorithm?.providerFee,
+      providerURI
+    );
+    if (!algo.transferTxId) {
+      console.error(
+        "Error ordering compute for algorithm with DID: " +
+          args[2] +
+          ". Do you have enough tokens?"
+      );
+      return;
+    }
+    console.log("Ordering assets: ", args[1]);
+
+    for (let i = 0; i < ddos.length; i++) {
+      assets[i].transferTxId = await handleComputeOrder(
+        parsedProviderInitializeComputeJob?.datasets[i],
+        ddos[i],
+        this.signer,
+        computeEnv.consumerAddress,
+        0,
+        datatoken,
+        this.config,
+        parsedProviderInitializeComputeJob?.datasets[i].providerFee,
+        providerURI
+      );
+      if (!assets[i].transferTxId) {
+        console.error(
+          "Error ordering dataset with DID: " +
+            assets[i].documentId +
+            ". Do you have enough tokens?"
+        );
+        return;
+      }
+    }
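+    // The remaining args re-validate the payment side (duration, token,
+    // resources) before funds are locked in escrow via
+    // verifyFundsForEscrowPayment below.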
+    // payment check
+    const maxJobDuration = Number(args[5]);
+    if (!maxJobDuration) {
+      console.error(
+        "Error starting paid compute using dataset DID " +
+          args[1] +
+          " and algorithm DID " +
+          args[2] +
+          " because maxJobDuration was not provided."
+      );
+      return;
+    }
+    if (maxJobDuration < 0) {
+      console.error(
+        "Error starting paid compute using dataset DID " +
+          args[1] +
+          " and algorithm DID " +
+          args[2] +
+          " because maxJobDuration is less than 0. It should be in seconds."
+      );
+      return;
+    }
+    let supportedMaxJobDuration: number = maxJobDuration;
+    if (maxJobDuration > computeEnv.maxJobDuration) {
+      supportedMaxJobDuration = computeEnv.maxJobDuration;
+    }
+    const { chainId } = await this.signer.provider.getNetwork();
+    const paymentToken = args[6];
+    if (!paymentToken) {
+      console.error(
+        "Error starting paid compute using dataset DID " +
+          args[1] +
+          " and algorithm DID " +
+          args[2] +
+          " because paymentToken was not provided."
+      );
+      return;
+    }
+    if (!Object.keys(computeEnv.fees).includes(chainId.toString())) {
+      console.error(
+        "Error starting paid compute using dataset DID " +
+          args[1] +
+          " and algorithm DID " +
+          args[2] +
+          " because chainId is not supported by compute environment " +
+          args[3] +
+          ". Supported chain IDs: " +
+          Object.keys(computeEnv.fees).join(", ")
+      );
+      return;
+    }
+    let found: boolean = false;
+    for (const fee of computeEnv.fees[chainId.toString()]) {
+      if (fee.feeToken.toLowerCase() === paymentToken.toLowerCase()) {
+        found = true;
+        break;
+      }
+    }
+    if (found === false) {
+      console.error(
+        "Error starting paid compute using dataset DID " +
+          args[1] +
+          " and algorithm DID " +
+          args[2] +
+          " because paymentToken is not supported by this environment " +
+          args[3]
+      );
+      return;
+    }
+    const resources = args[7]; // resources object should be stringified in cli when calling initializeCompute
+    if (!resources) {
+      console.error(
+        "Error starting paid compute using dataset DID " +
+          args[1] +
+          " and algorithm DID " +
+          args[2] +
+          " because resources for compute were not provided."
+      );
+      return;
+    }
+
+    const escrow = new EscrowContract(
+      getAddress(parsedProviderInitializeComputeJob.payment.escrowAddress),
+      this.signer
+    );
+    console.log("Verifying payment...");
+    await new Promise((resolve) => setTimeout(resolve, 3000));
+
+    const validationEscrow = await escrow.verifyFundsForEscrowPayment(
+      paymentToken,
+      computeEnv.consumerAddress,
+      await unitsToAmount(
+        this.signer,
+        paymentToken,
+        parsedProviderInitializeComputeJob.payment.amount
+      ),
+      parsedProviderInitializeComputeJob.payment.amount.toString(),
+      parsedProviderInitializeComputeJob.payment.minLockSeconds.toString(),
+      "10"
+    );
+    if (validationEscrow.isValid === false) {
+      console.error(
+        "Error starting compute job using dataset DID " +
+          args[1] +
+          " and algorithm DID " +
+          args[2] +
+          " because escrow funds check failed: " +
+          validationEscrow.message
+      );
+      return;
+    }
+
+    console.log("Starting compute job using provider: ", providerURI);
+
+    const additionalDatasets = assets.length > 1 ? assets.slice(1) : null;
+    if (assets.length > 0) {
+      console.log(
+        "Starting compute job on " +
+          assets[0].documentId +
+          " with additional datasets: " +
+          (!additionalDatasets ? "none" : additionalDatasets[0].documentId)
+      );
+    } else {
+      console.log(
+        "Starting compute job on " +
+          algo.documentId +
+          " with additional datasets: " +
+          (!additionalDatasets ? 
"none" : additionalDatasets[0].documentId) + ); + } + if (additionalDatasets !== null) { + console.log( + "Adding additional datasets to dataset, according to C2D V2 specs" + ); + assets.push(additionalDatasets); + } + + const output: ComputeOutput = { + metadataUri: await getMetadataURI(), + }; + + const computeJobs = await ProviderInstance.computeStart( + providerURI, + this.signer, + computeEnv.id, + assets, // assets[0] // only c2d v1, + algo, + supportedMaxJobDuration, + paymentToken, + JSON.parse(resources), + Number((await this.signer.provider.getNetwork()).chainId), + null, + null, + // additionalDatasets, only c2d v1 + output + ); + + console.log("computeJobs: ", computeJobs); + + if (computeJobs && computeJobs[0]) { + const { jobId, payment } = computeJobs[0]; + console.log("Compute started. JobID: " + jobId); + console.log("Agreement ID: " + payment.lockTx); + } else { + console.log("Error while starting the compute job: ", computeJobs); + } + } + + public async freeComputeStart(args: string[]) { + const inputDatasetsString = args[1]; + let inputDatasets = []; + + if ( + inputDatasetsString.includes("[") && + inputDatasetsString.includes("]") + ) { + const processedInput = inputDatasetsString + .replaceAll("]", "") + .replaceAll("[", ""); + if (processedInput.indexOf(",") > -1) { + inputDatasets = processedInput.split(","); + } + } else { + inputDatasets.push(inputDatasetsString); + } + + const ddos = []; + + for (const dataset in inputDatasets) { + const dataDdo = await this.aquarius.waitForIndexer( + inputDatasets[dataset], + null, + null, + this.indexingParams.retryInterval, + this.indexingParams.maxRetries + ); + if (!dataDdo) { + console.error( + "Error fetching DDO " + dataset[1] + ". Does this asset exists?" + ); + return; + } else { + ddos.push(dataDdo); + } + } + + if ( + inputDatasets.length > 0 && + (ddos.length <= 0 || ddos.length != inputDatasets.length) + ) { + console.error("Not all the data ddos are available."); + return; + } + let providerURI = this.oceanNodeUrl; + if (ddos.length > 0) { + providerURI = ddos[0].services[0].serviceEndpoint; + } + + const algoDdo = await this.aquarius.waitForIndexer( + args[2], + null, + null, + this.indexingParams.retryInterval, + this.indexingParams.maxRetries + ); + if (!algoDdo) { + console.error( + "Error fetching DDO " + args[1] + ". Does this asset exists?" + ); + return; + } + + const computeEnvs = await ProviderInstance.getComputeEnvironments( + this.oceanNodeUrl + ); + + if (!computeEnvs || computeEnvs.length < 1) { + console.error( + "Error fetching compute environments. No compute environments available." + ); + return; + } + + const mytime = new Date(); + const computeMinutes = 5; + mytime.setMinutes(mytime.getMinutes() + computeMinutes); + + const computeEnvID = args[3]; + // NO chainId needed anymore (is not part of ComputeEnvironment spec anymore) + // const chainComputeEnvs = computeEnvs[computeEnvID]; // was algoDdo.chainId + let computeEnv = null; // chainComputeEnvs[0]; + + if (computeEnvID && computeEnvID.length > 1) { + for (const env of computeEnvs) { + if (computeEnvID == env.id && env.free) { + computeEnv = env; + break; + } + } + } + + if (!computeEnv || !computeEnvID) { + console.error( + "Error fetching free compute environment. 
No free compute environment matches id: ",
+        computeEnvID
+      );
+      return;
+    }
+
+    const algo: ComputeAlgorithm = {
+      documentId: algoDdo.id,
+      serviceId: algoDdo.services[0].id,
+      meta: algoDdo.metadata.algorithm,
+    };
+
+    const assets = [];
+    for (const dataDdo in ddos) {
+      const canStartCompute = isOrderable(
+        ddos[dataDdo],
+        ddos[dataDdo].services[0].id,
+        algo,
+        algoDdo
+      );
+      if (!canStartCompute) {
+        console.error(
+          "Error: cannot start a compute job using the dataset DIDs & algorithm DID provided"
+        );
+        return;
+      }
+      assets.push({
+        documentId: ddos[dataDdo].id,
+        serviceId: ddos[dataDdo].services[0].id,
+      });
+    }
+
+    console.log("Starting compute job using provider: ", providerURI);
+    const additionalDatasets = assets.length > 1 ? assets.slice(1) : null;
+    if (assets.length > 0) {
+      console.log(
+        "Starting compute job on " +
+          assets[0].documentId +
+          " with additional datasets: " +
+          (!additionalDatasets ? "none" : additionalDatasets[0].documentId)
+      );
+    } else {
+      console.log(
+        "Starting compute job on " +
+          algo.documentId +
+          " with additional datasets: " +
+          (!additionalDatasets ? "none" : additionalDatasets[0].documentId)
+      );
+    }
+
+    if (additionalDatasets !== null) {
+      console.log(
+        "Adding additional datasets to the compute job, according to C2D V2 specs"
+      );
+      assets.push(additionalDatasets);
+    }
+
+    const output: ComputeOutput = {
+      metadataUri: await getMetadataURI(),
+    };
+
+    const computeJobs = await ProviderInstance.freeComputeStart(
+      providerURI,
+      this.signer,
+      computeEnv.id,
+      assets, // assets[0] // only c2d v1,
+      algo,
+      null,
+      null,
+      null,
+      output
+    );
+
+    console.log("compute jobs: ", computeJobs);
+
+    if (computeJobs && computeJobs[0]) {
+      const { jobId } = computeJobs[0];
+      console.log("Compute started. JobID: " + jobId);
+    } else {
+      console.log("Error while starting the compute job: ", computeJobs);
+    }
+  }
+
+  public async computeStop(args: string[]) {
+    const dataDdo = await this.aquarius.waitForIndexer(
+      args[1],
+      null,
+      null,
+      this.indexingParams.retryInterval,
+      this.indexingParams.maxRetries
+    );
+    if (!dataDdo) {
+      console.error(
+        "Error fetching DDO " + args[1] + ". Does this asset exist?"
+      );
+      return;
+    }
+
+    const jobId = args[2];
+    const jobStatus = await ProviderInstance.computeStop(
+      jobId,
+      this.oceanNodeUrl,
+      this.signer
+    );
+    console.log(jobStatus);
+  }
+
+  public async getComputeEnvironments() {
+    const computeEnvs = await ProviderInstance.getComputeEnvironments(
+      this.oceanNodeUrl
+    );
+
+    if (!computeEnvs || computeEnvs.length < 1) {
+      console.error(
+        "Error fetching compute environments. No compute environments available."
+      );
+      return;
+    }
+
+    console.log("Existing compute environments: ", JSON.stringify(computeEnvs));
+  }
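+
+  // computeStreamableLogs tails a running job's logs; the response body is
+  // consumed below via new Response(stream).text(). args[0] is the job id.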
+  public async computeStreamableLogs(args: string[]) {
+    const jobId = args[0];
+    const logsResponse = await ProviderInstance.computeStreamableLogs(
+      this.oceanNodeUrl,
+      this.signer,
+      jobId
+    );
+    console.log("response: ", logsResponse);
+
+    if (!logsResponse) {
+      console.error("Error fetching streamable logs. No logs available.");
+      return;
+    } else {
+      const stream = logsResponse as ReadableStream;
+      console.log("stream: ", stream);
+      const text = await new Response(stream).text();
+      console.log("Streamable Logs: ");
+      console.log(text);
+      // for await (const value of stream) {
+      //   // just print it to the console
+      //   console.log(value);
+      // }
+    }
+    console.log("Exiting computeStreamableLogs: ", logsResponse);
+  }
+
+  public async allowAlgo(args: string[]) {
+    const asset = await this.aquarius.waitForIndexer(
+      args[1],
+      null,
+      null,
+      this.indexingParams.retryInterval,
+      this.indexingParams.maxRetries
+    );
+    if (!asset) {
+      console.error(
+        "Error fetching DDO " + args[1] + ". Does this asset exist?"
+      );
+      return;
+    }
+
+    if (asset.indexedMetadata.nft.owner !== (await this.signer.getAddress())) {
+      console.error(
+        "You are not the owner of this asset, and therefore you cannot update it."
+      );
+      return;
+    }
+
+    if (asset.services[0].type !== "compute") {
+      console.error(
+        "Error getting computeService for " +
+          args[1] +
+          ". Does this asset have a compute service?"
+      );
+      return;
+    }
+    const algoAsset = await this.aquarius.waitForIndexer(
+      args[2],
+      null,
+      null,
+      this.indexingParams.retryInterval,
+      this.indexingParams.maxRetries
+    );
+    if (!algoAsset) {
+      console.error(
+        "Error fetching DDO " + args[2] + ". Does this asset exist?"
+      );
+      return;
+    }
+    const encryptDDO = args[3] === "false" ? false : true;
+    let filesChecksum;
+    try {
+      filesChecksum = await ProviderInstance.checkDidFiles(
+        algoAsset.id,
+        algoAsset.services[0].id,
+        algoAsset.services[0].serviceEndpoint,
+        true
+      );
+    } catch (e) {
+      console.error("Error checking algo files: ", e);
+      return;
+    }
+
+    const containerChecksum =
+      algoAsset.metadata.algorithm.container.entrypoint +
+      algoAsset.metadata.algorithm.container.checksum;
+    const trustedAlgorithm = {
+      did: algoAsset.id,
+      containerSectionChecksum: getHash(containerChecksum),
+      filesChecksum: filesChecksum?.[0]?.checksum,
+    };
+    asset.services[0].compute.publisherTrustedAlgorithms.push(trustedAlgorithm);
+    try {
+      const txid = await updateAssetMetadata(
+        this.signer,
+        asset,
+        this.oceanNodeUrl,
+        this.aquarius,
+        encryptDDO
+      );
+      console.log("Successfully updated asset metadata: " + txid);
+    } catch (e) {
+      console.error("Error updating asset metadata: ", e);
+      return;
+    }
+  }
+
+  public async disallowAlgo(args: string[]) {
+    const asset = await this.aquarius.waitForIndexer(
+      args[1],
+      null,
+      null,
+      this.indexingParams.retryInterval,
+      this.indexingParams.maxRetries
+    );
+    if (!asset) {
+      console.error(
+        "Error fetching DDO " + args[1] + ". Does this asset exist?"
+      );
+      return;
+    }
+    if (asset.indexedMetadata.nft.owner !== (await this.signer.getAddress())) {
+      console.error(
+        "You are not the owner of this asset, and therefore you cannot update it."
+      );
+      return;
+    }
+    if (asset.services[0].type !== "compute") {
+      console.error(
+        "Error getting computeService for " +
+          args[1] +
+          ". Does this asset have a compute service?"
+      );
+      return;
+    }
+    if (!asset.services[0].compute.publisherTrustedAlgorithms) {
+      console.error(
+        "Asset " + args[1] + " has no publisher-trusted algorithms to remove."
+      );
+      return;
+    }
+    const encryptDDO = args[3] === "false" ? false : true;
+    const indexToDelete =
+      asset.services[0].compute.publisherTrustedAlgorithms.findIndex(
+        (item) => item.did === args[2]
+      );
+
+    if (indexToDelete !== -1) {
+      asset.services[0].compute.publisherTrustedAlgorithms.splice(
+        indexToDelete,
+        1
+      );
+    } else {
+      console.error(
+        "Algorithm " +
+          args[2] +
+          " is not allowed by the publisher to run on " +
+          args[1]
+      );
+      return;
+    }
+
+    const txid = await updateAssetMetadata(
+      this.signer,
+      asset,
+      this.oceanNodeUrl,
+      this.aquarius,
+      encryptDDO
+    );
+    console.log("Asset updated " + txid);
+  }
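+
+  // getJobStatus expects: args[1] = dataset DID (legacy existence check),
+  // args[2] = jobId, args[3] = agreementId (optional; only read when four
+  // args are passed, as checked below).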
+  public async getJobStatus(args: string[]) {
+    // args[1] - did (for checking if data asset exists, legacy)
+    // args[2] - jobId
+    // args[3] - agreementId
+    const hasAgreementId = args.length === 4;
+
+    const dataDdo = await this.aquarius.waitForIndexer(
+      args[1],
+      null,
+      null,
+      this.indexingParams.retryInterval,
+      this.indexingParams.maxRetries
+    );
+    if (!dataDdo) {
+      console.error(
+        "Error fetching DDO " + args[1] + ". Does this asset exist?"
+      );
+      return;
+    }
+    const jobId = args[2];
+    let agreementId = null;
+    if (hasAgreementId) {
+      agreementId = args[3];
+    }
+
+    const jobStatus = (await ProviderInstance.computeStatus(
+      this.oceanNodeUrl,
+      await this.signer.getAddress(),
+      jobId,
+      agreementId
+    )) as ComputeJob;
+    console.log(util.inspect(jobStatus, false, null, true));
+  }
+
+  public async downloadJobResults(args: string[]) {
+    const jobResult = await ProviderInstance.getComputeResultUrl(
+      this.oceanNodeUrl,
+      this.signer,
+      args[1],
+      parseInt(args[2])
+    );
+    console.log("jobResult ", jobResult);
+
+    try {
+      const path = args[3] ? args[3] : ".";
+      const { filename } = await downloadFile(
+        jobResult,
+        path,
+        parseInt(args[2])
+      );
+      console.log("File downloaded successfully:", path + "/" + filename);
+    } catch (e) {
+      console.log(`Download url dataset failed: ${e}`);
+    }
+  }
+
+  public async mintOceanTokens() {
+    try {
+      const config = await getConfigByChainId(Number(this.config.chainId));
+      const minAbi = [
+        {
+          constant: false,
+          inputs: [
+            { name: "to", type: "address" },
+            { name: "value", type: "uint256" },
+          ],
+          name: "mint",
+          outputs: [{ name: "", type: "bool" }],
+          payable: false,
+          stateMutability: "nonpayable",
+          type: "function",
+        },
+      ];
+
+      const tokenContract = new ethers.Contract(
+        config?.Ocean,
+        minAbi,
+        this.signer
+      );
+      const estGasPublisher = await tokenContract.mint.estimateGas(
+        await this.signer.getAddress(),
+        await amountToUnits(null, null, "1000", 18)
+      );
+      const tx = await sendTx(
+        estGasPublisher,
+        this.signer,
+        1,
+        tokenContract.mint,
+        await this.signer.getAddress(),
+        await amountToUnits(null, null, "1000", 18)
+      );
+      await tx.wait();
+    } catch (error) {
+      console.error("Error minting Ocean tokens:", error);
+    }
+  }
+
+  public async generateAuthToken() {
+    const authToken = await ProviderInstance.generateAuthToken(
+      this.signer,
+      this.oceanNodeUrl
+    );
+    console.log(`Auth token successfully generated: ${authToken}`);
+  }
+
+  public async invalidateAuthToken(args: string[]) {
+    const authToken = args[0];
+    const result = await ProviderInstance.invalidateAuthToken(
+      this.signer,
+      authToken,
+      this.oceanNodeUrl
+    );
+    if (!result.success) {
+      console.log("Auth token could not be invalidated");
+      return;
+    }
+
+    console.log(`Auth token successfully invalidated`);
+  }
+
+  public async getEscrowBalance(token: string): Promise<number> {
+    const config = await getConfigByChainId(Number(this.config.chainId));
+    const escrow = new EscrowContract(
+      getAddress(config.Escrow),
+      this.signer,
+      
Number(this.config.chainId) + ); + + try { + const balance = await escrow.getUserFunds( + await this.signer.getAddress(), + token + ); + const decimals = await getTokenDecimals(this.signer, token); + const available = balance.available; + const amount = await unitsToAmount( + this.signer, + token, + available, + decimals + ); + console.log(`Escrow user funds for token ${token}: ${amount}`); + return Number(amount); + } catch (error) { + console.error("Error getting escrow balance:", error); + } + } + + public async withdrawFromEscrow( + token: string, + amount: string + ): Promise { + const config = await getConfigByChainId(Number(this.config.chainId)); + const escrow = new EscrowContract( + getAddress(config.Escrow), + this.signer, + Number(this.config.chainId) + ); + + const balance = await this.getEscrowBalance(token); + if (balance < Number(amount)) { + console.error(`Insufficient balance in escrow for token ${token}`); + return; + } + + const withdrawTx = await escrow.withdraw([token], [amount]); + await withdrawTx.wait(); + console.log(`Successfully withdrawn ${amount} ${token} from escrow`); + } + + public async depositToEscrow( + signer: Signer, + token: string, + amount: string, + chainId: number + ) { + try { + const amountInUnits = await amountToUnits(signer, token, amount, 18); + const config = await getConfigByChainId(chainId); + const escrowAddress = config.Escrow; + + const tokenContract = new ethers.Contract( + token, + ["function approve(address spender, uint256 amount) returns (bool)"], + signer + ); + + const escrow = new EscrowContract( + getAddress(escrowAddress), + signer, + chainId + ); + + console.log("Approving token transfer..."); + const approveTx = await tokenContract.approve( + escrowAddress, + amountInUnits + ); + await approveTx.wait(); + console.log(`Successfully approved ${amount} ${token} to escrow`); + + console.log("Depositing to escrow..."); + const depositTx = await escrow.deposit(token, amount); + await depositTx.wait(); + return true; + } catch (error) { + console.error("Error depositing to escrow:", error); + return false; + } + } + + public async authorizeEscrowPayee( + token: string, + payee: string, + maxLockedAmount: string, + maxLockSeconds: string, + maxLockCounts: string + ) { + try { + const config = await getConfigByChainId(Number(this.config.chainId)); + const escrowAddress = config.Escrow; + + const escrow = new EscrowContract(getAddress(escrowAddress), this.signer); + + console.log("Authorizing payee..."); + const authorizeTx = await escrow.authorize( + getAddress(token), + getAddress(payee), + maxLockedAmount, + maxLockSeconds, + maxLockCounts + ); + await authorizeTx.wait(); + console.log(`Successfully authorized payee ${payee} for token ${token}`); + + return true; + } catch (error) { + console.error("Error authorizing payee:", error); + return false; + } + } + + public async getAuthorizationsEscrow(token: string, payee: string) { + const config = await getConfigByChainId(Number(this.config.chainId)); + const payer = await this.signer.getAddress(); + const tokenAddress = getAddress(token); + const payerAddress = getAddress(payer); + const payeeAddress = getAddress(payee); + const decimals = await getTokenDecimals(this.signer, token); + const escrow = new EscrowContract( + getAddress(config.Escrow), + this.signer, + Number(this.config.chainId) + ); + + const authorizations = await escrow.getAuthorizations( + tokenAddress, + payerAddress, + payeeAddress + ); + const authorization = authorizations[0]; + if (!authorization || 
authorization.length === 0) { + console.log("No authorizations found"); + return; + } + + const currentLockedAmount = await unitsToAmount( + this.signer, + token, + authorization.currentLockedAmount.toString(), + decimals + ); + const maxLockedAmount = await unitsToAmount( + this.signer, + token, + authorization.maxLockedAmount.toString(), + decimals + ); + + console.log("Authorizations found:"); + console.log(`- Current Locked Amount: ${Number(currentLockedAmount)}`); + console.log(`- Current Locks: ${authorization.currentLocks}`); + console.log(`- Max locked amount: ${Number(maxLockedAmount)}`); + console.log(`- Max lock seconds: ${authorization.maxLockSeconds}`); + console.log(`- Max lock counts: ${authorization.maxLockCounts}`); + + return authorizations; + } + + public async createAccessList(args: string[]): Promise { + try { + const name = args[0]; + const symbol = args[1]; + const transferable = args[2] === "true"; + const initialUsers = args[3] + ? args[3].split(",").map((u) => u.trim()) + : []; + + if (!name || !symbol) { + console.error(chalk.red("Name and symbol are required")); + return; + } + + const config = await getConfigByChainId(Number(this.config.chainId)); + if (!config.AccessListFactory) { + console.error( + chalk.red( + "Access list factory not found. Check local address.json file" + ) + ); + return; + } + const accessListFactory = new AccesslistFactory( + config.AccessListFactory, + this.signer, + Number(this.config.chainId) + ); + + const owner = await this.signer.getAddress(); + const tokenURIs = initialUsers.map( + () => "https://oceanprotocol.com/nft/" + ); + + console.log(chalk.cyan("Creating new access list...")); + console.log(`Name: ${name}`); + console.log(`Symbol: ${symbol}`); + console.log(`Transferable: ${transferable}`); + console.log(`Owner: ${owner}`); + console.log( + `Initial users: ${ + initialUsers.length > 0 ? 
initialUsers.join(", ") : "none"
+        }`
+      );
+
+      const accessListAddress =
+        await accessListFactory.deployAccessListContract(
+          name,
+          symbol,
+          tokenURIs,
+          transferable,
+          owner,
+          initialUsers
+        );
+
+      console.log(chalk.green(`\nAccess list created successfully!`));
+      console.log(chalk.green(`Contract address: ${accessListAddress}`));
+    } catch (error) {
+      console.error(chalk.red("Error creating access list:"), error);
+    }
+  }
+
+  public async addToAccessList(args: string[]): Promise<void> {
+    try {
+      const accessListAddress = args[0];
+      const users = args[1].split(",").map((u) => u.trim());
+
+      if (!accessListAddress || users.length === 0) {
+        console.error(
+          chalk.red("Access list address and at least one user are required")
+        );
+        return;
+      }
+
+      const accessList = new AccessListContract(
+        accessListAddress,
+        this.signer,
+        Number(this.config.chainId)
+      );
+
+      console.log(
+        chalk.cyan(`Adding ${users.length} user(s) to access list...`)
+      );
+
+      if (users.length === 1) {
+        const tx = await accessList.mint(
+          users[0],
+          "https://oceanprotocol.com/nft/"
+        );
+        await tx.wait();
+        console.log(
+          chalk.green(`Successfully added user ${users[0]} to access list`)
+        );
+        return;
+      }
+
+      const tokenURIs = users.map(() => "https://oceanprotocol.com/nft/");
+      const tx = await accessList.batchMint(users, tokenURIs);
+      await tx.wait();
+      console.log(
+        chalk.green(`Successfully added ${users.length} users to access list:`)
+      );
+      users.forEach((user) => console.log(` - ${user}`));
+    } catch (error) {
+      console.error(chalk.red("Error adding users to access list:"), error);
+    }
+  }
+
+  public async checkAccessList(args: string[]): Promise<void> {
+    try {
+      const accessListAddress = args[0];
+      const users = args[1].split(",").map((u) => u.trim());
+
+      if (!accessListAddress || users.length === 0) {
+        console.error(
+          chalk.red("Access list address and at least one user are required")
+        );
+        return;
+      }
+
+      const accessList = new AccessListContract(
+        accessListAddress,
+        this.signer,
+        Number(this.config.chainId)
+      );
+
+      console.log(
+        chalk.cyan(`Checking access list for ${users.length} user(s)...\n`)
+      );
+
+      for (const user of users) {
+        const balance = await accessList.balance(user);
+        const hasAccess = Number(balance) > 0;
+
+        if (hasAccess) {
+          console.log(
+            chalk.green(`✓ ${user}: Has access (balance: ${balance})`)
+          );
+        } else {
+          console.log(chalk.red(`✗ ${user}: No access`));
+        }
+      }
+    } catch (error) {
+      console.error(chalk.red("Error checking access list:"), error);
+    }
+  }
+
+  public async removeFromAccessList(args: string[]): Promise<void> {
+    try {
+      const accessListAddress = args[0];
+      const users = args[1].split(",").map((u) => u.trim());
+
+      if (!accessListAddress || users.length === 0) {
+        console.error(
+          chalk.red(
+            "Access list address and at least one user address are required"
+          )
+        );
+        return;
+      }
+
+      const accessList = new AccessListContract(
+        accessListAddress,
+        this.signer,
+        Number(this.config.chainId)
+      );
+
+      console.log(
+        chalk.cyan(`Removing ${users.length} user(s) from access list...`)
+      );
+      for (const user of users) {
+        const balance = await accessList.balance(user);
+
+        if (Number(balance) === 0) {
+          console.log(
+            chalk.yellow(
+              `⚠ User ${user} is not on the access list, skipping...`
+            )
+          );
+          continue;
+        }
+
+        const balanceNum = Number(balance);
+        const contract = accessList.contract;
+
+        let removedCount = 0;
+        for (let index = 0; index < balanceNum; index++) {
+          try {
+            const tokenId = await contract.tokenOfOwnerByIndex(user, index);
+            const tx = await accessList.burn(Number(tokenId));
+            await tx.wait();
+
+            console.log(
+              chalk.green(
+                `✓ Successfully removed user ${user} (token ID: ${tokenId})`
+              )
+            );
+            removedCount++;
+          } catch (e: any) {
+            console.log(
+              chalk.yellow(
+                `⚠ Could not remove token at index ${index} for user ${user}: ${e.message}`
+              )
+            );
+          }
+        }
+
+        if (removedCount === 0) {
+          console.log(
+            chalk.yellow(`⚠ Could not remove any tokens for user ${user}`)
+          );
+        } else if (removedCount < balanceNum) {
+          console.log(
+            chalk.yellow(
+              `⚠ Only removed ${removedCount} of ${balanceNum} tokens for user ${user}`
+            )
+          );
+        }
+      }
+    } catch (error) {
+      console.error(chalk.red("Error removing users from access list:"), error);
+    }
+  }
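+
+  // downloadNodeLogs fetches node logs for a time window and writes them to
+  // <outputLocation>/logs.json. Usage sketch via the CLI wiring in cli.ts
+  // (paths and values illustrative):
+  //   downloadNodeLogs ./logs --last 24                        -> past 24 hours
+  //   downloadNodeLogs ./logs --from 1760000000000 --to 1760003600000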
accessList.burn(Number(tokenId)); + await tx.wait(); + + console.log( + chalk.green( + `✓ Successfully removed user ${user} (token ID: ${tokenId})` + ) + ); + removedCount++; + } catch (e: any) { + console.log( + chalk.yellow( + `⚠ Could not remove token at index ${index} for user ${user}: ${e.message}` + ) + ); + } + } + + if (removedCount === 0) { + console.log( + chalk.yellow(`⚠ Could not remove any tokens for user ${user}`) + ); + } else if (removedCount < balanceNum) { + console.log( + chalk.yellow( + `⚠ Only removed ${removedCount} of ${balanceNum} tokens for user ${user}` + ) + ); + } + } + } catch (error) { + console.error(chalk.red("Error removing users from access list:"), error); + } + } + + public async downloadNodeLogs(args: string[]): Promise { + try { + const outputLocation = args[0]; + const last = args[1]; + let from = args[2]; + let to = args[3]; + + if (!outputLocation) { + console.error(chalk.red("Output location is required")); + return; + } + + if (!fs.existsSync(outputLocation)) { + console.error( + chalk.red(`Output directory does not exist: ${outputLocation}`) + ); + return; + } + + if (last && (from || to)) { + console.error( + chalk.red("Use either --last or --from/--to, not both") + ); + return; + } + + if ((from && !to) || (!from && to)) { + console.error( + chalk.red( + "Both --from and --to are required when specifying a time range" + ) + ); + return; + } + + if (!last && !from && !to) { + to = `${Date.now()}`; + from = `${Date.now() - 60 * 60 * 1000}`; // default: last 1 hour + } + + if (last) { + to = `${Date.now()}`; + from = `${Date.now() - parseInt(last, 10) * 60 * 60 * 1000}`; + } + + const response = await ProviderInstance.downloadNodeLogs( + this.oceanNodeUrl, + this.signer, + from, + to + ); + + const text = await new Response(response).text(); + const outputPath = `${outputLocation}/logs.json`; + fs.writeFileSync(outputPath, text); + console.log(chalk.green(`Logs saved to ${outputPath}`)); + } catch (error) { + console.error(chalk.red("Error downloading node logs: "), error); + } + } } From 81a037970d192f7a5f13b6a6988cbfacabefbd77 Mon Sep 17 00:00:00 2001 From: ndrpp Date: Thu, 12 Feb 2026 14:11:12 +0200 Subject: [PATCH 2/2] fix: add maxLogs option & argument for download logs command --- src/cli.ts | 3 +++ src/commands.ts | 4 +++- 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/src/cli.ts b/src/cli.ts index 743137a..833e9b9 100644 --- a/src/cli.ts +++ b/src/cli.ts @@ -681,10 +681,12 @@ export async function createCLI() { ) .argument("[from]", "Start time (epoch ms) to get logs from") .argument("[to]", "End time (epoch ms) to get logs to") + .argument("[maxLogs]", "Maximum number of logs to retrieve (default: 100, max: 1000)") .option("-o, --output ", "Output directory to save the logs") .option("-l, --last [last]", "Period of time to get logs from now (in hours)") .option("-f, --from [from]", "Start time (epoch ms) to get logs from") .option("-t, --to [to]", "End time (epoch ms) to get logs to") + .option("-m, --maxLogs [maxLogs]", "Maximum number of logs to retrieve (default: 100, max: 1000)") .action(async (output, last, from, to, options) => { const { signer, chainId } = await initializeSigner(); const commands = new Commands(signer, chainId); @@ -693,6 +695,7 @@ export async function createCLI() { options.last || last, options.from || from, options.to || to, + options.maxLogs, ]); }); diff --git a/src/commands.ts b/src/commands.ts index a633d99..165624b 100644 --- a/src/commands.ts +++ b/src/commands.ts @@ -1721,6 +1721,7 @@ export 
       const last = args[1];
       let from = args[2];
       let to = args[3];
+      const maxLogs = args[4] ? Math.min(parseInt(args[4], 10), 1000) : undefined;
 
       if (!outputLocation) {
         console.error(chalk.red("Output location is required"));
@@ -1764,7 +1765,8 @@ export class Commands {
         this.oceanNodeUrl,
         this.signer,
         from,
-        to
+        to,
+        maxLogs
       );
 
       const text = await new Response(response).text();
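
Reviewer note: the time-window rules implemented in downloadNodeLogs above
(mutually exclusive --last vs --from/--to, paired range flags, and a one-hour
default) can be summarized by the standalone TypeScript sketch below.
resolveLogWindow is a hypothetical helper written only for illustration; it is
not part of this patch or of @oceanprotocol/lib.

    // Sketch of the precedence rules in downloadNodeLogs (illustrative only).
    type LogWindow = { from: string; to: string };

    function resolveLogWindow(last?: string, from?: string, to?: string): LogWindow {
      if (last && (from || to)) {
        throw new Error("Use either --last or --from/--to, not both");
      }
      if ((from && !to) || (!from && to)) {
        throw new Error("Both --from and --to are required for a time range");
      }
      const now = Date.now();
      if (last) {
        // --last N selects the past N hours, ending now
        return { from: `${now - parseInt(last, 10) * 60 * 60 * 1000}`, to: `${now}` };
      }
      if (from && to) {
        return { from, to };
      }
      // no inputs: default to the last hour
      return { from: `${now - 60 * 60 * 1000}`, to: `${now}` };
    }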
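
For completeness, a call through the Commands API then looks like the
following sketch (signer/chainId setup as in cli.ts; the empty strings stand
in for the unused --from/--to positions in the args array):

    // Download up to 500 log entries from the past 24 hours into ./logs
    const commands = new Commands(signer, chainId);
    await commands.downloadNodeLogs(["./logs", "24", "", "", "500"]);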