diff --git a/AGENTS.md b/AGENTS.md index da06fbf6..e1d9ce85 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -46,7 +46,7 @@ packages/synapse-sdk/src/ **Commands**: -- Root: `pnpm run fix` (Biome auto-fix all), `pnpm run build` (all packages), `pnpm test` +- Root: `pnpm run lint:fix` (Biome auto-fix all), `pnpm run build` (all packages), `pnpm test` - Package: `pnpm run lint:fix`, `pnpm run build`, `pnpm test` (from `packages/synapse-sdk/`) **Build**: TypeScript → `dist/` (in package), ES modules with `.js` extensions, strict mode, NodeNext resolution diff --git a/examples/cli/package.json b/examples/cli/package.json index 7ae7bffa..e7dce592 100644 --- a/examples/cli/package.json +++ b/examples/cli/package.json @@ -16,8 +16,10 @@ "@clack/prompts": "^0.11.0", "@filoz/synapse-core": "workspace:^", "@filoz/synapse-sdk": "workspace:^", + "@remix-run/fs": "^0.3.0", "cleye": "^2.0.0", "conf": "^15.0.2", + "terminal-link": "^5.0.0", "viem": "catalog:" }, "devDependencies": { diff --git a/examples/cli/src/client.ts b/examples/cli/src/client.ts index 11dec175..ab8ea951 100644 --- a/examples/cli/src/client.ts +++ b/examples/cli/src/client.ts @@ -22,6 +22,7 @@ export function privateKeyClient(chainId: number) { client, privateKey: privateKey as Hex, rpcURL: chain.rpcUrls.default.http[0], + chain, } } diff --git a/examples/cli/src/commands/dataset-terminate.ts b/examples/cli/src/commands/dataset-terminate.ts deleted file mode 100644 index cf6b2854..00000000 --- a/examples/cli/src/commands/dataset-terminate.ts +++ /dev/null @@ -1,70 +0,0 @@ -import * as p from '@clack/prompts' -import { getDataSets, terminateDataSet } from '@filoz/synapse-core/warm-storage' -import { type Command, command } from 'cleye' -import { waitForTransactionReceipt } from 'viem/actions' -import { privateKeyClient } from '../client.ts' -import { globalFlags } from '../flags.ts' - -export const datasetTerminate: Command = command( - { - name: 'dataset-terminate', - description: 'Terminate a data set', - alias: 'dt', - flags: { - ...globalFlags, - }, - help: { - description: 'Terminate a data set', - }, - }, - async (argv) => { - const { client } = privateKeyClient(argv.flags.chain) - - const spinner = p.spinner() - spinner.start(`Fetching data sets...`) - try { - const dataSets = await getDataSets(client, { - address: client.account.address, - }) - spinner.stop(`Fetching data sets complete`) - - const dataSetId = await p.select({ - message: 'Pick a data set to terminate.', - options: dataSets - // .filter((dataSet) => dataSet.pdpEndEpoch === 0n) - .map((dataSet) => ({ - value: dataSet.dataSetId.toString(), - label: `#${dataSet.dataSetId} - SP: #${dataSet.providerId} ${dataSet.pdp.serviceURL}`, - })), - }) - if (p.isCancel(dataSetId)) { - p.cancel('Operation cancelled.') - process.exit(0) - } - - spinner.start(`Terminating data set ${dataSetId}...`) - // const synapse = await Synapse.create({ - // privateKey: privateKey as Hex, - // rpcURL: RPC_URLS.calibration.http, - // }) - - // const tx = await synapse.storage.terminateDataSet(Number(dataSetId)) - - const tx = await terminateDataSet(client, { - dataSetId: BigInt(dataSetId), - }) - - spinner.message(`Waiting for transaction to be mined...`) - await waitForTransactionReceipt(client, { - hash: tx, - }) - - spinner.stop(`Data set terminated`) - } catch (error) { - spinner.stop() - console.error(error) - p.outro('Please try again') - return - } - } -) diff --git a/examples/cli/src/commands/datasets-create.ts b/examples/cli/src/commands/datasets-create.ts new file mode 100644 index 
00000000..d7bb136f
--- /dev/null
+++ b/examples/cli/src/commands/datasets-create.ts
@@ -0,0 +1,103 @@
+import * as p from '@clack/prompts'
+import * as sp from '@filoz/synapse-core/sp'
+import {
+  createDataSet,
+  getProvider,
+  readProviders,
+} from '@filoz/synapse-core/warm-storage'
+import { type Command, command } from 'cleye'
+import type { Account, Chain, Client, Transport } from 'viem'
+import { privateKeyClient } from '../client.ts'
+import { globalFlags } from '../flags.ts'
+import { hashLink } from '../utils.ts'
+
+export const datasetsCreate: Command = command(
+  {
+    name: 'datasets-create',
+    description: 'Create a data set',
+    alias: 'dc',
+    parameters: ['[providerId]'],
+    flags: {
+      ...globalFlags,
+      cdn: {
+        type: Boolean,
+        description: 'Enable CDN',
+        default: false,
+      },
+    },
+    help: {
+      description: 'Create a data set',
+    },
+  },
+  async (argv) => {
+    const { client, chain } = privateKeyClient(argv.flags.chain)
+
+    const spinner = p.spinner()
+    try {
+      const provider = argv._.providerId
+        ? await getProvider(client, { providerId: BigInt(argv._.providerId) })
+        : await selectProvider(client, argv.flags)
+
+      p.log.info(
+        `Selected provider: #${provider.id} - ${provider.serviceProvider} ${provider.pdp.serviceURL}`
+      )
+      spinner.start(`Creating data set...`)
+
+      const result = await createDataSet(client, {
+        payee: provider.payee,
+        payer: client.account.address,
+        endpoint: provider.pdp.serviceURL,
+        cdn: argv.flags.cdn,
+      })
+
+      spinner.message(
+        `Waiting for tx ${hashLink(result.txHash, chain)} to be mined...`
+      )
+      const dataset = await sp.waitForDataSetCreationStatus(result)
+
+      spinner.stop(`Data set created #${dataset.dataSetId}`)
+    } catch (error) {
+      spinner.stop('Failed to create data set', 1)
+      if (argv.flags.debug) {
+        console.error(error)
+      } else {
+        p.log.error((error as Error).message)
+      }
+      process.exit(1)
+    }
+  }
+)
+
+async function selectProvider(
+  client: Client<Transport, Chain, Account>,
+  options: { debug?: boolean }
+) {
+  const spinner = p.spinner()
+  spinner.start(`Fetching providers...`)
+
+  try {
+    const providers = await readProviders(client)
+    spinner.stop(`Fetching providers complete`)
+
+    const provider = await p.select({
+      message: 'Pick a provider to create a data set.',
+      options: providers.map((provider) => ({
+        value: provider,
+        label: `#${provider.id} - ${provider.serviceProvider} ${provider.pdp.serviceURL}`,
+      })),
+    })
+    if (p.isCancel(provider)) {
+      p.cancel('Operation cancelled.')
+      process.exit(1)
+    }
+    return provider
+  } catch (error) {
+    spinner.stop('Failed to select provider', 1)
+    if (options.debug) {
+      console.error(error)
+    } else {
+      p.log.error((error as Error).message)
+    }
+    process.exit(1)
+  }
+}
diff --git a/examples/cli/src/commands/datasets-terminate.ts b/examples/cli/src/commands/datasets-terminate.ts
new file mode 100644
index 00000000..e436b4dd
--- /dev/null
+++ b/examples/cli/src/commands/datasets-terminate.ts
@@ -0,0 +1,90 @@
+import * as p from '@clack/prompts'
+import { getDataSets, terminateDataSet } from '@filoz/synapse-core/warm-storage'
+import { type Command, command } from 'cleye'
+import type { Account, Chain, Client, Transport } from 'viem'
+import { waitForTransactionReceipt } from 'viem/actions'
+import { privateKeyClient } from '../client.ts'
+import { globalFlags } from '../flags.ts'
+import { hashLink } from '../utils.ts'
+
+export const datasetsTerminate: Command = command(
+  {
+    name: 'datasets-terminate',
+    description: 'Terminate a data set',
+    alias: 'dt',
+    parameters: ['[dataSetId]'],
+    flags: {
...globalFlags, + }, + help: { + description: 'Terminate a data set', + }, + }, + async (argv) => { + const { client, chain } = privateKeyClient(argv.flags.chain) + + const spinner = p.spinner() + try { + const dataSetId = argv._.dataSetId + ? BigInt(argv._.dataSetId) + : await selectDataSet(client, argv.flags) + spinner.start(`Terminating data set ${dataSetId}...`) + + const tx = await terminateDataSet(client, { + dataSetId: BigInt(dataSetId), + }) + + spinner.message(`Waiting for tx ${hashLink(tx, chain)} to be mined...`) + await waitForTransactionReceipt(client, { + hash: tx, + }) + + spinner.stop(`Data set terminated`) + } catch (error) { + spinner.stop('Failed to terminate data set', 1) + if (argv.flags.debug) { + console.error(error) + } else { + p.log.error((error as Error).message) + } + process.exit(1) + } + } +) + +async function selectDataSet( + client: Client, + options: { debug?: boolean } +) { + const spinner = p.spinner() + spinner.start(`Fetching data sets...`) + + try { + const dataSets = await getDataSets(client, { + address: client.account.address, + }) + spinner.stop(`Fetching data sets complete`) + + const dataSetId = await p.select({ + message: 'Pick a data set to terminate.', + options: dataSets.map((dataSet) => ({ + value: dataSet.dataSetId, + label: `#${dataSet.dataSetId} - SP: #${dataSet.providerId} ${dataSet.pdp.serviceURL} ${dataSet.pdpEndEpoch > 0n ? `Terminating at epoch ${dataSet.pdpEndEpoch}` : ''}`, + })), + }) + if (p.isCancel(dataSetId)) { + p.cancel('Operation cancelled.') + process.exit(1) + } + + return dataSetId + } catch (error) { + spinner.stop('Failed to select data set', 1) + if (options.debug) { + console.error(error) + } else { + p.log.error((error as Error).message) + } + process.exit(1) + } +} diff --git a/examples/cli/src/commands/datasets.ts b/examples/cli/src/commands/datasets.ts index f7c9ba40..974650ee 100644 --- a/examples/cli/src/commands/datasets.ts +++ b/examples/cli/src/commands/datasets.ts @@ -1,6 +1,7 @@ import * as p from '@clack/prompts' import { getDataSets } from '@filoz/synapse-core/warm-storage' import { type Command, command } from 'cleye' +import { getBlockNumber } from 'viem/actions' import { privateKeyClient } from '../client.ts' import { globalFlags } from '../flags.ts' @@ -22,6 +23,8 @@ export const datasets: Command = command( const spinner = p.spinner() + const blockNumber = await getBlockNumber(client) + spinner.start('Listing data sets...') try { const dataSets = await getDataSets(client, { @@ -30,10 +33,11 @@ export const datasets: Command = command( spinner.stop('Data sets:') dataSets.forEach(async (dataSet) => { p.log.info( - `#${dataSet.dataSetId} ${dataSet.cdn ? 'CDN' : ''} ${dataSet.pdp.serviceURL} ${dataSet.pdpEndEpoch > 0n ? `Terminating at epoch ${dataSet.pdpEndEpoch}` : ''}` + `#${dataSet.dataSetId} ${dataSet.cdn ? 'CDN' : ''} ${dataSet.pdp.serviceURL} ${dataSet.pdpEndEpoch > 0n ? `Terminating at epoch ${dataSet.pdpEndEpoch}` : ''} ${dataSet.live ? 'Live' : ''} ${dataSet.managed ? 
'Managed' : ''}` ) - console.log(dataSet) + // console.log(dataSet) }) + p.log.warn(`Block number: ${blockNumber}`) } catch (error) { spinner.stop() console.error(error) diff --git a/examples/cli/src/commands/pieces-upload.ts b/examples/cli/src/commands/pieces-upload.ts new file mode 100644 index 00000000..f2094ebd --- /dev/null +++ b/examples/cli/src/commands/pieces-upload.ts @@ -0,0 +1,58 @@ +import path from 'node:path' +import * as p from '@clack/prompts' +import * as SP from '@filoz/synapse-core/sp' +import { upload } from '@filoz/synapse-core/warm-storage' +import { openFile } from '@remix-run/fs' +import { type Command, command } from 'cleye' +import { privateKeyClient } from '../client.ts' +import { globalFlags } from '../flags.ts' +import { hashLink } from '../utils.ts' + +export const piecesUpload: Command = command( + { + name: 'pieces-upload', + parameters: ['', ''], + description: 'Upload a file to a data set', + flags: { + ...globalFlags, + cdn: { + type: Boolean, + description: 'Enable CDN', + default: false, + }, + }, + help: { + description: 'Upload a file to a data set', + }, + }, + async (argv) => { + const { client, chain } = privateKeyClient(argv.flags.chain) + const spinner = p.spinner() + + const filePath = argv._.path + const absolutePath = path.resolve(filePath) + const file = openFile(absolutePath) + + spinner.start(`Uploading file ${absolutePath}...`) + try { + const result = await upload(client, { + dataSetId: BigInt(argv._.dataSetId), + data: [file], + onEvent: (event, data) => { + spinner.message(`${event} ${data.pieceCid.toString()}`) + }, + }) + + spinner.message( + `Waiting for tx ${hashLink(result.txHash, chain)} to be mined...` + ) + const pieces = await SP.waitForAddPiecesStatus(result) + spinner.stop(`File uploaded ${pieces.confirmedPieceIds.join(',')}`) + } catch (error) { + spinner.stop() + p.log.error((error as Error).message) + p.outro('Please try again') + return + } + } +) diff --git a/examples/cli/src/commands/upload-dataset.ts b/examples/cli/src/commands/upload-dataset.ts index 1fd0df5a..44d33ae6 100644 --- a/examples/cli/src/commands/upload-dataset.ts +++ b/examples/cli/src/commands/upload-dataset.ts @@ -72,7 +72,7 @@ export const uploadDataset: Command = command( ], }) - await SP.pollForDataSetCreationStatus(rsp) + await SP.waitForDataSetCreationStatus(rsp) spinner.stop(`File uploaded ${pieceCid}`) } catch (error) { spinner.stop() diff --git a/examples/cli/src/flags.ts b/examples/cli/src/flags.ts index 9800a32d..8c8aee1a 100644 --- a/examples/cli/src/flags.ts +++ b/examples/cli/src/flags.ts @@ -1,7 +1,8 @@ const possibleChains = [314159, 314] as const type Chains = (typeof possibleChains)[number] -const Chain = (chain: Chains) => { +const Chain = (chainStr: string) => { + const chain = Number(chainStr) as Chains if (!possibleChains.includes(chain)) { throw new Error( `Invalid chain: ${chain}. Must be one of: ${possibleChains.join(', ')}` @@ -16,4 +17,9 @@ export const globalFlags = { description: 'The chain to use. 
314159 for calibration, 314 for mainnet', default: 314159, }, + debug: { + type: Boolean, + description: 'Enable debug mode', + default: false, + }, } diff --git a/examples/cli/src/index.ts b/examples/cli/src/index.ts index a71f9add..a74e6f94 100755 --- a/examples/cli/src/index.ts +++ b/examples/cli/src/index.ts @@ -1,13 +1,15 @@ #!/usr/bin/env node import { cli } from 'cleye' -import { datasetTerminate } from './commands/dataset-terminate.ts' import { datasets } from './commands/datasets.ts' +import { datasetsCreate } from './commands/datasets-create.ts' +import { datasetsTerminate } from './commands/datasets-terminate.ts' import { deposit } from './commands/deposit.ts' import { fund } from './commands/fund.ts' import { init } from './commands/init.ts' import { pay } from './commands/pay.ts' import { pieces } from './commands/pieces.ts' +import { piecesUpload } from './commands/pieces-upload.ts' import { upload } from './commands/upload.ts' import { uploadDataset } from './commands/upload-dataset.ts' @@ -22,8 +24,10 @@ const argv = cli({ deposit, upload, datasets, - datasetTerminate, + datasetsTerminate, + datasetsCreate, pieces, + piecesUpload, uploadDataset, ], }) diff --git a/examples/cli/src/utils.ts b/examples/cli/src/utils.ts new file mode 100644 index 00000000..4eb5ad72 --- /dev/null +++ b/examples/cli/src/utils.ts @@ -0,0 +1,16 @@ +import * as p from '@clack/prompts' +import type { Chain } from '@filoz/synapse-core/chains' +import terminalLink from 'terminal-link' + +export function onCancel(message?: string) { + p.cancel(message ?? 'Operation cancelled.') + process.exit(0) +} + +export function hashLink(hash: string, chain: Chain) { + const link = terminalLink( + hash, + `${chain.blockExplorers?.default?.url}/tx/${hash}` + ) + return link +} diff --git a/package.json b/package.json index d7c6a077..8a379d58 100644 --- a/package.json +++ b/package.json @@ -39,7 +39,7 @@ "devEngines": { "runtime": { "name": "node", - "version": "^24.8.0", + "version": ">=24.8.0", "onFail": "download" } } diff --git a/packages/synapse-core/package.json b/packages/synapse-core/package.json index 4db809c7..6bdd16f8 100644 --- a/packages/synapse-core/package.json +++ b/packages/synapse-core/package.json @@ -211,11 +211,13 @@ "devDependencies": { "@biomejs/biome": "catalog:", "@types/assert": "^1.5.11", + "@types/chai": "^5.2.3", "@types/mocha": "catalog:", "@types/node": "catalog:", "@wagmi/cli": "^2.7.0", "abitype": "catalog:", "assert": "^2.1.0", + "chai": "^6.2.1", "mocha": "catalog:", "msw": "catalog:", "playwright-test": "^14.1.12", diff --git a/packages/synapse-core/src/chains.ts b/packages/synapse-core/src/chains.ts index f19a0312..740d0497 100644 --- a/packages/synapse-core/src/chains.ts +++ b/packages/synapse-core/src/chains.ts @@ -11,6 +11,7 @@ import type { Address, ChainContract, Chain as ViemChain } from 'viem' import * as Abis from './abis/index.ts' +import { UnsupportedChainError } from './errors/chains.ts' /** * Viem compatible chain interface with all the FOC contracts addresses and ABIs @@ -281,3 +282,21 @@ export function getChain(id?: number): Chain { throw new Error(`Chain with id ${id} not found`) } } + +/** + * Convert a viem chain to a filecoin chain. + * @param chain - The viem chain. + * @returns The filecoin chain. 
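+ * @example
+ * ```ts
+ * // Illustrative sketch (not part of this change): `calibration` ships with
+ * // the FOC contract entries, so the cast succeeds and returns it unchanged.
+ * import { asChain, calibration } from '@filoz/synapse-core/chains'
+ * const chain = asChain(calibration)
+ * ```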
+ * @throws Errors {@link asChain.ErrorType} + */ +export function asChain(chain: ViemChain): Chain { + if (chain.contracts && 'payments' in chain.contracts) { + return chain as Chain + } + throw new UnsupportedChainError(chain.id) +} + +// biome-ignore lint/style/noNamespace: namespaced types +export namespace asChain { + export type ErrorType = UnsupportedChainError +} diff --git a/packages/synapse-core/src/errors/index.ts b/packages/synapse-core/src/errors/index.ts index 0fc26f89..f700c495 100644 --- a/packages/synapse-core/src/errors/index.ts +++ b/packages/synapse-core/src/errors/index.ts @@ -13,3 +13,4 @@ export * from './chains.ts' export * from './erc20.ts' export * from './pay.ts' export * from './pdp.ts' +export * from './pull.ts' diff --git a/packages/synapse-core/src/errors/pdp.ts b/packages/synapse-core/src/errors/pdp.ts index b391a9b7..2498d5e7 100644 --- a/packages/synapse-core/src/errors/pdp.ts +++ b/packages/synapse-core/src/errors/pdp.ts @@ -29,18 +29,18 @@ export class CreateDataSetError extends SynapseError { } } -export class PollDataSetCreationStatusError extends SynapseError { - override name: 'PollDataSetCreationStatusError' = 'PollDataSetCreationStatusError' +export class WaitDataSetCreationStatusError extends SynapseError { + override name: 'WaitDataSetCreationStatusError' = 'WaitDataSetCreationStatusError' constructor(error: string) { const decodedError = decodePDPError(error) - super(`Failed to check data set creation status.`, { + super(`Failed to wait for data set creation status.`, { details: decodedError, }) } - static override is(value: unknown): value is PollDataSetCreationStatusError { - return isSynapseError(value) && value.name === 'PollDataSetCreationStatusError' + static override is(value: unknown): value is WaitDataSetCreationStatusError { + return isSynapseError(value) && value.name === 'WaitDataSetCreationStatusError' } } @@ -118,18 +118,18 @@ export class AddPiecesError extends SynapseError { } } -export class PollForAddPiecesStatusError extends SynapseError { - override name: 'PollForAddPiecesStatusError' = 'PollForAddPiecesStatusError' +export class WaitForAddPiecesStatusError extends SynapseError { + override name: 'WaitForAddPiecesStatusError' = 'WaitForAddPiecesStatusError' constructor(error: string) { const decodedError = decodePDPError(error) - super(`Failed to poll for add pieces status.`, { + super(`Failed to wait for add pieces status.`, { details: decodedError, }) } - static override is(value: unknown): value is PollForAddPiecesStatusError { - return isSynapseError(value) && value.name === 'PollForAddPiecesStatusError' + static override is(value: unknown): value is WaitForAddPiecesStatusError { + return isSynapseError(value) && value.name === 'WaitForAddPiecesStatusError' } } @@ -153,7 +153,25 @@ export class InvalidUploadSizeError extends SynapseError { constructor(size: number) { super(`Invalid upload size.`, { - details: `Size ${size} bytes is below minimum allowed size of ${SIZE_CONSTANTS.MIN_UPLOAD_SIZE} bytes or exceeds maximum allowed size of ${SIZE_CONSTANTS.MAX_UPLOAD_SIZE} bytes`, + details: `Size ${size} bytes is below minimum allowed size of ${SIZE_CONSTANTS.MIN_UPLOAD_SIZE} bytes or exceeds maximum allowed size of ${SIZE_CONSTANTS.MAX_UPLOAD_SIZE} bytes (1 GiB with fr32 expansion)`, }) } + + static override is(value: unknown): value is InvalidUploadSizeError { + return isSynapseError(value) && value.name === 'InvalidUploadSizeError' + } +} + +export class DownloadPieceError extends SynapseError { + override name: 
'DownloadPieceError' = 'DownloadPieceError' + + constructor(error: string) { + super(`Failed to download piece.`, { + details: error, + }) + } + + static override is(value: unknown): value is DownloadPieceError { + return isSynapseError(value) && value.name === 'DownloadPieceError' + } } diff --git a/packages/synapse-core/src/errors/pull.ts b/packages/synapse-core/src/errors/pull.ts new file mode 100644 index 00000000..99b37572 --- /dev/null +++ b/packages/synapse-core/src/errors/pull.ts @@ -0,0 +1,17 @@ +import { decodePDPError } from '../utils/decode-pdp-errors.ts' +import { isSynapseError, SynapseError } from './base.ts' + +export class PullError extends SynapseError { + override name: 'PullError' = 'PullError' + + constructor(error: string) { + const decodedError = decodePDPError(error) + super(`Failed to pull pieces from storage provider.`, { + details: decodedError, + }) + } + + static override is(value: unknown): value is PullError { + return isSynapseError(value) && value.name === 'PullError' + } +} diff --git a/packages/synapse-core/src/errors/warm-storage.ts b/packages/synapse-core/src/errors/warm-storage.ts index 7a53c183..1fcd6965 100644 --- a/packages/synapse-core/src/errors/warm-storage.ts +++ b/packages/synapse-core/src/errors/warm-storage.ts @@ -10,3 +10,14 @@ export class DataSetNotFoundError extends SynapseError { return isSynapseError(value) && value.name === 'DataSetNotFoundError' } } + +export class AtLeastOnePieceRequiredError extends SynapseError { + override name: 'AtLeastOnePieceRequiredError' = 'AtLeastOnePieceRequiredError' + constructor() { + super('At least one piece must be provided') + } + + static override is(value: unknown): value is AtLeastOnePieceRequiredError { + return isSynapseError(value) && value.name === 'AtLeastOnePieceRequiredError' + } +} diff --git a/packages/synapse-core/src/index.ts b/packages/synapse-core/src/index.ts index 202d2205..26563f30 100644 --- a/packages/synapse-core/src/index.ts +++ b/packages/synapse-core/src/index.ts @@ -18,6 +18,7 @@ export * as pay from './pay/index.ts' export * as piece from './piece.ts' export * as sessionKey from './session-key/index.ts' export * as curio from './sp.ts' +export * as spFetch from './sp-fetch.ts' export * as typedData from './typed-data/index.ts' export * as usdfc from './usdfc.ts' export * as utils from './utils/index.ts' diff --git a/packages/synapse-core/src/mocks/index.ts b/packages/synapse-core/src/mocks/index.ts index 9c1f8502..f1da29ec 100644 --- a/packages/synapse-core/src/mocks/index.ts +++ b/packages/synapse-core/src/mocks/index.ts @@ -13,3 +13,4 @@ export * from './common.ts' export * from './jsonrpc/index.ts' export { mockServiceProviderRegistry } from './jsonrpc/service-registry.ts' export * as pdp from './pdp.ts' +export * as pull from './pull.ts' diff --git a/packages/synapse-core/src/mocks/jsonrpc/constants.ts b/packages/synapse-core/src/mocks/jsonrpc/constants.ts index 4b652699..d7e75cc8 100644 --- a/packages/synapse-core/src/mocks/jsonrpc/constants.ts +++ b/packages/synapse-core/src/mocks/jsonrpc/constants.ts @@ -1,9 +1,9 @@ -import { type Address, zeroAddress } from 'viem' +import { type Address, type Hex, zeroAddress } from 'viem' import { calibration, mainnet } from '../../chains.ts' export const PRIVATE_KEYS = { - key1: '0x1234567890123456789012345678901234567890123456789012345678901234', - key2: '0x4123456789012345678901234567890123456789012345678901234567890123', + key1: '0x1234567890123456789012345678901234567890123456789012345678901234' as Hex, + key2: 
'0x4123456789012345678901234567890123456789012345678901234567890123' as Hex, } export const ADDRESSES = { client1: '0x2e988A386a799F506693793c6A5AF6B54dfAaBfB' as Address, diff --git a/packages/synapse-core/src/mocks/pdp.ts b/packages/synapse-core/src/mocks/pdp.ts index 6922a596..0a55522c 100644 --- a/packages/synapse-core/src/mocks/pdp.ts +++ b/packages/synapse-core/src/mocks/pdp.ts @@ -7,7 +7,7 @@ import assert from 'assert' import { HttpResponse, http } from 'msw' import { decodeAbiParameters, type Hex } from 'viem' -import type { AddPiecesRequest } from '../sp.ts' +import type { addPieces } from '../sp.ts' export interface PDPMockOptions { baseUrl?: string @@ -332,7 +332,7 @@ export function addPiecesWithMetadataCapture( ) { const baseUrl = options.baseUrl ?? 'http://pdp.local' - return http.post<{ id: string }, AddPiecesRequest>( + return http.post<{ id: string }, addPieces.RequestBody>( `${baseUrl}/pdp/data-sets/:id/pieces`, async ({ params, request }) => { if (params.id !== dataSetId.toString()) { diff --git a/packages/synapse-core/src/mocks/pull.ts b/packages/synapse-core/src/mocks/pull.ts new file mode 100644 index 00000000..1bbea495 --- /dev/null +++ b/packages/synapse-core/src/mocks/pull.ts @@ -0,0 +1,155 @@ +/** + * MSW HTTP handlers for SP Pull endpoints + * + * These handlers can be used to mock SP-to-SP pull HTTP responses in tests + */ + +import { HttpResponse, http } from 'msw' +import type { PullPieceInput, PullResponse, PullStatus } from '../pull.ts' + +export interface PullMockOptions { + baseUrl?: string + debug?: boolean +} + +export interface PullRequestCapture { + extraData: string + recordKeeper: string + dataSetId?: number + pieces: PullPieceInput[] +} + +/** + * Creates a handler for the fetch pieces endpoint that returns a fixed response + */ +export function fetchPiecesHandler(response: PullResponse, options: PullMockOptions = {}) { + const baseUrl = options.baseUrl ?? 'http://pdp.local' + + return http.post(`${baseUrl}/pdp/piece/pull`, async () => { + if (options.debug) { + console.debug('SP Pull Mock: returning response', response) + } + return HttpResponse.json(response, { status: 200 }) + }) +} + +/** + * Creates a handler that captures the request body and returns a response + */ +export function fetchPiecesWithCaptureHandler( + response: PullResponse, + captureCallback: (request: PullRequestCapture) => void, + options: PullMockOptions = {} +) { + const baseUrl = options.baseUrl ?? 'http://pdp.local' + + return http.post(`${baseUrl}/pdp/piece/pull`, async ({ request }) => { + const body = (await request.json()) as PullRequestCapture + + captureCallback(body) + + if (options.debug) { + console.debug('SP Pull Mock: captured request', body) + } + + return HttpResponse.json(response, { status: 200 }) + }) +} + +/** + * Creates a handler that returns an error response + */ +export function fetchPiecesErrorHandler(errorMessage: string, statusCode = 500, options: PullMockOptions = {}) { + const baseUrl = options.baseUrl ?? 'http://pdp.local' + + return http.post(`${baseUrl}/pdp/piece/pull`, async () => { + if (options.debug) { + console.debug('SP Pull Mock: returning error', errorMessage) + } + return HttpResponse.text(errorMessage, { status: statusCode }) + }) +} + +/** + * Creates a handler that simulates polling, returns pending status N times, + * then returns the final response + */ +export function fetchPiecesPollingHandler( + pendingCount: number, + finalResponse: PullResponse, + options: PullMockOptions = {} +) { + const baseUrl = options.baseUrl ?? 
'http://pdp.local'
+  let callCount = 0
+
+  return http.post(`${baseUrl}/pdp/piece/pull`, async () => {
+    callCount++
+
+    if (options.debug) {
+      console.debug(`SP Pull Mock: poll attempt ${callCount}/${pendingCount + 1}`)
+    }
+
+    if (callCount <= pendingCount) {
+      // Return pending status
+      const pendingResponse: PullResponse = {
+        status: 'pending',
+        pieces: finalResponse.pieces.map((p) => ({
+          pieceCid: p.pieceCid,
+          status: 'pending' as PullStatus,
+        })),
+      }
+      return HttpResponse.json(pendingResponse, { status: 200 })
+    }
+
+    // Return final response
+    return HttpResponse.json(finalResponse, { status: 200 })
+  })
+}
+
+/**
+ * Creates a handler that simulates a progression through statuses
+ */
+export function fetchPiecesProgressionHandler(
+  statusProgression: PullStatus[],
+  pieces: Array<{ pieceCid: string }>,
+  options: PullMockOptions = {}
+) {
+  const baseUrl = options.baseUrl ?? 'http://pdp.local'
+  let callCount = 0
+
+  return http.post(`${baseUrl}/pdp/piece/pull`, async () => {
+    const statusIndex = Math.min(callCount, statusProgression.length - 1)
+    const currentStatus = statusProgression[statusIndex]
+    callCount++
+
+    if (options.debug) {
+      console.debug(`SP Pull Mock: returning status ${currentStatus} (call ${callCount})`)
+    }
+
+    const response: PullResponse = {
+      status: currentStatus,
+      pieces: pieces.map((p) => ({
+        pieceCid: p.pieceCid,
+        status: currentStatus,
+      })),
+    }
+
+    return HttpResponse.json(response, { status: 200 })
+  })
+}
+
+/**
+ * Helper to create a complete PullResponse
+ */
+export function createPullResponse(
+  status: PullStatus,
+  pieces: Array<{ pieceCid: string; status?: PullStatus }>
+): PullResponse {
+  return {
+    status,
+    pieces: pieces.map((p) => ({
+      pieceCid: p.pieceCid,
+      status: p.status ?? status,
+    })),
+  }
+}
diff --git a/packages/synapse-core/src/piece.ts b/packages/synapse-core/src/piece.ts
index 25be9e41..16b03ff4 100644
--- a/packages/synapse-core/src/piece.ts
+++ b/packages/synapse-core/src/piece.ts
@@ -19,24 +19,11 @@ import * as Raw from 'multiformats/codecs/raw'
 import * as Digest from 'multiformats/hashes/digest'
 import * as Link from 'multiformats/link'
 import { type Hex, hexToBytes } from 'viem'
+import { DownloadPieceError } from './errors/pdp.ts'
 
 const FIL_COMMITMENT_UNSEALED = 0xf101
 const SHA2_256_TRUNC254_PADDED = 0x1012
 
-/**
- * Maximum upload size currently supported by PDP servers.
- *
- * 1 GiB adjusted for fr32 expansion: 1 GiB * (127/128) = 1,065,353,216 bytes
- *
- * Fr32 encoding adds 2 bits of padding per 254 bits of data, resulting in 128 bytes
- * of padded data for every 127 bytes of raw data.
- *
- * Note: While it's technically possible to upload pieces this large as Uint8Array,
- * streaming via AsyncIterable is strongly recommended for non-trivial sizes.
- * See SIZE_CONSTANTS.MAX_UPLOAD_SIZE in synapse-sdk for detailed guidance.
- */
-export const MAX_UPLOAD_SIZE = 1_065_353_216 // 1 GiB * 127/128
-
 /**
  * PieceCID - A constrained CID type for Piece Commitments.
  * This is implemented as a Link type which is made concrete by a CID. A
@@ -345,27 +332,6 @@ export function createPieceCIDStream(): {
   }
 }
 
-/**
- * Convert Uint8Array to async iterable with optimal chunk size.
- *
- * Uses 2048-byte chunks for better hasher performance (determined by manual
- * testing with Node.js; this will likely vary by environment). This may not be
- * optimal for the streaming upload case, so further tuning may be needed to
- * find the best balance between hasher performance and upload chunk size.
- * - * @param data - Uint8Array to convert - * @param chunkSize - Size of chunks (default 2048) - * @returns AsyncIterable yielding chunks - */ -export async function* uint8ArrayToAsyncIterable( - data: Uint8Array, - chunkSize: number = 2048 -): AsyncIterable { - for (let i = 0; i < data.length; i += chunkSize) { - yield data.subarray(i, i + chunkSize) - } -} - /** * Convert a hex representation of a PieceCID to a PieceCID object * @@ -412,16 +378,16 @@ export async function downloadAndValidate( // Parse and validate the expected PieceCID const parsedPieceCid = asPieceCID(expectedPieceCid) if (parsedPieceCid == null) { - throw new Error(`Invalid PieceCID: ${String(expectedPieceCid)}`) + throw new DownloadPieceError(`Invalid PieceCID: ${String(expectedPieceCid)}`) } // Check response is OK if (!response.ok) { - throw new Error(`Download failed: ${response.status} ${response.statusText}`) + throw new DownloadPieceError(`Download failed: ${response.status} ${response.statusText}`) } if (response.body == null) { - throw new Error('Response body is null') + throw new DownloadPieceError('Response body is null') } // Create PieceCID calculation stream @@ -451,19 +417,19 @@ export async function downloadAndValidate( } if (chunks.length === 0) { - throw new Error('Response body is empty') + throw new DownloadPieceError('Response body is empty') } // Get the calculated PieceCID const calculatedPieceCid = getPieceCID() if (calculatedPieceCid == null) { - throw new Error('Failed to calculate PieceCID from stream') + throw new DownloadPieceError('Failed to calculate PieceCID from stream') } // Verify the PieceCID if (calculatedPieceCid.toString() !== parsedPieceCid.toString()) { - throw new Error( + throw new DownloadPieceError( `PieceCID verification failed. Expected: ${String(parsedPieceCid)}, Got: ${String(calculatedPieceCid)}` ) } diff --git a/packages/synapse-core/src/pull.ts b/packages/synapse-core/src/pull.ts new file mode 100644 index 00000000..32d0cc0c --- /dev/null +++ b/packages/synapse-core/src/pull.ts @@ -0,0 +1,245 @@ +/** + * SP-to-SP Piece Pull Operations + * + * Provides functionality to pull pieces from external storage providers. + * Uses Curio's POST /pdp/piece/pull endpoint which is idempotent - + * repeated calls with the same extraData return the current status + * rather than creating duplicate requests. + * + * @example + * ```ts + * import * as Pull from '@filoz/synapse-core/pull' + * ``` + * + * @module pull + */ + +import { type AbortError, HttpError, type NetworkError, request, type TimeoutError } from 'iso-web/http' +import type { Address, Hex } from 'viem' +import { PullError } from './errors/pull.ts' +import { RETRY_CONSTANTS } from './utils/constants.ts' + +let TIMEOUT = RETRY_CONSTANTS.MAX_RETRY_TIME +export const RETRIES = RETRY_CONSTANTS.RETRIES +export const FACTOR = RETRY_CONSTANTS.FACTOR +export const MIN_TIMEOUT = RETRY_CONSTANTS.DELAY_TIME + +// For testing purposes +export function setTimeout(timeout: number) { + TIMEOUT = timeout +} +export function resetTimeout() { + TIMEOUT = RETRY_CONSTANTS.MAX_RETRY_TIME +} + +export { AbortError, NetworkError, TimeoutError } from 'iso-web/http' + +/** + * Status of a pull operation or individual piece. 
+ * + * Status progression: + * - `pending`: Piece is queued but download hasn't started + * - `inProgress`: Download task is actively running (first attempt) + * - `retrying`: Download task is running after one or more failures + * - `complete`: Piece successfully downloaded and verified + * - `failed`: Piece permanently failed after exhausting retries + * + * Overall response status reflects the worst-case across all pieces: + * failed > retrying > inProgress > pending > complete + */ +export type PullStatus = 'pending' | 'inProgress' | 'retrying' | 'complete' | 'failed' + +/** + * Input piece for a pull request. + */ +export type PullPieceInput = { + /** PieceCIDv2 format (encodes both CommP and raw size) */ + pieceCid: string + /** HTTPS URL to pull the piece from (must end in /piece/{pieceCid}) */ + sourceUrl: string +} + +/** + * Status of a single piece in a pull response. + */ +export type PullPieceStatus = { + /** PieceCIDv2 of the piece */ + pieceCid: string + /** Current status of this piece */ + status: PullStatus +} + +/** + * Response from a pull request. + */ +export type PullResponse = { + /** Overall status (worst-case across all pieces) */ + status: PullStatus + /** Per-piece status */ + pieces: PullPieceStatus[] +} + +// biome-ignore lint/style/noNamespace: namespaced types +export namespace fetchPieces { + /** + * Options for pulling pieces from external SPs. + */ + export type OptionsType = { + /** The endpoint of the PDP API. */ + endpoint: string + /** The record keeper contract address (e.g., FWSS). */ + recordKeeper: Address + /** EIP-712 signed extraData for authorization. */ + extraData: Hex + /** Optional target dataset ID (omit or 0n to create new). */ + dataSetId?: bigint + /** Pieces to pull with their source URLs. */ + pieces: PullPieceInput[] + /** Optional AbortSignal to cancel the request. */ + signal?: AbortSignal + } + + export type ReturnType = PullResponse + + export type ErrorType = PullError | TimeoutError | NetworkError | AbortError + + export type RequestBody = { + extraData: Hex + recordKeeper: Address + pieces: PullPieceInput[] + dataSetId?: number + } +} + +/** + * Build the JSON request body for a pull request. + */ +function buildRequestBody(options: fetchPieces.OptionsType): string { + const body: fetchPieces.RequestBody = { + extraData: options.extraData, + recordKeeper: options.recordKeeper, + pieces: options.pieces, + } + + // Only include dataSetId if specified and non-zero + if (options.dataSetId != null && options.dataSetId > 0n) { + body.dataSetId = Number(options.dataSetId) + } + + return JSON.stringify(body) +} + +/** + * Initiate a piece pull request or get status of an existing one. + * + * POST /pdp/piece/pull + * + * This endpoint is idempotent - calling with the same extraData returns + * the status of the existing pull rather than creating duplicates. + * This allows safe retries and status polling using the same request. + * + * @param options - {@link fetchPieces.OptionsType} + * @returns The current status of the pull operation. 
{@link fetchPieces.ReturnType} + * @throws Errors {@link fetchPieces.ErrorType} + */ +export async function fetchPieces(options: fetchPieces.OptionsType): Promise { + const response = await request.post(new URL('pdp/piece/pull', options.endpoint), { + body: buildRequestBody(options), + headers: { + 'Content-Type': 'application/json', + }, + timeout: TIMEOUT, + signal: options.signal, + }) + + if (response.error) { + if (HttpError.is(response.error)) { + throw new PullError(await response.error.response.text()) + } + throw response.error + } + + return (await response.result.json()) as fetchPieces.ReturnType +} + +// biome-ignore lint/style/noNamespace: namespaced types +export namespace waitForFetchStatus { + /** + * Options for polling pull status. + */ + export type OptionsType = fetchPieces.OptionsType & { + /** Callback invoked on each poll with current status. */ + onStatus?: (response: PullResponse) => void + /** Minimum time between poll attempts in milliseconds (default: 4000). */ + minTimeout?: number + } + + export type ReturnType = PullResponse + + export type ErrorType = PullError | TimeoutError | NetworkError | AbortError +} + +/** + * Wait for pull completion. + * + * Repeatedly calls the pull endpoint until all pieces are complete or any piece fails. + * Since the endpoint is idempotent, this effectively polls for status updates. + * + * @param options - {@link waitForFetchStatus.OptionsType} + * @returns The final status when complete or failed. {@link waitForFetchStatus.ReturnType} + * @throws Errors {@link waitForFetchStatus.ErrorType} + */ +export async function waitForFetchStatus( + options: waitForFetchStatus.OptionsType +): Promise { + const url = new URL('pdp/piece/pull', options.endpoint) + const body = buildRequestBody(options) + const headers = { 'Content-Type': 'application/json' } + + // Custom fetch that creates a fresh Request each time to avoid body consumption issues + // (iso-web creates Request once and reuses it, but POST bodies can only be read once) + const fetchWithFreshRequest: typeof globalThis.fetch = (input, init) => { + // iso-web passes the Request object as input, extract signal from it + const signal = input instanceof Request ? input.signal : init?.signal + return globalThis.fetch(url, { method: 'POST', body, headers, signal }) + } + + const response = await request.post(url, { + body, + headers, + fetch: fetchWithFreshRequest, + async onResponse(response) { + if (response.ok) { + const data = (await response.clone().json()) as PullResponse + + // Invoke status callback if provided + if (options.onStatus) { + options.onStatus(data) + } + + // Stop polling when complete or failed + if (data.status === 'complete' || data.status === 'failed') { + return response + } + throw new Error('Pull not complete') + } + }, + retry: { + shouldRetry: (ctx) => ctx.error.message === 'Pull not complete', + retries: RETRIES, + factor: FACTOR, + minTimeout: options.minTimeout ?? 
MIN_TIMEOUT, + }, + timeout: TIMEOUT, + signal: options.signal, + }) + + if (response.error) { + if (HttpError.is(response.error)) { + throw new PullError(await response.error.response.text()) + } + throw response.error + } + + return (await response.result.json()) as waitForFetchStatus.ReturnType +} diff --git a/packages/synapse-core/src/sp.ts b/packages/synapse-core/src/sp.ts index 8498f4c2..2d0d4d1b 100644 --- a/packages/synapse-core/src/sp.ts +++ b/packages/synapse-core/src/sp.ts @@ -9,7 +9,7 @@ * @module sp */ -import { HttpError, request, TimeoutError } from 'iso-web/http' +import { type AbortError, HttpError, type NetworkError, request, TimeoutError } from 'iso-web/http' import type { ToString } from 'multiformats' import type { Simplify } from 'type-fest' import { type Address, type Hex, isHex } from 'viem' @@ -17,93 +17,63 @@ import { AddPiecesError, CreateDataSetError, DeletePieceError, + DownloadPieceError, FindPieceError, GetDataSetError, InvalidUploadSizeError, LocationHeaderError, - PollDataSetCreationStatusError, - PollForAddPiecesStatusError, PostPieceError, UploadPieceError, + WaitDataSetCreationStatusError, + WaitForAddPiecesStatusError, } from './errors/pdp.ts' import type { PieceCID } from './piece.ts' import * as Piece from './piece.ts' -import { SIZE_CONSTANTS } from './utils/constants.ts' -import { createPieceUrl } from './utils/piece-url.ts' +import type * as TypedData from './typed-data/index.ts' +import { RETRY_CONSTANTS, SIZE_CONSTANTS } from './utils/constants.ts' +import { createPieceUrl, createPieceUrlPDP } from './utils/piece-url.ts' +import { asReadableStream } from './utils/streams.ts' -let TIMEOUT = 1000 * 60 * 5 // 5 minutes -export const RETRIES = Infinity -export const FACTOR = 1 -export const MIN_TIMEOUT = 4000 // interval between retries in milliseconds +let TIMEOUT = RETRY_CONSTANTS.MAX_RETRY_TIME +export const RETRIES = RETRY_CONSTANTS.RETRIES +export const FACTOR = RETRY_CONSTANTS.FACTOR +export const MIN_TIMEOUT = RETRY_CONSTANTS.DELAY_TIME // Just for testing purposes export function setTimeout(timeout: number) { TIMEOUT = timeout } +export function resetTimeout() { + TIMEOUT = RETRY_CONSTANTS.MAX_RETRY_TIME +} -/** - * Convert AsyncIterable to ReadableStream with broad browser compatibility. - * Provides fallback for environments where ReadableStream.from() is not available. - * - * Uses pull-based streaming to implement proper backpressure and ensure all - * chunks are consumed in order. - */ -function asyncIterableToReadableStream(iterable: AsyncIterable): ReadableStream { - if (!isAsyncIterable(iterable)) { - throw new Error('Input must be an AsyncIterable') +export { AbortError, NetworkError, TimeoutError } from 'iso-web/http' + +// biome-ignore lint/style/noNamespace: namespaced types +export namespace createDataSet { + /** + * The options for the create data set on PDP API. + */ + export type OptionsType = { + /** The endpoint of the PDP API. */ + endpoint: string + /** The address of the record keeper. */ + recordKeeper: Address + /** The extra data for the create data set. 
*/ + extraData: Hex } - // Use native ReadableStream.from() if available - // See https://developer.mozilla.org/en-US/docs/Web/API/ReadableStream/from_static for latest - // support matrix, as of late 2025 this is still "Experimental" - if (typeof ReadableStream.from === 'function') { - return ReadableStream.from(iterable) + export type ReturnType = { + txHash: Hex + statusUrl: string } - // Fallback implementation using pull-based streaming - const iterator = iterable[Symbol.asyncIterator]() + export type ErrorType = CreateDataSetError | LocationHeaderError | TimeoutError | NetworkError | AbortError - return new ReadableStream({ - async pull(controller) { - try { - const { value, done } = await iterator.next() - if (done) { - controller.close() - } else { - controller.enqueue(value) - } - } catch (error) { - // run cleanup on internal errors - if (iterator.return) { - try { - await iterator.return() - } catch { - // safely ignore - } - } - controller.error(error) - } - }, - async cancel() { - // Clean up iterator if stream is cancelled - if (iterator.return) { - await iterator.return() - } - }, - }) -} - -/** - * The options for the create data set on PDP API. - * - * @param endpoint - The endpoint of the PDP API. - * @param recordKeeper - The address of the record keeper. - * @param extraData - The extra data for the create data set. - */ -export type PDPCreateDataSetOptions = { - endpoint: string - recordKeeper: Address - extraData: Hex + export type RequestBody = { + recordKeeper: Address + extraData: Hex + } } /** @@ -111,13 +81,11 @@ export type PDPCreateDataSetOptions = { * * POST /pdp/data-sets * - * @param options - The options for the create data set on PDP API. - * @param options.endpoint - The endpoint of the PDP API. - * @param options.recordKeeper - The address of the record keeper. - * @param options.extraData - The extra data for the create data set. - * @returns The response from the create data set on PDP API. + * @param options - {@link createDataSet.OptionsType} + * @returns Transaction hash and status URL. {@link createDataSet.ReturnType} + * @throws Errors {@link createDataSet.ErrorType} */ -export async function createDataSet(options: PDPCreateDataSetOptions) { +export async function createDataSet(options: createDataSet.OptionsType): Promise { // Send the create data set message to the PDP const response = await request.post(new URL(`pdp/data-sets`, options.endpoint), { body: JSON.stringify({ @@ -149,10 +117,6 @@ export async function createDataSet(options: PDPCreateDataSetOptions) { } } -export type PollForDataSetCreationStatusOptions = { - statusUrl: string -} - export type DataSetCreatedResponse = | { createMessageHash: Hex @@ -172,20 +136,30 @@ export type DataSetCreateSuccess = { dataSetId: number } +// biome-ignore lint/style/noNamespace: namespaced types +export namespace waitForDataSetCreationStatus { + export type OptionsType = { + statusUrl: string + } + export type ReturnType = DataSetCreateSuccess + export type ErrorType = WaitDataSetCreationStatusError | TimeoutError | NetworkError | AbortError +} /** - * Poll for the data set creation status. + * Wait for the data set creation status. * * GET /pdp/data-sets/created({txHash}) * - * @param options - The options for the poll for data set creation status. - * @param options.statusUrl - The status URL of the data set creation. - * @returns The data set creation status. 
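+ * @example
+ * ```ts
+ * // Illustrative sketch: `rsp` is assumed to be the return value of a prior
+ * // createDataSet call, which carries the statusUrl to poll.
+ * const created = await waitForDataSetCreationStatus({ statusUrl: rsp.statusUrl })
+ * console.log(created.dataSetId)
+ * ```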
+ * @param options - {@link waitForDataSetCreationStatus.OptionsType} + * @returns Status {@link waitForDataSetCreationStatus.ReturnType} + * @throws Errors {@link waitForDataSetCreationStatus.ErrorType} */ -export async function pollForDataSetCreationStatus(options: PollForDataSetCreationStatusOptions) { - const response = await request.json.get(options.statusUrl, { +export async function waitForDataSetCreationStatus( + options: waitForDataSetCreationStatus.OptionsType +): Promise { + const response = await request.json.get(options.statusUrl, { async onResponse(response) { if (response.ok) { - const data = (await response.clone().json()) as DataSetCreatedResponse + const data = (await response.clone().json()) as waitForDataSetCreationStatus.ReturnType if (data.dataSetCreated) { return response @@ -204,19 +178,41 @@ export async function pollForDataSetCreationStatus(options: PollForDataSetCreati }) if (response.error) { if (HttpError.is(response.error)) { - throw new PollDataSetCreationStatusError(await response.error.response.text()) + throw new WaitDataSetCreationStatusError(await response.error.response.text()) } throw response.error } - return response.result as DataSetCreateSuccess -} - -export type PDPCreateDataSetAndAddPiecesOptions = { - endpoint: string - recordKeeper: Address - extraData: Hex - pieces: PieceCID[] + return response.result as waitForDataSetCreationStatus.ReturnType +} + +// biome-ignore lint/style/noNamespace: namespaced types +export namespace createDataSetAndAddPieces { + export type OptionsType = { + /** The endpoint of the PDP API. */ + endpoint: string + /** The address of the record keeper. */ + recordKeeper: Address + /** The extra data for the create data set and add pieces. */ + extraData: Hex + /** The pieces to add. */ + pieces: PieceCID[] + } + export type ReturnType = { + /** The transaction hash. */ + txHash: Hex + /** The status URL. */ + statusUrl: string + } + export type ErrorType = CreateDataSetError | LocationHeaderError | TimeoutError | NetworkError | AbortError + export type RequestBody = { + recordKeeper: Address + extraData: Hex + pieces: { + pieceCid: ToString + subPieces: { subPieceCid: ToString }[] + }[] + } } /** @@ -224,13 +220,13 @@ export type PDPCreateDataSetAndAddPiecesOptions = { * * POST /pdp/data-sets/create-and-add * - * @param options - The options for the create data set and add pieces to it on PDP API. - * @param options.endpoint - The endpoint of the PDP API. - * @param options.recordKeeper - The address of the record keeper. - * @param options.extraData - The extra data for the create data set. - * @returns The response from the create data set and add pieces to it on PDP API. 
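+ * @example
+ * ```ts
+ * // Illustrative sketch: `endpoint`, `recordKeeper`, the signed `extraData`
+ * // and a parsed `pieceCid` are assumed to exist in scope.
+ * const { txHash, statusUrl } = await createDataSetAndAddPieces({
+ *   endpoint,
+ *   recordKeeper,
+ *   extraData,
+ *   pieces: [pieceCid],
+ * })
+ * ```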
+ * @param options - {@link createDataSetAndAddPieces.OptionsType} + * @returns Hash and status URL {@link createDataSetAndAddPieces.ReturnType} + * @throws Errors {@link createDataSetAndAddPieces.ErrorType} */ -export async function createDataSetAndAddPieces(options: PDPCreateDataSetAndAddPiecesOptions) { +export async function createDataSetAndAddPieces( + options: createDataSetAndAddPieces.OptionsType +): Promise { // Send the create data set message to the PDP const response = await request.post(new URL(`pdp/data-sets/create-and-add`, options.endpoint), { body: JSON.stringify({ @@ -266,17 +262,6 @@ export async function createDataSetAndAddPieces(options: PDPCreateDataSetAndAddP } } -export type GetDataSetOptions = { - endpoint: string - dataSetId: bigint -} - -export type GetDataSetResponse = { - id: number - nextChallengeEpoch: number - pieces: SPPiece[] -} - export type SPPiece = { pieceCid: string pieceId: number @@ -284,18 +269,33 @@ export type SPPiece = { subPieceOffset: number } +// biome-ignore lint/style/noNamespace: namespaced types +export namespace getDataSet { + export type OptionsType = { + /** The endpoint of the PDP API. */ + endpoint: string + /** The ID of the data set. */ + dataSetId: bigint + } + export type ReturnType = { + id: number + nextChallengeEpoch: number + pieces: SPPiece[] + } + export type ErrorType = GetDataSetError | TimeoutError | NetworkError | AbortError +} + /** * Get a data set from the PDP API. * * GET /pdp/data-sets/{dataSetId} * - * @param options - The options for the get data set from the PDP API. - * @param options.endpoint - The endpoint of the PDP API. - * @param options.dataSetId - The ID of the data set. - * @returns The data set from the PDP API. + * @param options - {@link getDataSet.OptionsType} + * @returns The data set from the PDP API. {@link getDataSet.ReturnType} + * @throws Errors {@link getDataSet.ErrorType} */ -export async function getDataSet(options: GetDataSetOptions) { - const response = await request.json.get( +export async function getDataSet(options: getDataSet.OptionsType): Promise { + const response = await request.json.get( new URL(`pdp/data-sets/${options.dataSetId}`, options.endpoint) ) if (response.error) { @@ -308,32 +308,40 @@ export async function getDataSet(options: GetDataSetOptions) { return response.result } -export type GetPiecesForDataSetOptions = { - endpoint: string - dataSetId: bigint - chainId: number - address: Address - cdn: boolean -} - export type SPPieceWithUrl = Simplify< SPPiece & { pieceUrl: string } > +// biome-ignore lint/style/noNamespace: namespaced types +export namespace getPiecesForDataSet { + export type OptionsType = { + /** The endpoint of the PDP API. */ + endpoint: string + /** The ID of the data set. */ + dataSetId: bigint + /** The chain ID. */ + chainId: number + /** The address of the user. */ + address: Address + /** Whether the CDN is enabled. */ + cdn: boolean + } + export type ReturnType = SPPieceWithUrl[] + export type ErrorType = GetDataSetError | TimeoutError | NetworkError | AbortError +} + /** * Get the pieces for a data set from the PDP API. * - * - * @param options - The options for the get pieces for data set. - * @param options.endpoint - The endpoint of the PDP API. - * @param options.dataSetId - The ID of the data set. - * @param options.chainId - The chain ID. - * @param options.address - The address of the user. - * @param options.cdn - Whether the CDN is enabled. + * @param options - {@link getPiecesForDataSet.OptionsType} + * @returns Pieces with URLs. 
{@link getPiecesForDataSet.ReturnType} + * @throws Errors {@link getPiecesForDataSet.ErrorType} */ -export async function getPiecesForDataSet(options: GetPiecesForDataSetOptions): Promise { +export async function getPiecesForDataSet( + options: getPiecesForDataSet.OptionsType +): Promise { const dataSet = await getDataSet(options) const pieces = dataSet.pieces.map((piece) => ({ pieceCid: piece.pieceCid, @@ -346,15 +354,17 @@ export async function getPiecesForDataSet(options: GetPiecesForDataSetOptions): return pieces } -export type UploadPieceOptions = { - endpoint: string - data: Uint8Array - pieceCid: PieceCID -} - -export type UploadPieceResponse = { - pieceCid: PieceCID - size: number +// biome-ignore lint/style/noNamespace: namespaced types +export namespace uploadPiece { + export type OptionsType = { + /** The endpoint of the PDP API. */ + endpoint: string + /** The data to upload. */ + data: Uint8Array + /** The piece CID to upload. */ + pieceCid: PieceCID + } + export type ErrorType = InvalidUploadSizeError | LocationHeaderError | TimeoutError | NetworkError | AbortError } /** @@ -362,12 +372,10 @@ export type UploadPieceResponse = { * * POST /pdp/piece * - * @param options - The options for the upload piece. - * @param options.endpoint - The endpoint of the PDP API. - * @param options.data - The data to upload. - * @returns The response from the upload piece. + * @param options - {@link uploadPiece.OptionsType} + * @throws Errors {@link uploadPiece.ErrorType} */ -export async function uploadPiece(options: UploadPieceOptions): Promise { +export async function uploadPiece(options: uploadPiece.OptionsType): Promise { const size = options.data.length if (size < SIZE_CONSTANTS.MIN_UPLOAD_SIZE || size > SIZE_CONSTANTS.MAX_UPLOAD_SIZE) { throw new InvalidUploadSizeError(size) @@ -431,6 +439,11 @@ export type UploadPieceStreamingOptions = { signal?: AbortSignal } +export type UploadPieceResponse = { + pieceCid: PieceCID + size: number +} + /** * Upload piece data using the 3-step CommP-last streaming protocol. * @@ -499,7 +512,7 @@ export async function uploadPieceStreaming(options: UploadPieceStreamingOptions) bytesUploaded += chunk.length // Check size limit - if (bytesUploaded > Piece.MAX_UPLOAD_SIZE) { + if (bytesUploaded > SIZE_CONSTANTS.MAX_UPLOAD_SIZE) { throw new InvalidUploadSizeError(bytesUploaded) } @@ -583,22 +596,27 @@ export async function uploadPieceStreaming(options: UploadPieceStreamingOptions) } } -export type FindPieceOptions = { - endpoint: string - pieceCid: PieceCID +// biome-ignore lint/style/noNamespace: namespaced types +export namespace findPiece { + export type OptionsType = { + /** The endpoint of the PDP API. */ + endpoint: string + /** The piece CID to find. */ + pieceCid: PieceCID + } + export type ReturnType = PieceCID + export type ErrorType = FindPieceError | TimeoutError | NetworkError | AbortError } - /** * Find a piece on the PDP API. * * GET /pdp/piece?pieceCid={pieceCid} * - * @param options - The options for the find piece. - * @param options.endpoint - The endpoint of the PDP API. - * @param options.pieceCid - The piece CID to find. 
- * @returns + * @param options - {@link findPiece.OptionsType} + * @returns Piece CID {@link findPiece.ReturnType} + * @throws Errors {@link findPiece.ErrorType} */ -export async function findPiece(options: FindPieceOptions): Promise { +export async function findPiece(options: findPiece.OptionsType): Promise { const { pieceCid, endpoint } = options const params = new URLSearchParams({ pieceCid: pieceCid.toString() }) @@ -624,21 +642,32 @@ export async function findPiece(options: FindPieceOptions): Promise { return Piece.parse(data.pieceCid) } -export type AddPiecesOptions = { - endpoint: string - dataSetId: bigint - pieces: PieceCID[] - extraData: Hex -} - -export type AddPiecesRequest = { - pieces: { - pieceCid: ToString - subPieces: { - subPieceCid: ToString +// biome-ignore lint/style/noNamespace: namespaced types +export namespace addPieces { + export type OptionsType = { + /** The endpoint of the PDP API. */ + endpoint: string + /** The ID of the data set. */ + dataSetId: bigint + /** The pieces to add. */ + pieces: PieceCID[] + /** The extra data for the add pieces. {@link TypedData.signAddPieces} */ + extraData: Hex + } + export type ReturnType = { + /** The transaction hash. */ + txHash: Hex + /** The status URL. */ + statusUrl: string + } + export type ErrorType = AddPiecesError | LocationHeaderError | TimeoutError | NetworkError | AbortError + export type RequestBody = { + pieces: { + pieceCid: ToString + subPieces: { subPieceCid: ToString }[] }[] - }[] - extraData: Hex + extraData: Hex + } } /** @@ -646,14 +675,11 @@ export type AddPiecesRequest = { * * POST /pdp/data-sets/{dataSetId}/pieces * - * @param options - The options for the add pieces. - * @param options.endpoint - The endpoint of the PDP API. - * @param options.dataSetId - The ID of the data set. - * @param options.pieces - The pieces to add. - * @param options.extraData - The extra data for the add pieces. - * @returns The response from the add pieces. + * @param options - {@link addPieces.OptionsType} + * @returns Hash and status URL {@link addPieces.ReturnType} + * @throws Errors {@link addPieces.ErrorType} */ -export async function addPieces(options: AddPiecesOptions) { +export async function addPieces(options: addPieces.OptionsType): Promise { const { endpoint, dataSetId, pieces, extraData } = options const response = await request.post(new URL(`pdp/data-sets/${dataSetId}/pieces`, endpoint), { headers: { @@ -717,25 +743,33 @@ export type AddPiecesSuccess = { txStatus: 'confirmed' } -export type PollForAddPiecesStatusOptions = { - statusUrl: string +// biome-ignore lint/style/noNamespace: namespaced types +export namespace waitForAddPiecesStatus { + export type OptionsType = { + statusUrl: string + } + export type ReturnType = AddPiecesSuccess + export type ErrorType = WaitForAddPiecesStatusError | TimeoutError | NetworkError | AbortError } /** - * Poll for the add pieces status. + * Wait for the add pieces status. * * GET /pdp/data-sets/{dataSetId}/pieces/added/{txHash} * - * @param options - The options for the poll for add pieces status. - * @param options.statusUrl - The status URL of the add pieces. - * @returns The add pieces status. 
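+ * @example
+ * ```ts
+ * // Illustrative sketch: `rsp` is assumed to be the return value of a prior
+ * // addPieces call, which carries the statusUrl to poll.
+ * const added = await waitForAddPiecesStatus({ statusUrl: rsp.statusUrl })
+ * console.log(added.confirmedPieceIds)
+ * ```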
+ * TODO: add onEvent for txConfirmed + * + * @param options - {@link waitForAddPiecesStatus.OptionsType} + * @returns Status {@link waitForAddPiecesStatus.ReturnType} + * @throws Errors {@link waitForAddPiecesStatus.ErrorType} */ -export async function pollForAddPiecesStatus(options: PollForAddPiecesStatusOptions) { +export async function waitForAddPiecesStatus( + options: waitForAddPiecesStatus.OptionsType +): Promise { const response = await request.json.get(options.statusUrl, { async onResponse(response) { if (response.ok) { const data = (await response.clone().json()) as AddPiecesResponse - if (data.piecesAdded) { return response } @@ -748,36 +782,43 @@ export async function pollForAddPiecesStatus(options: PollForAddPiecesStatusOpti factor: FACTOR, minTimeout: MIN_TIMEOUT, }, - timeout: 1000 * 60 * 5, + timeout: TIMEOUT, }) if (response.error) { if (HttpError.is(response.error)) { - throw new PollForAddPiecesStatusError(await response.error.response.text()) + throw new WaitForAddPiecesStatusError(await response.error.response.text()) } throw response.error } return response.result as AddPiecesSuccess } -export type DeletePieceOptions = { - endpoint: string - dataSetId: bigint - pieceId: bigint - extraData: Hex -} - -export type DeletePieceResponse = { - txHash: Hex +// biome-ignore lint/style/noNamespace: namespaced types +export namespace deletePiece { + export type OptionsType = { + endpoint: string + dataSetId: bigint + pieceId: bigint + extraData: Hex + } + export type ReturnType = { + txHash: Hex + } + export type ErrorType = DeletePieceError | TimeoutError | NetworkError | AbortError } /** * Delete a piece from a data set on the PDP API. * * DELETE /pdp/data-sets/{dataSetId}/pieces/{pieceId} + * + * @param options - {@link deletePiece.OptionsType} + * @returns Hash of the delete operation {@link deletePiece.ReturnType} + * @throws Errors {@link deletePiece.ErrorType} */ -export async function deletePiece(options: DeletePieceOptions) { +export async function deletePiece(options: deletePiece.OptionsType): Promise { const { endpoint, dataSetId, pieceId, extraData } = options - const response = await request.json.delete( + const response = await request.json.delete( new URL(`pdp/data-sets/${dataSetId}/pieces/${pieceId}`, endpoint), { body: { extraData }, @@ -794,6 +835,15 @@ export async function deletePiece(options: DeletePieceOptions) { return response.result } +/** + * Ping the PDP API. + * + * GET /pdp/ping + * + * @param endpoint - The endpoint of the PDP API. + * @returns void + * @throws Errors {@link Error} + */ export async function ping(endpoint: string) { const response = await request.get(new URL(`pdp/ping`, endpoint)) if (response.error) { @@ -802,39 +852,33 @@ export async function ping(endpoint: string) { return response.result } -/** - * Type guard to check if a value is a ReadableStream - * @param value - The value to check - * @returns True if it's a ReadableStream - */ -function isReadableStream(value: unknown): value is ReadableStream { - return ( - typeof value === 'object' && - value !== null && - 'getReader' in value && - typeof (value as ReadableStream).getReader === 'function' - ) -} - -/** - * Convert AsyncIterable or ReadableStream to ReadableStream - * @param data - AsyncIterable or ReadableStream to convert - * @returns ReadableStream - */ -function asReadableStream(data: AsyncIterable | ReadableStream): ReadableStream { - return isReadableStream(data) ? 
data : asyncIterableToReadableStream(data) +// biome-ignore lint/style/noNamespace: namespaced types +export namespace downloadPiece { + export type OptionsType = { + endpoint: string + pieceCid: PieceCID + } + export type ReturnType = Uint8Array + export type ErrorType = DownloadPieceError | TimeoutError | NetworkError | AbortError } /** - * Type guard to check if a value is an AsyncIterable - * @param value - The value to check - * @returns True if it's an AsyncIterable + * Download a piece and verify from the PDP API. + * + * GET /piece/{pieceCid} + * + * @param options - {@link downloadPiece.OptionsType} + * @returns Data {@link downloadPiece.ReturnType} + * @throws Errors {@link downloadPiece.ErrorType} */ -function isAsyncIterable(value: unknown): value is AsyncIterable { - return ( - typeof value === 'object' && - value !== null && - Symbol.asyncIterator in value && - typeof (value as AsyncIterable)[Symbol.asyncIterator] === 'function' - ) +export async function downloadPiece(options: downloadPiece.OptionsType): Promise { + const url = createPieceUrlPDP(options.pieceCid.toString(), options.endpoint) + const response = await request.get(url) + if (response.error) { + if (HttpError.is(response.error)) { + throw new DownloadPieceError(await response.error.response.text()) + } + throw response.error + } + return await Piece.downloadAndValidate(response.result, options.pieceCid) } diff --git a/packages/synapse-core/src/typed-data/index.ts b/packages/synapse-core/src/typed-data/index.ts index 4cb795be..27b81ec8 100644 --- a/packages/synapse-core/src/typed-data/index.ts +++ b/packages/synapse-core/src/typed-data/index.ts @@ -10,5 +10,7 @@ */ export * from './sign-add-pieces.ts' export * from './sign-create-dataset.ts' +export * from './sign-create-dataset-add-pieces.ts' export * from './sign-erc20-permit.ts' +export * from './sign-schedule-piece-removals.ts' export * from './type-definitions.ts' diff --git a/packages/synapse-core/src/typed-data/sign-add-pieces.ts b/packages/synapse-core/src/typed-data/sign-add-pieces.ts index 86476e5c..5d0a8494 100644 --- a/packages/synapse-core/src/typed-data/sign-add-pieces.ts +++ b/packages/synapse-core/src/typed-data/sign-add-pieces.ts @@ -1,51 +1,89 @@ -import { type Account, type Chain, type Client, encodeAbiParameters, type Transport, toHex } from 'viem' +import { + type Account, + type Address, + type Chain, + type Client, + type EncodeAbiParametersErrorType, + encodeAbiParameters, + type Hex, + type SignTypedDataErrorType, + type Transport, + toHex, +} from 'viem' import { signTypedData } from 'viem/actions' -import { getChain } from '../chains.ts' +import { asChain } from '../chains.ts' import type { PieceCID } from '../piece.ts' +import { randU256 } from '../utils/rand.ts' import { EIP712Types, getStorageDomain, type MetadataEntry } from './type-definitions.ts' export type SignAddPiecesOptions = { + /** The client data set id to use for the signature. */ clientDataSetId: bigint - nonce: bigint - pieces: { pieceCid: PieceCID; metadata: MetadataEntry[] }[] + /** The pieces to sign. */ + pieces: { pieceCid: PieceCID; metadata?: MetadataEntry[] }[] + /** The nonce to use for the signature. */ + nonce?: bigint + /** The verifying contract to use. If not provided, the default is the FilecoinWarmStorageService contract address. 
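
Hypothetical usage of the new `downloadPiece` wrapper, which replaces the ad-hoc stream plumbing removed here (placeholder values, assumed import subpaths):

```ts
import type { PieceCID } from '@filoz/synapse-core/piece'
import * as SP from '@filoz/synapse-core/sp'

// pieceCid is a placeholder; downloadPiece fetches GET /piece/{pieceCid}
// and re-validates the bytes against it before returning them.
declare const pieceCid: PieceCID

const bytes: Uint8Array = await SP.downloadPiece({
  endpoint: 'https://sp.example.com',
  pieceCid,
})
```
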
*/ + verifyingContract?: Address } +export const signAddPiecesAbiParameters = [ + { type: 'uint256' }, + { type: 'string[][]' }, + { type: 'string[][]' }, + { type: 'bytes' }, +] as const + /** * Sign and abi encode the add pieces extra data * * @param client - The client to use to sign the extra data. - * @param options - The options for the add pieces extra data. + * @param options - {@link SignAddPiecesOptions} + * @returns Encoded extra data {@link signAddPieces.ReturnType} + * @throws Errors {@link signAddPieces.ErrorType} */ -export async function signAddPieces(client: Client, options: SignAddPiecesOptions) { - const chain = getChain(client.chain.id) +export async function signAddPieces( + client: Client, + options: signAddPieces.OptionsType +): Promise { + const chain = asChain(client.chain) + const { clientDataSetId, nonce: _nonce, pieces, verifyingContract } = options + const nonce = _nonce ?? randU256() + const signature = await signTypedData(client, { account: client.account, - domain: getStorageDomain({ chain }), + domain: getStorageDomain({ chain, verifyingContract }), types: EIP712Types, primaryType: 'AddPieces', message: { - clientDataSetId: options.clientDataSetId, - nonce: options.nonce, - pieceData: options.pieces.map((piece) => { + clientDataSetId, + nonce, + pieceData: pieces.map((piece) => { return { data: toHex(piece.pieceCid.bytes), } }), - pieceMetadata: options.pieces.map((piece, index) => ({ + + pieceMetadata: pieces.map((piece, index) => ({ pieceIndex: BigInt(index), - metadata: piece.metadata, + metadata: piece.metadata ?? [], })), }, }) - const metadataKV = Array.from(options.pieces, (piece) => piece.metadata) as MetadataEntry[][] - + const metadataKV = Array.from(pieces, (piece) => piece.metadata ?? []) as MetadataEntry[][] const keys = metadataKV.map((item) => item.map((item) => item.key)) const values = metadataKV.map((item) => item.map((item) => item.value)) - const extraData = encodeAbiParameters( - [{ type: 'uint256' }, { type: 'string[][]' }, { type: 'string[][]' }, { type: 'bytes' }], - [options.nonce, keys, values, signature] - ) + const extraData = encodeAbiParameters(signAddPiecesAbiParameters, [nonce, keys, values, signature]) return extraData } + +// biome-ignore lint/style/noNamespace: namespaced types +export namespace signAddPieces { + export type OptionsType = SignAddPiecesOptions + /** The extra data for the add pieces. */ + export type ReturnType = Hex + /** The errors that can occur when signing the add pieces. 
*/
+  export type ErrorType = SignTypedDataErrorType | EncodeAbiParametersErrorType | asChain.ErrorType
+}
diff --git a/packages/synapse-core/src/typed-data/sign-create-dataset-add-pieces.ts b/packages/synapse-core/src/typed-data/sign-create-dataset-add-pieces.ts
new file mode 100644
index 00000000..7399a512
--- /dev/null
+++ b/packages/synapse-core/src/typed-data/sign-create-dataset-add-pieces.ts
@@ -0,0 +1,55 @@
+import {
+  type Account,
+  type Address,
+  type Chain,
+  type Client,
+  type EncodeAbiParametersErrorType,
+  encodeAbiParameters,
+  type Hex,
+  type Transport,
+} from 'viem'
+import type { PieceCID } from '../piece.ts'
+import { signAddPieces } from './sign-add-pieces.ts'
+import { signCreateDataSet } from './sign-create-dataset.ts'
+import type { MetadataEntry } from './type-definitions.ts'
+
+export const signCreateDataSetAndAddPiecesAbiParameters = [{ type: 'bytes' }, { type: 'bytes' }] as const
+
+/**
+ * Sign and abi encode the create data set and add pieces extra data
+ *
+ * @param client - The client to use to sign the extra data.
+ * @param options - {@link signCreateDataSetAndAddPieces.OptionsType}
+ * @returns Encoded extra data {@link signCreateDataSetAndAddPieces.ReturnType}
+ * @throws Errors {@link signCreateDataSetAndAddPieces.ErrorType}
+ */
+export async function signCreateDataSetAndAddPieces(
+  client: Client<Transport, Chain, Account>,
+  options: signCreateDataSetAndAddPieces.OptionsType
+): Promise<Hex> {
+  const dataSetExtraData = await signCreateDataSet(client, options)
+  const addPiecesExtraData = await signAddPieces(client, options)
+  return encodeAbiParameters(signCreateDataSetAndAddPiecesAbiParameters, [dataSetExtraData, addPiecesExtraData])
+}
+
+// biome-ignore lint/style/noNamespace: namespaced types
+export namespace signCreateDataSetAndAddPieces {
+  export type OptionsType = {
+    /** The client data set id to use for the signature. */
+    clientDataSetId: bigint
+    /** The payee address to use for the signature. */
+    payee: Address
+    /** The payer address to use for the signature. If client is from a session key this should be set to the actual payer address. */
+    payer?: Address
+    /** Dataset metadata. */
+    metadata?: MetadataEntry[]
+    /** The pieces with metadata to sign. */
+    pieces: { pieceCid: PieceCID; metadata?: MetadataEntry[] }[]
+    /** The nonce to use for the add pieces signature. */
+    nonce?: bigint
+    /** The verifying contract to use. If not provided, the default is the FilecoinWarmStorageService contract address. */
+    verifyingContract?: Address
+  }
+  export type ReturnType = Hex
+  export type ErrorType = signCreateDataSet.ErrorType | signAddPieces.ErrorType | EncodeAbiParametersErrorType
+}
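
Because the combined payload is just two abi-encoded `bytes` blobs, the exported parameter array can decode what `signCreateDataSetAndAddPieces` produces, mirroring how the new sp.test.ts decodes with `signCreateDataSetAbiParameters`. A sketch with placeholder values and assumed import subpaths:

```ts
import type { PieceCID } from '@filoz/synapse-core/piece'
import {
  signCreateDataSetAndAddPieces,
  signCreateDataSetAndAddPiecesAbiParameters,
} from '@filoz/synapse-core/typed-data'
import { type Account, type Address, type Chain, type Client, decodeAbiParameters, type Transport } from 'viem'

// Placeholders for illustration only.
declare const walletClient: Client<Transport, Chain, Account>
declare const payee: Address
declare const pieceCid: PieceCID

const extraData = await signCreateDataSetAndAddPieces(walletClient, {
  clientDataSetId: 0n,
  payee,
  pieces: [{ pieceCid }],
})

// Two independently signed blobs, abi-encoded together.
const [createData, addPiecesData] = decodeAbiParameters(signCreateDataSetAndAddPiecesAbiParameters, extraData)
```
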
diff --git a/packages/synapse-core/src/typed-data/sign-create-dataset.ts b/packages/synapse-core/src/typed-data/sign-create-dataset.ts
index ae71da88..f993324f 100644
--- a/packages/synapse-core/src/typed-data/sign-create-dataset.ts
+++ b/packages/synapse-core/src/typed-data/sign-create-dataset.ts
@@ -1,27 +1,47 @@
-import type { Account, Address, Chain, Client, Transport } from 'viem'
+import type {
+  Account,
+  Address,
+  Chain,
+  Client,
+  EncodeAbiParametersErrorType,
+  Hex,
+  SignTypedDataErrorType,
+  Transport,
+} from 'viem'
 import { encodeAbiParameters } from 'viem'
 import { signTypedData } from 'viem/actions'
-import { getChain } from '../chains.ts'
+import { asChain } from '../chains.ts'
 import { EIP712Types, getStorageDomain, type MetadataEntry } from './type-definitions.ts'
 
-export type signDataSetOptions = {
+export type signCreateDataSetOptions = {
+  /** The client data set id. */
   clientDataSetId: bigint
+  /** The payee address. */
   payee: Address
-  /**
-   * If client is from a session key this should be set to the actual payer address
-   */
+  /** The payer address. If client is from a session key this should be set to the actual payer address. */
   payer?: Address
-  metadata: MetadataEntry[]
+  /** The metadata for the data set. */
+  metadata?: MetadataEntry[]
 }
 
+export const signCreateDataSetAbiParameters = [
+  { type: 'address' },
+  { type: 'uint256' },
+  { type: 'string[]' },
+  { type: 'string[]' },
+  { type: 'bytes' },
+] as const
+
 /**
  * Sign and abi encode the create data set extra data
  *
  * @param client - The client to use to sign the message.
- * @param options - The options for the create data set extra data.
+ * @param options - {@link signCreateDataSetOptions}
+ * @throws Errors {@link signCreateDataSet.ErrorType}
  */
-export async function signCreateDataSet(client: Client<Transport, Chain, Account>, options: signDataSetOptions) {
-  const chain = getChain(client.chain.id)
+export async function signCreateDataSet(client: Client<Transport, Chain, Account>, options: signCreateDataSetOptions) {
+  const chain = asChain(client.chain)
+  const metadata = options.metadata ?? []
   const signature = await signTypedData(client, {
     account: client.account,
     domain: getStorageDomain({ chain }),
@@ -30,18 +50,31 @@ export async function signCreateDataSet(client: Client<Transport, Chain, Account>, options: signDataSetOptions) {
-  const keys = options.metadata.map((item) => item.key)
-  const values = options.metadata.map((item) => item.value)
+  const keys = metadata.map((item) => item.key)
+  const values = metadata.map((item) => item.value)
 
   const payer = options.payer ?? client.account.address
 
-  const extraData = encodeAbiParameters(
-    [{ type: 'address' }, { type: 'uint256' }, { type: 'string[]' }, { type: 'string[]' }, { type: 'bytes' }],
-    [payer, options.clientDataSetId, keys, values, signature]
-  )
+  const extraData = encodeAbiParameters(signCreateDataSetAbiParameters, [
+    payer,
+    options.clientDataSetId,
+    keys,
+    values,
+    signature,
+  ])
 
   return extraData
 }
+
+// biome-ignore lint/style/noNamespace: namespaced types
+export namespace signCreateDataSet {
+  /** The options for the create data set. */
+  export type OptionsType = signCreateDataSetOptions
+  /** The extra data for the create data set. */
+  export type ReturnType = Hex
+  /** The errors that can occur when signing the create data set.
*/ + export type ErrorType = SignTypedDataErrorType | EncodeAbiParametersErrorType | asChain.ErrorType +} diff --git a/packages/synapse-core/src/typed-data/sign-erc20-permit.ts b/packages/synapse-core/src/typed-data/sign-erc20-permit.ts index 19294e31..9f27b18a 100644 --- a/packages/synapse-core/src/typed-data/sign-erc20-permit.ts +++ b/packages/synapse-core/src/typed-data/sign-erc20-permit.ts @@ -1,6 +1,6 @@ import type { Account, Address, Chain, Client, Transport } from 'viem' import { signTypedData } from 'viem/actions' -import { getChain } from '../chains.ts' +import { asChain } from '../chains.ts' import { EIP712Types } from './type-definitions.ts' export type SignErc20PermitOptions = { @@ -41,14 +41,14 @@ export type SignErc20PermitOptions = { * @param options - The options for the ERC20 permit message. */ export async function signErc20Permit(client: Client, options: SignErc20PermitOptions) { - const chain = getChain(client.chain.id) + const chain = asChain(client.chain) const { amount, nonce, deadline, name, version } = options const spender = options.spender ?? chain.contracts.payments.address const token = options.token ?? chain.contracts.usdfc.address const domain = { - chainId: client.chain.id, + chainId: chain.id, name: name, version: version, verifyingContract: token, diff --git a/packages/synapse-core/src/typed-data/sign-schedule-piece-removals.ts b/packages/synapse-core/src/typed-data/sign-schedule-piece-removals.ts index e929fa43..12102ae8 100644 --- a/packages/synapse-core/src/typed-data/sign-schedule-piece-removals.ts +++ b/packages/synapse-core/src/typed-data/sign-schedule-piece-removals.ts @@ -1,6 +1,6 @@ import { type Account, type Chain, type Client, encodeAbiParameters, type Transport } from 'viem' import { signTypedData } from 'viem/actions' -import { getChain } from '../chains.ts' +import { asChain } from '../chains.ts' import { EIP712Types, getStorageDomain } from './type-definitions.ts' export type SignSchedulePieceRemovalsOptions = { @@ -18,7 +18,7 @@ export async function signSchedulePieceRemovals( client: Client, options: SignSchedulePieceRemovalsOptions ) { - const chain = getChain(client.chain.id) + const chain = asChain(client.chain) const signature = await signTypedData(client, { account: client.account, domain: getStorageDomain({ chain }), diff --git a/packages/synapse-core/src/utils/constants.ts b/packages/synapse-core/src/utils/constants.ts index 09d1d505..e95a4cf0 100644 --- a/packages/synapse-core/src/utils/constants.ts +++ b/packages/synapse-core/src/utils/constants.ts @@ -58,10 +58,18 @@ export const SIZE_CONSTANTS = { PiB: 1n << 50n, /** - * Maximum upload size (200 MiB) - * Current limitation for PDP uploads + * Maximum upload size currently supported by PDP servers. + * + * 1 GiB adjusted for fr32 expansion: 1 GiB * (127/128) = 1,065,353,216 bytes + * + * Fr32 encoding adds 2 bits of padding per 254 bits of data, resulting in 128 bytes + * of padded data for every 127 bytes of raw data. + * + * Note: While it's technically possible to upload pieces this large as Uint8Array, + * streaming via AsyncIterable is strongly recommended for non-trivial sizes. + * See SIZE_CONSTANTS.MAX_UPLOAD_SIZE in synapse-sdk for detailed guidance. 
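
The arithmetic behind the new constant, as a quick sanity check:

```ts
// Fr32 padding: every 127 raw bytes become 128 padded bytes, so the
// largest raw payload that fits a 1 GiB padded piece is 1 GiB * 127/128.
const GiB = 1n << 30n
const maxUploadSize = (GiB * 127n) / 128n
console.log(maxUploadSize) // 1065353216n, matching SIZE_CONSTANTS.MAX_UPLOAD_SIZE
```
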
*/ - MAX_UPLOAD_SIZE: 200 * 1024 * 1024, + MAX_UPLOAD_SIZE: 1_065_353_216, // 1 GiB * 127/128 /** * Minimum upload size (127 bytes) @@ -77,3 +85,10 @@ export const SIZE_CONSTANTS = { } as const export const LOCKUP_PERIOD = 30n * TIME_CONSTANTS.EPOCHS_PER_DAY + +export const RETRY_CONSTANTS = { + FACTOR: 1, + DELAY_TIME: 4000, // 4 seconds in milliseconds between retries + RETRIES: Infinity, + MAX_RETRY_TIME: 1000 * 60 * 5, // 5 minutes in milliseconds +} as const diff --git a/packages/synapse-core/src/utils/index.ts b/packages/synapse-core/src/utils/index.ts index 70b2e823..2c5055af 100644 --- a/packages/synapse-core/src/utils/index.ts +++ b/packages/synapse-core/src/utils/index.ts @@ -7,4 +7,5 @@ export * from './metadata.ts' export * from './pdp-capabilities.ts' export * from './piece-url.ts' export * from './rand.ts' +export * from './streams.ts' export * from './viem.ts' diff --git a/packages/synapse-core/src/utils/piece-url.ts b/packages/synapse-core/src/utils/piece-url.ts index a0498a18..7f685e06 100644 --- a/packages/synapse-core/src/utils/piece-url.ts +++ b/packages/synapse-core/src/utils/piece-url.ts @@ -11,7 +11,7 @@ export function createPieceUrl(cid: string, cdn: boolean, address: Address, chai } } -function createPieceUrlPDP(cid: string, pdpUrl: string) { +export function createPieceUrlPDP(cid: string, pdpUrl: string) { const endpoint = pdpUrl const url = `piece/${cid}` return new URL(url, endpoint).toString() diff --git a/packages/synapse-core/src/utils/streams.ts b/packages/synapse-core/src/utils/streams.ts new file mode 100644 index 00000000..3db4fec0 --- /dev/null +++ b/packages/synapse-core/src/utils/streams.ts @@ -0,0 +1,111 @@ +/** + * Type guard to check if a value is a ReadableStream + * @param value - The value to check + * @returns True if it's a ReadableStream + */ +export function isReadableStream(value: unknown): value is ReadableStream { + return ( + typeof value === 'object' && + value !== null && + 'getReader' in value && + typeof (value as ReadableStream).getReader === 'function' + ) +} + +/** + * Convert AsyncIterable or ReadableStream to ReadableStream + * @param data - AsyncIterable or ReadableStream to convert + * @returns ReadableStream + */ +export function asReadableStream( + data: AsyncIterable | ReadableStream +): ReadableStream { + return isReadableStream(data) ? data : asyncIterableToReadableStream(data) +} + +/** + * Type guard to check if a value is an AsyncIterable + * @param value - The value to check + * @returns True if it's an AsyncIterable + */ +export function isAsyncIterable(value: unknown): value is AsyncIterable { + return ( + typeof value === 'object' && + value !== null && + Symbol.asyncIterator in value && + typeof (value as AsyncIterable)[Symbol.asyncIterator] === 'function' + ) +} + +/** + * Convert AsyncIterable to ReadableStream with broad browser compatibility. + * Provides fallback for environments where ReadableStream.from() is not available. + * + * Uses pull-based streaming to implement proper backpressure and ensure all + * chunks are consumed in order. 
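
Taken together, the streams.ts helpers compose like this (a sketch; `uint8ArrayToAsyncIterable` is defined further down in the same file, and the `utils` subpath export is the one the tests already use):

```ts
import { asReadableStream, uint8ArrayToAsyncIterable } from '@filoz/synapse-core/utils'

// Chunk a buffer into an AsyncIterable, then normalize it to a
// ReadableStream; ReadableStream.from() is used when the runtime has it,
// otherwise the pull-based fallback applies backpressure chunk by chunk.
const chunks = uint8ArrayToAsyncIterable(new Uint8Array(10_000))
const stream = asReadableStream(chunks)
```
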
+ */ +export function asyncIterableToReadableStream(iterable: AsyncIterable): ReadableStream { + if (!isAsyncIterable(iterable)) { + throw new Error('Input must be an AsyncIterable') + } + + // Use native ReadableStream.from() if available + // See https://developer.mozilla.org/en-US/docs/Web/API/ReadableStream/from_static for latest + // support matrix, as of late 2025 this is still "Experimental" + if (typeof ReadableStream.from === 'function') { + return ReadableStream.from(iterable) + } + + // Fallback implementation using pull-based streaming + const iterator = iterable[Symbol.asyncIterator]() + + return new ReadableStream({ + async pull(controller) { + try { + const { value, done } = await iterator.next() + if (done) { + controller.close() + } else { + controller.enqueue(value) + } + } catch (error) { + // run cleanup on internal errors + if (iterator.return) { + try { + await iterator.return() + } catch { + // safely ignore + } + } + controller.error(error) + } + }, + async cancel() { + // Clean up iterator if stream is cancelled + if (iterator.return) { + await iterator.return() + } + }, + }) +} + +/** + * Convert Uint8Array to async iterable with optimal chunk size. + * + * Uses 2048-byte chunks for better hasher performance (determined by manual + * testing with Node.js; this will likely vary by environment). This may not be + * optimal for the streaming upload case, so further tuning may be needed to + * find the best balance between hasher performance and upload chunk size. + * + * @param data - Uint8Array to convert + * @param chunkSize - Size of chunks (default 2048) + * @returns AsyncIterable yielding chunks + */ +export async function* uint8ArrayToAsyncIterable( + data: Uint8Array, + chunkSize: number = 2048 +): AsyncIterable { + for (let i = 0; i < data.length; i += chunkSize) { + yield data.subarray(i, i + chunkSize) + } +} diff --git a/packages/synapse-core/src/warm-storage/data-sets.ts b/packages/synapse-core/src/warm-storage/data-sets.ts index 8990bb1b..6d9364b3 100644 --- a/packages/synapse-core/src/warm-storage/data-sets.ts +++ b/packages/synapse-core/src/warm-storage/data-sets.ts @@ -1,21 +1,13 @@ import type { AbiParametersToPrimitiveTypes, ExtractAbiFunction } from 'abitype' -import { - type Account, - type Address, - type Chain, - type Client, - encodeAbiParameters, - isAddressEqual, - type Transport, -} from 'viem' +import { type Account, type Address, type Chain, type Client, isAddressEqual, type Transport } from 'viem' import { multicall, readContract, simulateContract, writeContract } from 'viem/actions' import type * as Abis from '../abis/index.ts' -import { getChain } from '../chains.ts' +import { asChain, getChain } from '../chains.ts' import { DataSetNotFoundError } from '../errors/warm-storage.ts' import type { PieceCID } from '../piece.ts' import * as SP from '../sp.ts' -import { signAddPieces } from '../typed-data/sign-add-pieces.ts' import { signCreateDataSet } from '../typed-data/sign-create-dataset.ts' +import { signCreateDataSetAndAddPieces } from '../typed-data/sign-create-dataset-add-pieces.ts' import { capabilitiesListToObject } from '../utils/capabilities.ts' import { datasetMetadataObjectToEntry, @@ -211,35 +203,38 @@ export async function getDataSetMetadata(client: Client, dataS } export type CreateDataSetOptions = { + /** Whether the data set should use CDN. */ cdn: boolean + /** The address that will receive payments (service provider). */ payee: Address /** + * The address that will pay for the storage (client). 
If not provided, the default is the client address. * If client is from a session key this should be set to the actual payer address */ payer?: Address + /** The endpoint of the PDP API. */ endpoint: string + /** The metadata for the data set. */ metadata?: MetadataObject + /** The client data set id to use for the signature. Must be unique for each data set. */ + clientDataSetId?: bigint + /** The address of the record keeper to use for the signature. If not provided, the default is the Warm Storage contract address. */ + recordKeeper?: Address } /** * Create a data set * * @param client - The client to use to create the data set. - * @param options - The options for the create data set. - * @param options.payee - The address that will receive payments (service provider). - * @param options.payer - The address that will pay for the storage (client). - * @param options.endpoint - The endpoint of the PDP API. - * @param options.cdn - Whether the data set should use CDN. - * @param options.metadata - The metadata for the data set. + * @param options - {@link CreateDataSetOptions} * @returns The response from the create data set on PDP API. */ export async function createDataSet(client: Client, options: CreateDataSetOptions) { const chain = getChain(client.chain.id) - const nonce = randU256() // Sign and encode the create data set message const extraData = await signCreateDataSet(client, { - clientDataSetId: nonce, + clientDataSetId: options.clientDataSetId ?? randU256(), payee: options.payee, payer: options.payer, metadata: datasetMetadataObjectToEntry(options.metadata, { @@ -249,67 +244,70 @@ export async function createDataSet(client: Client, o return SP.createDataSet({ endpoint: options.endpoint, - recordKeeper: chain.contracts.storage.address, + recordKeeper: options.recordKeeper ?? chain.contracts.storage.address, extraData, }) } export type CreateDataSetAndAddPiecesOptions = { + /** The client data set id to use for the signature. Must be unique for each data set. */ + clientDataSetId?: bigint + /** The address of the record keeper to use for the signature. If not provided, the default is the Warm Storage contract address. */ + recordKeeper?: Address /** + * The address that will pay for the storage (client). If not provided, the default is the client address. + * * If client is from a session key this should be set to the actual payer address */ payer?: Address + /** The endpoint of the PDP API. */ endpoint: string + /** The address that will receive payments (service provider). */ payee: Address + /** Whether the data set should use CDN. */ cdn: boolean + /** The metadata for the data set. */ metadata?: MetadataObject + /** The pieces and metadata to add to the data set. */ pieces: { pieceCid: PieceCID; metadata?: MetadataObject }[] } +// biome-ignore lint/style/noNamespace: namespaced types +export namespace createDataSetAndAddPieces { + export type OptionsType = CreateDataSetAndAddPiecesOptions + export type ReturnType = SP.createDataSetAndAddPieces.ReturnType + export type ErrorType = SP.createDataSetAndAddPieces.ErrorType | asChain.ErrorType +} + /** * Create a data set and add pieces to it * * @param client - The client to use to create the data set. - * @param options - The options for the create data set. - * @param options.payer - The address that will pay for the storage (client). - * @param options.endpoint - The endpoint of the PDP API. - * @param options.payee - The address that will receive payments (service provider). 
- * @param options.cdn - Whether the data set should use CDN. - * @param options.metadata - The metadata for the data set. - * @returns The response from the create data set on PDP API. + * @param options - {@link CreateDataSetAndAddPiecesOptions} + * @returns The response from the create data set on PDP API. {@link createDataSetAndAddPieces.ReturnType} + * @throws Errors {@link createDataSetAndAddPieces.ErrorType} */ export async function createDataSetAndAddPieces( client: Client, options: CreateDataSetAndAddPiecesOptions -) { - const chain = getChain(client.chain.id) - const clientDataSetId = randU256() - // Sign and encode the create data set message - const dataSetExtraData = await signCreateDataSet(client, { - clientDataSetId, - payee: options.payee, - payer: options.payer, - metadata: datasetMetadataObjectToEntry(options.metadata, { - cdn: options.cdn, - }), - }) - - // Sign and encode the add pieces message - const addPiecesExtraData = await signAddPieces(client, { - clientDataSetId, - nonce: randU256(), - pieces: options.pieces.map((piece) => ({ - pieceCid: piece.pieceCid, - metadata: pieceMetadataObjectToEntry(piece.metadata), - })), - }) - - const extraData = encodeAbiParameters([{ type: 'bytes' }, { type: 'bytes' }], [dataSetExtraData, addPiecesExtraData]) +): Promise { + const chain = asChain(client.chain) return SP.createDataSetAndAddPieces({ endpoint: options.endpoint, - recordKeeper: chain.contracts.storage.address, - extraData, + recordKeeper: options.recordKeeper ?? chain.contracts.storage.address, + extraData: await signCreateDataSetAndAddPieces(client, { + clientDataSetId: options.clientDataSetId ?? randU256(), + payee: options.payee, + payer: options.payer, + metadata: datasetMetadataObjectToEntry(options.metadata, { + cdn: options.cdn, + }), + pieces: options.pieces.map((piece) => ({ + pieceCid: piece.pieceCid, + metadata: pieceMetadataObjectToEntry(piece.metadata), + })), + }), pieces: options.pieces.map((piece) => piece.pieceCid), }) } diff --git a/packages/synapse-core/src/warm-storage/index.ts b/packages/synapse-core/src/warm-storage/index.ts index e45cb4f9..e95e80ed 100644 --- a/packages/synapse-core/src/warm-storage/index.ts +++ b/packages/synapse-core/src/warm-storage/index.ts @@ -11,6 +11,7 @@ export * from './data-sets.ts' export * from './pieces.ts' export * from './providers.ts' +export * from './pull.ts' export * from './read-addresses.ts' export * from './service-price.ts' export * from './upload.ts' diff --git a/packages/synapse-core/src/warm-storage/pieces.ts b/packages/synapse-core/src/warm-storage/pieces.ts index d89dd644..055afde8 100644 --- a/packages/synapse-core/src/warm-storage/pieces.ts +++ b/packages/synapse-core/src/warm-storage/pieces.ts @@ -3,20 +3,27 @@ import pRetry from 'p-retry' import { type Account, type Address, type Chain, type Client, type Hex, hexToBytes, type Transport } from 'viem' import { getTransaction, readContract, waitForTransactionReceipt } from 'viem/actions' import { getChain } from '../chains.ts' +import { AtLeastOnePieceRequiredError } from '../errors/warm-storage.ts' import type { PieceCID } from '../piece.ts' import * as PDP from '../sp.ts' import { signAddPieces } from '../typed-data/sign-add-pieces.ts' import { signSchedulePieceRemovals } from '../typed-data/sign-schedule-piece-removals.ts' +import { RETRY_CONSTANTS } from '../utils/constants.ts' import { type MetadataObject, pieceMetadataObjectToEntry } from '../utils/metadata.ts' import { createPieceUrl } from '../utils/piece-url.ts' -import { randU256 } from 
'../utils/rand.ts' import type { DataSet } from './data-sets.ts' +export type PieceInputWithMetadata = { + pieceCid: PieceCID + metadata?: MetadataObject +} + export type AddPiecesOptions = { dataSetId: bigint clientDataSetId: bigint endpoint: string - pieces: { pieceCid: PieceCID; metadata?: MetadataObject }[] + pieces: PieceInputWithMetadata[] + nonce?: bigint } /** @@ -31,14 +38,16 @@ export type AddPiecesOptions = { * @returns The response from the add pieces operation. */ export async function addPieces(client: Client, options: AddPiecesOptions) { - const nonce = randU256() + if (options.pieces.length === 0) { + throw new AtLeastOnePieceRequiredError() + } return PDP.addPieces({ endpoint: options.endpoint, dataSetId: options.dataSetId, pieces: options.pieces.map((piece) => piece.pieceCid), extraData: await signAddPieces(client, { clientDataSetId: options.clientDataSetId, - nonce, + nonce: options.nonce, pieces: options.pieces.map((piece) => ({ pieceCid: piece.pieceCid, metadata: pieceMetadataObjectToEntry(piece.metadata), @@ -78,23 +87,23 @@ export async function deletePiece(client: Client, opt }) } -export type PollForDeletePieceStatusOptions = { +export type WaitForDeletePieceStatusOptions = { txHash: Hex } /** - * Poll for the delete piece status. + * Wait for the delete piece status. * * Waits for the transaction to be mined and then polls for the transaction receipt. * - * @param client - The client to use to poll for the delete piece status. - * @param options - The options for the poll for the delete piece status. + * @param client - The client to use to wait for the delete piece status. + * @param options - The options for the wait for the delete piece status. * @param options.txHash - The hash of the transaction to poll for. * @returns */ -export async function pollForDeletePieceStatus( +export async function waitForDeletePieceStatus( client: Client, - options: PollForDeletePieceStatusOptions + options: WaitForDeletePieceStatusOptions ) { try { await pRetry( @@ -108,10 +117,10 @@ export async function pollForDeletePieceStatus( return transaction }, { - factor: 1, - minTimeout: 4000, - retries: Infinity, - maxRetryTime: 180000, + factor: RETRY_CONSTANTS.FACTOR, + minTimeout: RETRY_CONSTANTS.DELAY_TIME, + retries: RETRY_CONSTANTS.RETRIES, + maxRetryTime: RETRY_CONSTANTS.MAX_RETRY_TIME, } ) } catch { diff --git a/packages/synapse-core/src/warm-storage/pull.ts b/packages/synapse-core/src/warm-storage/pull.ts new file mode 100644 index 00000000..7108f084 --- /dev/null +++ b/packages/synapse-core/src/warm-storage/pull.ts @@ -0,0 +1,265 @@ +import type { Account, Address, Chain, Client, Hex, Transport } from 'viem' +import { asChain } from '../chains.ts' +import type { PieceCID } from '../piece.ts' +import * as Pull from '../pull.ts' +import { signAddPieces } from '../typed-data/sign-add-pieces.ts' +import { signCreateDataSetAndAddPieces } from '../typed-data/sign-create-dataset-add-pieces.ts' +import type { MetadataEntry } from '../typed-data/type-definitions.ts' +import { datasetMetadataObjectToEntry, type MetadataObject, pieceMetadataObjectToEntry } from '../utils/metadata.ts' +import { randU256 } from '../utils/rand.ts' + +/** + * Input piece for a pull request with typed PieceCID. + */ +export type PullPieceInput = { + /** PieceCID for the piece */ + pieceCid: PieceCID + /** HTTPS URL to pull the piece from (must end in /piece/{pieceCid}) */ + sourceUrl: string + /** Optional metadata for the piece */ + metadata?: MetadataObject +} + +/** + * Base options for pulling pieces. 
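
Hypothetical usage of the reworked warm-storage `addPieces` (all values are placeholders, import subpaths assumed):

```ts
import type { PieceCID } from '@filoz/synapse-core/piece'
import { addPieces } from '@filoz/synapse-core/warm-storage'
import type { Account, Chain, Client, Transport } from 'viem'

// Placeholders; clientDataSetId must match the id the data set was
// created with.
declare const client: Client<Transport, Chain, Account>
declare const pieceCid: PieceCID

// Throws AtLeastOnePieceRequiredError when pieces is empty; the nonce is
// optional and randomized during signing when omitted.
const result = await addPieces(client, {
  dataSetId: 123n,
  clientDataSetId: 0n,
  endpoint: 'https://sp.example.com',
  pieces: [{ pieceCid, metadata: { name: 'file.bin' } }],
})
```
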
+ */ +type BasePullPiecesOptions = { + /** The endpoint of the PDP API. */ + endpoint: string + /** Pieces to pull with their source URLs. */ + pieces: PullPieceInput[] + /** Optional nonce for the add pieces signature. */ + nonce?: bigint + /** The address of the record keeper. If not provided, the default is the Warm Storage contract address. */ + recordKeeper?: Address + /** Optional AbortSignal to cancel the request. */ + signal?: AbortSignal +} + +/** + * Options for pulling pieces into an existing data set. + */ +export type PullToExistingDataSetOptions = BasePullPiecesOptions & { + /** The ID of the existing data set to add pieces to. */ + dataSetId: bigint + /** The client data set ID (used for signing). */ + clientDataSetId: bigint +} + +/** + * Options for creating a new data set and pulling pieces into it. + */ +export type PullToNewDataSetOptions = BasePullPiecesOptions & { + /** Omit or set to 0n to create a new data set. */ + dataSetId?: undefined | 0n + /** The client data set ID. Must be unique for each data set. If not provided, a random value is generated. */ + clientDataSetId?: bigint + /** The address that will receive payments (service provider). Required for new data sets. */ + payee: Address + /** + * The address that will pay for the storage (client). If not provided, the default is the client address. + * If client is from a session key this should be set to the actual payer address. + */ + payer?: Address + /** Whether the data set should use CDN. */ + cdn?: boolean + /** The metadata for the data set. */ + metadata?: MetadataObject +} + +// biome-ignore lint/style/noNamespace: namespaced types +export namespace pullPieces { + /** + * Options for pulling pieces from external SPs. + * Use dataSetId > 0n to add to an existing data set, or omit/0n to create a new one. + */ + export type OptionsType = PullToExistingDataSetOptions | PullToNewDataSetOptions + + export type ReturnType = Pull.SPFetchResponse + + export type ErrorType = + | Pull.fetchPieces.ErrorType + | signAddPieces.ErrorType + | signCreateDataSetAndAddPieces.ErrorType + | asChain.ErrorType +} + +/** + * Check if options are for adding to an existing data set. + */ +function isExistingDataSet(options: pullPieces.OptionsType): options is PullToExistingDataSetOptions { + return options.dataSetId != null && options.dataSetId > 0n +} + +/** + * Convert PullPieceInput to signing input format. + */ +function toSigningPieces(pieces: PullPieceInput[]): { pieceCid: PieceCID; metadata?: MetadataEntry[] }[] { + return pieces.map((piece) => ({ + pieceCid: piece.pieceCid, + metadata: pieceMetadataObjectToEntry(piece.metadata), + })) +} + +/** + * Convert PullPieceInput to SP pull input format. + */ +function toPullPieces(pieces: PullPieceInput[]): Pull.SPFetchPieceInput[] { + return pieces.map((piece) => ({ + pieceCid: piece.pieceCid.toString(), + sourceUrl: piece.sourceUrl, + })) +} + +/** + * Pull pieces from external storage providers into a data set. + * + * This function handles EIP-712 signing for authorization and calls the + * Curio POST /pdp/piece/pull endpoint. Curio verifies the client can pay + * by running an estimateGas on the resulting contract call. + * + * The endpoint is idempotent - calling with the same extraData returns + * the status of the existing request rather than creating duplicates. + * + * @param client - The viem client with account for signing. + * @param options - {@link pullPieces.OptionsType} + * @returns The current status of the pull operation. 
{@link pullPieces.ReturnType} + * @throws Errors {@link pullPieces.ErrorType} + * + * @example Pulling pieces into an existing data set: + * ```ts + * const response = await pullPieces(client, { + * endpoint: 'https://sp.example.com', + * dataSetId: 123n, + * clientDataSetId: dataSet.clientDataSetId, + * pieces: [{ + * pieceCid: parsedPieceCid, + * sourceUrl: 'https://source-sp.example.com/piece/bafk...', + * }], + * }) + * ``` + * + * @example Creating a new data set and pulling pieces: + * ```ts + * const response = await pullPieces(client, { + * endpoint: 'https://sp.example.com', + * payee: providerAddress, + * pieces: [{ + * pieceCid: parsedPieceCid, + * sourceUrl: 'https://source-sp.example.com/piece/bafk...', + * }], + * }) + * ``` + */ +export async function pullPieces( + client: Client, + options: pullPieces.OptionsType +): Promise { + const chain = asChain(client.chain) + const recordKeeper = options.recordKeeper ?? chain.contracts.storage.address + + let extraData: Hex + let dataSetId: bigint | undefined + + if (isExistingDataSet(options)) { + // Adding to existing data set - use signAddPieces + extraData = await signAddPieces(client, { + clientDataSetId: options.clientDataSetId, + nonce: options.nonce, + pieces: toSigningPieces(options.pieces), + }) + dataSetId = options.dataSetId + } else { + // Creating new data set - use signCreateDataSetAndAddPieces + extraData = await signCreateDataSetAndAddPieces(client, { + clientDataSetId: options.clientDataSetId ?? randU256(), + payee: options.payee, + payer: options.payer, + metadata: datasetMetadataObjectToEntry(options.metadata, { + cdn: options.cdn ?? false, + }), + nonce: options.nonce, + pieces: toSigningPieces(options.pieces), + }) + // dataSetId stays undefined for new data set + } + + return Pull.fetchPieces({ + endpoint: options.endpoint, + recordKeeper, + extraData, + dataSetId, + pieces: toPullPieces(options.pieces), + signal: options.signal, + }) +} + +// biome-ignore lint/style/noNamespace: namespaced types +export namespace waitForPullStatus { + /** + * Options for waiting for pull completion. + */ + export type OptionsType = pullPieces.OptionsType & { + /** Callback invoked on each poll with current status. */ + onStatus?: (response: Pull.SPFetchResponse) => void + /** Minimum time between poll attempts in milliseconds (default: 4000). */ + minTimeout?: number + } + + export type ReturnType = Pull.SPFetchResponse + + export type ErrorType = pullPieces.ErrorType +} + +/** + * Wait for pull completion. + * + * Repeatedly calls the pull endpoint until all pieces are complete or any piece fails. + * Since the endpoint is idempotent, this effectively polls for status updates. + * + * @param client - The viem client with account for signing. + * @param options - {@link waitForPullStatus.OptionsType} + * @returns The final status when complete or failed. {@link waitForPullStatus.ReturnType} + * @throws Errors {@link waitForPullStatus.ErrorType} + */ +export async function waitForPullStatus( + client: Client, + options: waitForPullStatus.OptionsType +): Promise { + const chain = asChain(client.chain) + const recordKeeper = options.recordKeeper ?? 
chain.contracts.storage.address + + let extraData: Hex + let dataSetId: bigint | undefined + + if (isExistingDataSet(options)) { + extraData = await signAddPieces(client, { + clientDataSetId: options.clientDataSetId, + nonce: options.nonce, + pieces: toSigningPieces(options.pieces), + }) + dataSetId = options.dataSetId + } else { + extraData = await signCreateDataSetAndAddPieces(client, { + clientDataSetId: options.clientDataSetId ?? randU256(), + payee: options.payee, + payer: options.payer, + metadata: datasetMetadataObjectToEntry(options.metadata, { + cdn: options.cdn ?? false, + }), + nonce: options.nonce, + pieces: toSigningPieces(options.pieces), + }) + } + + return Pull.waitForFetchStatus({ + endpoint: options.endpoint, + recordKeeper, + extraData, + dataSetId, + pieces: toPullPieces(options.pieces), + signal: options.signal, + onStatus: options.onStatus, + minTimeout: options.minTimeout, + }) +} diff --git a/packages/synapse-core/src/warm-storage/upload.ts b/packages/synapse-core/src/warm-storage/upload.ts index a8fff2b9..08497d5a 100644 --- a/packages/synapse-core/src/warm-storage/upload.ts +++ b/packages/synapse-core/src/warm-storage/upload.ts @@ -3,12 +3,25 @@ import * as Piece from '../piece.ts' import * as SP from '../sp.ts' import { signAddPieces } from '../typed-data/sign-add-pieces.ts' import { pieceMetadataObjectToEntry } from '../utils/metadata.ts' -import { randU256 } from '../utils/rand.ts' -import { getDataSet } from './data-sets.ts' +import { createPieceUrl } from '../utils/piece-url.ts' +import { type DataSet, getDataSet } from './data-sets.ts' + +interface Events { + pieceUploaded: { + pieceCid: Piece.PieceCID + dataSet: DataSet + } + pieceParked: { + pieceCid: Piece.PieceCID + url: string + dataSet: DataSet + } +} export type UploadOptions = { dataSetId: bigint data: File[] + onEvent?(event: T, data: Events[T]): void } export async function upload(client: Client, options: UploadOptions) { @@ -20,33 +33,42 @@ export async function upload(client: Client, options: options.data.map(async (file: File) => { const data = new Uint8Array(await file.arrayBuffer()) const pieceCid = Piece.calculate(data) + const url = createPieceUrl( + pieceCid.toString(), + dataSet.cdn, + client.account.address, + client.chain.id, + dataSet.pdp.serviceURL + ) + await SP.uploadPiece({ data, pieceCid, endpoint: dataSet.pdp.serviceURL, }) + options.onEvent?.('pieceUploaded', { pieceCid, dataSet }) await SP.findPiece({ pieceCid, endpoint: dataSet.pdp.serviceURL, }) + options.onEvent?.('pieceParked', { pieceCid, url, dataSet }) + return { pieceCid, + url, metadata: { name: file.name, type: file.type }, } }) ) - const nonce = randU256() - const addPieces = await SP.addPieces({ dataSetId: options.dataSetId, pieces: uploadResponses.map((response) => response.pieceCid), endpoint: dataSet.pdp.serviceURL, extraData: await signAddPieces(client, { clientDataSetId: dataSet.clientDataSetId, - nonce, pieces: uploadResponses.map((response) => ({ pieceCid: response.pieceCid, metadata: pieceMetadataObjectToEntry(response.metadata), @@ -54,5 +76,5 @@ export async function upload(client: Client, options: }), }) - return addPieces + return { ...addPieces, pieces: uploadResponses } } diff --git a/packages/synapse-sdk/src/test/piece.test.ts b/packages/synapse-core/test/piece.test.ts similarity index 98% rename from packages/synapse-sdk/src/test/piece.test.ts rename to packages/synapse-core/test/piece.test.ts index 15b0225e..4608f0aa 100644 --- a/packages/synapse-sdk/src/test/piece.test.ts +++ 
b/packages/synapse-core/test/piece.test.ts @@ -1,5 +1,3 @@ -/* globals describe it */ - /** * Basic tests for PieceCID utilities */ @@ -16,8 +14,8 @@ import { import type { API } from '@web3-storage/data-segment' import { Size, toLink } from '@web3-storage/data-segment/piece' import { assert } from 'chai' -import { ethers } from 'ethers' import { CID } from 'multiformats/cid' +import { bytesToHex } from 'viem/utils' // https://github.com/filecoin-project/go-fil-commp-hashhash/blob/master/testdata/zero.txt const zeroPieceCidFixture = ` @@ -347,7 +345,7 @@ describe('PieceCID utilities', () => { // Convert PieceCID to hex (simulating what comes from contract) const cidBytes = pieceCid.bytes - const hex = ethers.hexlify(cidBytes) + const hex = bytesToHex(cidBytes) // Use hexToPieceCID to convert back const result = hexToPieceCID(hex) @@ -376,7 +374,7 @@ describe('PieceCID utilities', () => { // This will pass CID.decode() but fail isValidPieceCID() const validCid = CID.parse(invalidCidString) const cidBytes = validCid.bytes - const hex = ethers.hexlify(cidBytes) + const hex = bytesToHex(cidBytes) assert.throws( () => { diff --git a/packages/synapse-core/test/pull.test.ts b/packages/synapse-core/test/pull.test.ts new file mode 100644 index 00000000..db923144 --- /dev/null +++ b/packages/synapse-core/test/pull.test.ts @@ -0,0 +1,237 @@ +/* globals describe it before after beforeEach */ + +/** + * Pull tests + * + * Tests the SP-to-SP piece pull functionality + */ + +import assert from 'assert' +import { setup } from 'iso-web/msw' +import { HttpResponse, http } from 'msw' +import { PullError } from '../src/errors/pull.ts' +import * as Mocks from '../src/mocks/index.ts' +import * as Pull from '../src/pull.ts' + +// Mock server for testing +const server = setup() + +describe('Pull', () => { + const TEST_ENDPOINT = 'http://pdp.local' + const TEST_RECORD_KEEPER = '0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f' as const + const TEST_EXTRA_DATA = '0x1234567890abcdef' as const + const TEST_PIECE_CID = 'bafkzcibcd4bdomn3tgwgrh3g532zopskstnbrd2n3sxfqbze7rxt7vqn7veigmy' + const TEST_SOURCE_URL = `https://other-sp.example.com/piece/${TEST_PIECE_CID}` + + const baseOptions = (): Pull.fetchPieces.OptionsType => ({ + endpoint: TEST_ENDPOINT, + recordKeeper: TEST_RECORD_KEEPER, + extraData: TEST_EXTRA_DATA, + pieces: [{ pieceCid: TEST_PIECE_CID, sourceUrl: TEST_SOURCE_URL }], + }) + + before(async () => { + await server.start() + }) + + after(() => { + server.stop() + }) + + beforeEach(() => { + server.resetHandlers() + Pull.setTimeout(1000) // Short timeout for tests + }) + + describe('fetchPieces', () => { + it('should handle successful pull request', async () => { + const mockResponse = Mocks.pull.createPullResponse('pending', [{ pieceCid: TEST_PIECE_CID }]) + + server.use(Mocks.pull.fetchPiecesHandler(mockResponse)) + + const result = await Pull.fetchPieces(baseOptions()) + + assert.strictEqual(result.status, 'pending') + assert.strictEqual(result.pieces.length, 1) + assert.strictEqual(result.pieces[0].pieceCid, TEST_PIECE_CID) + assert.strictEqual(result.pieces[0].status, 'pending') + }) + + it('should send correct request body', async () => { + let capturedRequest: Mocks.pull.PullRequestCapture | undefined + const mockResponse = Mocks.pull.createPullResponse('pending', [{ pieceCid: TEST_PIECE_CID }]) + + server.use( + Mocks.pull.fetchPiecesWithCaptureHandler(mockResponse, (req) => { + capturedRequest = req + }) + ) + + await Pull.fetchPieces(baseOptions()) + + assert.ok(capturedRequest, 'Request should have 
been captured') + assert.strictEqual(capturedRequest.recordKeeper, TEST_RECORD_KEEPER) + assert.strictEqual(capturedRequest.extraData, TEST_EXTRA_DATA) + assert.strictEqual(capturedRequest.pieces.length, 1) + assert.strictEqual(capturedRequest.pieces[0].pieceCid, TEST_PIECE_CID) + assert.strictEqual(capturedRequest.pieces[0].sourceUrl, TEST_SOURCE_URL) + }) + + it('should include dataSetId when provided', async () => { + let capturedRequest: Mocks.pull.PullRequestCapture | undefined + const mockResponse = Mocks.pull.createPullResponse('pending', [{ pieceCid: TEST_PIECE_CID }]) + + server.use( + Mocks.pull.fetchPiecesWithCaptureHandler(mockResponse, (req) => { + capturedRequest = req + }) + ) + + await Pull.fetchPieces({ ...baseOptions(), dataSetId: 123n }) + + assert.ok(capturedRequest, 'Request should have been captured') + assert.strictEqual(capturedRequest.dataSetId, 123) + }) + + it('should not include dataSetId when zero', async () => { + let capturedRequest: Mocks.pull.PullRequestCapture | undefined + const mockResponse = Mocks.pull.createPullResponse('pending', [{ pieceCid: TEST_PIECE_CID }]) + + server.use( + Mocks.pull.fetchPiecesWithCaptureHandler(mockResponse, (req) => { + capturedRequest = req + }) + ) + + await Pull.fetchPieces({ ...baseOptions(), dataSetId: 0n }) + + assert.ok(capturedRequest, 'Request should have been captured') + assert.strictEqual(capturedRequest.dataSetId, undefined) + }) + + it('should handle server errors', async () => { + server.use(Mocks.pull.fetchPiecesErrorHandler('extraData validation failed: invalid signature', 400)) + + try { + await Pull.fetchPieces(baseOptions()) + assert.fail('Should have thrown error') + } catch (error) { + assert.ok(error instanceof PullError, 'Error should be PullError') + assert.ok( + (error as PullError).message.includes('Failed to pull pieces'), + 'Error message should mention pull failure' + ) + } + }) + + it('should handle network errors', async () => { + server.use( + http.post('http://pdp.local/pdp/piece/pull', () => { + return HttpResponse.error() + }) + ) + + try { + await Pull.fetchPieces(baseOptions()) + assert.fail('Should have thrown error') + } catch (error) { + assert.ok((error as Error).message.includes('Failed to fetch'), 'Error message should mention fetch failure') + } + }) + + it('should handle mixed piece statuses', async () => { + const pieceCid2 = 'bafkzcibdy4hapci46px57mg3znrwydsv7x7rxisg7l7ti245wxwwfmiftgmdmbqk' + const mockResponse: Pull.PullResponse = { + status: 'inProgress', + pieces: [ + { pieceCid: TEST_PIECE_CID, status: 'complete' }, + { pieceCid: pieceCid2, status: 'inProgress' }, + ], + } + + server.use(Mocks.pull.fetchPiecesHandler(mockResponse)) + + const result = await Pull.fetchPieces({ + ...baseOptions(), + pieces: [ + { pieceCid: TEST_PIECE_CID, sourceUrl: TEST_SOURCE_URL }, + { pieceCid: pieceCid2, sourceUrl: `https://other-sp.example.com/piece/${pieceCid2}` }, + ], + }) + + assert.strictEqual(result.status, 'inProgress') + assert.strictEqual(result.pieces[0].status, 'complete') + assert.strictEqual(result.pieces[1].status, 'inProgress') + }) + }) + + describe('waitForFetchStatus', () => { + it('should poll until complete', async () => { + const mockResponse = Mocks.pull.createPullResponse('complete', [{ pieceCid: TEST_PIECE_CID }]) + + server.use(Mocks.pull.fetchPiecesPollingHandler(2, mockResponse)) + + const statusUpdates: Pull.PullStatus[] = [] + const result = await Pull.waitForFetchStatus({ + ...baseOptions(), + minTimeout: 10, + onStatus: (response) => 
statusUpdates.push(response.status), + }) + + assert.strictEqual(result.status, 'complete') + assert.ok(statusUpdates.length >= 2, 'Should have at least 2 status updates (pending + complete)') + }) + + it('should stop polling on failed status', async () => { + const mockResponse = Mocks.pull.createPullResponse('failed', [{ pieceCid: TEST_PIECE_CID }]) + + server.use(Mocks.pull.fetchPiecesPollingHandler(1, mockResponse)) + + const result = await Pull.waitForFetchStatus({ ...baseOptions(), minTimeout: 10 }) + + assert.strictEqual(result.status, 'failed') + }) + + it('should call onStatus callback for each poll', async () => { + server.use( + Mocks.pull.fetchPiecesProgressionHandler(['pending', 'inProgress', 'complete'], [{ pieceCid: TEST_PIECE_CID }]) + ) + + const statusUpdates: Pull.PullStatus[] = [] + await Pull.waitForFetchStatus({ + ...baseOptions(), + minTimeout: 10, + onStatus: (response) => statusUpdates.push(response.status), + }) + + // Check that all expected statuses were received + assert.ok(statusUpdates.includes('pending'), 'Should include pending status') + assert.ok(statusUpdates.includes('inProgress'), 'Should include inProgress status') + assert.ok(statusUpdates.includes('complete'), 'Should include complete status') + }) + + it('should handle server errors during polling', async () => { + server.use(Mocks.pull.fetchPiecesErrorHandler('Internal server error', 500)) + + try { + await Pull.waitForFetchStatus({ ...baseOptions(), minTimeout: 10 }) + assert.fail('Should have thrown error') + } catch (error) { + assert.ok(error instanceof PullError, 'Error should be PullError') + } + }) + }) + + describe('PullError', () => { + it('should have correct error name', () => { + const error = new PullError('test error') + assert.strictEqual(error.name, 'PullError') + }) + + it('should have static is() type guard', () => { + const error = new PullError('test error') + assert.strictEqual(PullError.is(error), true) + assert.strictEqual(PullError.is(new Error('not pull error')), false) + }) + }) +}) diff --git a/packages/synapse-sdk/src/test/rand.test.ts b/packages/synapse-core/test/rand.test.ts similarity index 99% rename from packages/synapse-sdk/src/test/rand.test.ts rename to packages/synapse-core/test/rand.test.ts index 09d4f168..7f45de0e 100644 --- a/packages/synapse-sdk/src/test/rand.test.ts +++ b/packages/synapse-core/test/rand.test.ts @@ -1,5 +1,3 @@ -/* globals describe it */ - import { fallbackRandIndex, fallbackRandU256, randIndex, randU256 } from '@filoz/synapse-core/utils' import { assert } from 'chai' diff --git a/packages/synapse-core/test/sp.test.ts b/packages/synapse-core/test/sp.test.ts new file mode 100644 index 00000000..78cd8732 --- /dev/null +++ b/packages/synapse-core/test/sp.test.ts @@ -0,0 +1,1625 @@ +import { assert } from 'chai' +import { setup } from 'iso-web/msw' +import { delay, HttpResponse, http } from 'msw' +import { createWalletClient, decodeAbiParameters, http as viemHttp } from 'viem' +import { privateKeyToAccount } from 'viem/accounts' +import type { Chain } from '../src/chains.ts' +import * as Chains from '../src/chains.ts' +import { + AddPiecesError, + CreateDataSetError, + DeletePieceError, + DownloadPieceError, + FindPieceError, + GetDataSetError, + InvalidUploadSizeError, + LocationHeaderError, + PostPieceError, + UploadPieceError, + WaitDataSetCreationStatusError, + WaitForAddPiecesStatusError, +} from '../src/errors/pdp.ts' +import { ADDRESSES, PRIVATE_KEYS } from '../src/mocks/index.ts' +import { + createAndAddPiecesHandler, + 
finalizePieceUploadHandler, + findPieceHandler, + postPieceHandler, + postPieceUploadsHandler, + uploadPieceHandler, + uploadPieceStreamingHandler, +} from '../src/mocks/pdp.ts' +import * as Piece from '../src/piece.ts' +import * as SP from '../src/sp.ts' +import * as TypedData from '../src/typed-data/index.ts' +import { SIZE_CONSTANTS } from '../src/utils/constants.ts' + +const chain: Chain = { + ...Chains.calibration, + id: 31337, +} + +const account = privateKeyToAccount(PRIVATE_KEYS.key1) +const client = createWalletClient({ + account, + chain, + transport: viemHttp(), +}) + +describe('SP', () => { + const server = setup() + + before(async () => { + await server.start() + }) + + after(() => { + server.stop() + }) + + beforeEach(() => { + server.resetHandlers() + }) + + describe('createDataSet', () => { + it('should handle dataset creation', async () => { + const mockTxHash = '0x1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef' + + server.use( + http.post('http://pdp.local/pdp/data-sets', async ({ request }) => { + const body = await request.json() + assert.strictEqual(body.extraData, extraData) + assert.strictEqual(body.recordKeeper, ADDRESSES.calibration.warmStorage) + + const decoded = decodeAbiParameters(TypedData.signCreateDataSetAbiParameters, body.extraData) + assert.strictEqual(decoded[0], client.account.address) + assert.strictEqual(decoded[1], 0n) + assert.deepStrictEqual(decoded[2], []) + assert.deepStrictEqual(decoded[3], []) + return new HttpResponse(null, { + status: 201, + headers: { Location: `/pdp/data-sets/created/${mockTxHash}` }, + }) + }) + ) + const extraData = await TypedData.signCreateDataSet(client, { + clientDataSetId: 0n, + payee: ADDRESSES.client1, + }) + const result = await SP.createDataSet({ + endpoint: 'http://pdp.local', + recordKeeper: ADDRESSES.calibration.warmStorage, + extraData, + }) + assert.strictEqual(result.txHash, mockTxHash) + assert.include(result.statusUrl, mockTxHash) + }) + + it('should handle dataset creation with metadata', async () => { + const mockTxHash = '0x1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef' + + server.use( + http.post('http://pdp.local/pdp/data-sets', async ({ request }) => { + const body = await request.json() + assert.strictEqual(body.extraData, extraData) + assert.strictEqual(body.recordKeeper, ADDRESSES.calibration.warmStorage) + + const decoded = decodeAbiParameters(TypedData.signCreateDataSetAbiParameters, body.extraData) + assert.strictEqual(decoded[0], client.account.address) + assert.strictEqual(decoded[1], 0n) + assert.deepStrictEqual(decoded[2], ['name']) + assert.deepStrictEqual(decoded[3], ['test']) + return new HttpResponse(null, { + status: 201, + headers: { Location: `/pdp/data-sets/created/${mockTxHash}` }, + }) + }) + ) + const extraData = await TypedData.signCreateDataSet(client, { + clientDataSetId: 0n, + payee: ADDRESSES.client1, + metadata: [{ key: 'name', value: 'test' }], + }) + const result = await SP.createDataSet({ + endpoint: 'http://pdp.local', + recordKeeper: ADDRESSES.calibration.warmStorage, + extraData, + }) + assert.strictEqual(result.txHash, mockTxHash) + assert.include(result.statusUrl, mockTxHash) + }) + + it('should fail with bad location header', async () => { + server.use( + http.post('http://pdp.local/pdp/data-sets', () => { + return new HttpResponse(null, { + status: 201, + headers: { Location: `/pdp/data-sets/created/invalid-hash` }, + }) + }) + ) + const extraData = await TypedData.signCreateDataSet(client, { + clientDataSetId: 0n, + payee: 
ADDRESSES.client1, + }) + try { + await SP.createDataSet({ + endpoint: 'http://pdp.local', + recordKeeper: ADDRESSES.calibration.warmStorage, + extraData, + }) + assert.fail('Should have thrown error for bad location header') + } catch (e) { + const error = e as SP.createDataSet.ErrorType + assert.instanceOf(error, LocationHeaderError) + assert.equal(error.message, 'Location header format is invalid: /pdp/data-sets/created/invalid-hash') + } + }) + + it('should fail with no location header', async () => { + server.use( + http.post('http://pdp.local/pdp/data-sets', () => { + return new HttpResponse(null, { + status: 201, + headers: {}, + }) + }) + ) + const extraData = await TypedData.signCreateDataSet(client, { + clientDataSetId: 0n, + payee: ADDRESSES.client1, + }) + try { + await SP.createDataSet({ + endpoint: 'http://pdp.local', + recordKeeper: ADDRESSES.calibration.warmStorage, + extraData, + }) + assert.fail('Should have thrown error for no Location header') + } catch (e) { + const error = e as SP.createDataSet.ErrorType + assert.instanceOf(error, LocationHeaderError) + assert.equal(error.message, 'Location header format is invalid: ') + } + }) + + it('should fail with CreateDataSetError - string error', async () => { + server.use( + http.post('http://pdp.local/pdp/data-sets', () => { + return HttpResponse.text( + `Failed to send transaction: failed to estimate gas: message execution failed (exit=[33], revert reason=[message failed with backtrace: +00: f0169791 (method 3844450837) -- contract reverted at 75 (33) +01: f0169791 (method 6) -- contract reverted at 4535 (33) +02: f0169800 (method 3844450837) -- contract reverted at 75 (33) +03: f0169800 (method 6) -- contract reverted at 10988 (33) +04: f0169792 (method 3844450837) -- contract reverted at 1775 (33) + (RetCode=33)], vm error=[Error(invariant failure: insufficient funds to cover lockup after function execution)]) +`, + { + status: 500, + } + ) + }) + ) + try { + await SP.createDataSet({ + endpoint: 'http://pdp.local', + recordKeeper: ADDRESSES.calibration.warmStorage, + extraData: await TypedData.signCreateDataSet(client, { + clientDataSetId: 0n, + payee: ADDRESSES.client1, + }), + }) + assert.fail('Should have thrown error for CreateDataSetError error') + } catch (e) { + const error = e as SP.createDataSet.ErrorType + assert.instanceOf(error, CreateDataSetError) + assert.equal(error.shortMessage, 'Failed to create data set.') + assert.equal( + error.message, + `Failed to create data set. 
+ +Details: +invariant failure: insufficient funds to cover lockup after function execution` + ) + } + }) + + it('should fail with CreateDataSetError - typed error', async () => { + server.use( + http.post('http://pdp.local/pdp/data-sets', () => { + return HttpResponse.text( + `Failed to send transaction: failed to estimate gas: message execution failed (exit=[33], revert reason=[message failed with backtrace: +00: f0169791 (method 3844450837) -- contract reverted at 75 (33) +01: f0169791 (method 6) -- contract reverted at 4535 (33) +02: f0169800 (method 3844450837) -- contract reverted at 75 (33) +03: f0169800 (method 6) -- contract reverted at 18957 (33) + (RetCode=33)], vm error=[0x42d750dc0000000000000000000000007e4abd63a7c8314cc28d388303472353d884f292000000000000000000000000b0ff6622d99a325151642386f65ab33a08c30213]) +`, + { + status: 500, + } + ) + }) + ) + try { + await SP.createDataSet({ + endpoint: 'http://pdp.local', + recordKeeper: ADDRESSES.calibration.warmStorage, + extraData: await TypedData.signCreateDataSet(client, { + clientDataSetId: 0n, + payee: ADDRESSES.client1, + }), + }) + assert.fail('Should have thrown error for CreateDataSetError error') + } catch (error) { + assert.instanceOf(error, CreateDataSetError) + assert.equal(error.shortMessage, 'Failed to create data set.') + assert.equal( + error.message, + `Failed to create data set. + +Details: Warm Storage +InvalidSignature(address expected, address actual) + (0x7e4ABd63A7C8314Cc28D388303472353D884f292, 0xb0fF6622D99A325151642386F65AB33a08c30213)` + ) + } + }) + + it('should fail with CreateDataSetError - reversed typed error', async () => { + server.use( + http.post('http://pdp.local/pdp/data-sets', () => { + return HttpResponse.text( + `Failed to send transaction: failed to estimate gas: message execution failed (exit=[33], vm error=[message failed with backtrace: +00: f0169791 (method 3844450837) -- contract reverted at 75 (33) +01: f0169791 (method 6) -- contract reverted at 4535 (33) +02: f0169800 (method 3844450837) -- contract reverted at 75 (33) +03: f0169800 (method 6) -- contract reverted at 18957 (33) +(RetCode=33)], revert reason=[0x42d750dc0000000000000000000000007e4abd63a7c8314cc28d388303472353d884f292000000000000000000000000b0ff6622d99a325151642386f65ab33a08c30213]) +`, + { + status: 500, + } + ) + }) + ) + try { + await SP.createDataSet({ + endpoint: 'http://pdp.local', + recordKeeper: ADDRESSES.calibration.warmStorage, + extraData: await TypedData.signCreateDataSet(client, { + clientDataSetId: 0n, + payee: ADDRESSES.client1, + }), + }) + assert.fail('Should have thrown error for CreateDataSetError error') + } catch (error) { + assert.instanceOf(error, CreateDataSetError) + assert.equal(error.shortMessage, 'Failed to create data set.') + assert.equal( + error.message, + `Failed to create data set. 
+ +Details: Warm Storage +InvalidSignature(address expected, address actual) + (0x7e4ABd63A7C8314Cc28D388303472353D884f292, 0xb0fF6622D99A325151642386F65AB33a08c30213)` + ) + } + }) + }) + + describe('waitForDataSetCreationStatus', () => { + it('should handle successful status check', async () => { + const mockTxHash = '0x1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef' + const mockResponse: SP.DataSetCreateSuccess = { + createMessageHash: mockTxHash, + dataSetCreated: true, + service: 'test-service', + txStatus: 'confirmed', + ok: true, + dataSetId: 123, + } + + server.use( + http.get('http://pdp.local/pdp/data-sets/created/:tx', ({ params }) => { + assert.strictEqual(params.tx, mockTxHash) + return HttpResponse.json(mockResponse, { + status: 200, + }) + }) + ) + + const result = await SP.waitForDataSetCreationStatus({ + statusUrl: `http://pdp.local/pdp/data-sets/created/${mockTxHash}`, + }) + assert.deepStrictEqual(result, mockResponse) + }) + + it('should handle pending then confirmed status', async function () { + this.timeout(10000) + + const mockTxHash = '0x1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef' + let callCount = 0 + + const pendingResponse: SP.DataSetCreatedResponse = { + createMessageHash: mockTxHash, + dataSetCreated: false, + service: 'test-service', + txStatus: 'pending', + ok: false, + } + + const confirmedResponse: SP.DataSetCreateSuccess = { + createMessageHash: mockTxHash, + dataSetCreated: true, + service: 'test-service', + txStatus: 'confirmed', + ok: true, + dataSetId: 123, + } + + server.use( + http.get('http://pdp.local/pdp/data-sets/created/:tx', () => { + callCount++ + if (callCount === 1) { + return HttpResponse.json(pendingResponse, { status: 200 }) + } + return HttpResponse.json(confirmedResponse, { status: 200 }) + }) + ) + + const result = await SP.waitForDataSetCreationStatus({ + statusUrl: `http://pdp.local/pdp/data-sets/created/${mockTxHash}`, + }) + assert.strictEqual(result.dataSetCreated, true) + assert.strictEqual(result.dataSetId, 123) + assert.isTrue(callCount >= 2, 'Should have polled at least twice') + }) + + it('should handle server errors', async () => { + const mockTxHash = '0x1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef' + + server.use( + http.get('http://pdp.local/pdp/data-sets/created/:tx', () => { + return HttpResponse.text('Database error', { + status: 500, + }) + }) + ) + + try { + await SP.waitForDataSetCreationStatus({ + statusUrl: `http://pdp.local/pdp/data-sets/created/${mockTxHash}`, + }) + assert.fail('Should have thrown error for server error') + } catch (error) { + assert.instanceOf(error, WaitDataSetCreationStatusError) + assert.include(error.message, 'Failed to wait for data set creation status') + } + }) + + it('should handle timeout', async () => { + const mockTxHash = '0x1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef' + const mockResponse: SP.DataSetCreateSuccess = { + createMessageHash: mockTxHash, + dataSetCreated: true, + service: 'test-service', + txStatus: 'confirmed', + ok: true, + dataSetId: 123, + } + + server.use( + http.get('http://pdp.local/pdp/data-sets/created/:tx', async () => { + await delay(150) + return HttpResponse.json(mockResponse, { + status: 200, + }) + }) + ) + + SP.setTimeout(100) + + try { + await SP.waitForDataSetCreationStatus({ + statusUrl: `http://pdp.local/pdp/data-sets/created/${mockTxHash}`, + }) + assert.fail('Should have thrown timeout error') + } catch (error) { + assert.instanceOf(error, SP.TimeoutError) + 
assert.include(error.message, 'Request timed out after 100ms') + } finally { + SP.resetTimeout() + } + }) + }) + + describe('createDataSetAndAddPieces', () => { + it('should handle successful data set creation', async () => { + const mockTxHash = '0x1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef' + const pieceCid = 'bafkzcibcd4bdomn3tgwgrh3g532zopskstnbrd2n3sxfqbze7rxt7vqn7veigmy' + server.use(createAndAddPiecesHandler(mockTxHash)) + + const result = await SP.createDataSetAndAddPieces({ + endpoint: 'http://pdp.local', + recordKeeper: ADDRESSES.calibration.warmStorage, + pieces: [Piece.parse(pieceCid)], + extraData: await TypedData.signCreateDataSetAndAddPieces(client, { + clientDataSetId: 0n, + payee: ADDRESSES.client1, + pieces: [{ pieceCid: Piece.parse(pieceCid) }], + }), + }) + assert.strictEqual(result.txHash, mockTxHash) + assert.include(result.statusUrl, mockTxHash) + }) + }) + + describe('addPieces', () => { + const validPieceCid = 'bafkzcibcd4bdomn3tgwgrh3g532zopskstnbrd2n3sxfqbze7rxt7vqn7veigmy' + + it('should handle successful piece addition', async () => { + const mockTxHash = '0xabcdef1234567890abcdef1234567890abcdef1234567890abcdef1234567890' + const pieceCid = Piece.parse(validPieceCid) + + server.use( + http.post<{ id: string }, SP.addPieces.RequestBody>( + 'http://pdp.local/pdp/data-sets/:id/pieces', + async ({ request, params }) => { + const body = await request.json() + assert.isDefined(body.pieces) + assert.isDefined(body.extraData) + const decoded = decodeAbiParameters(TypedData.signAddPiecesAbiParameters, body.extraData) + assert.strictEqual(decoded[0], 1n) + assert.deepStrictEqual(decoded[1], [[]]) + assert.deepStrictEqual(decoded[2], [[]]) + assert.strictEqual(body.pieces.length, 1) + assert.strictEqual(body.pieces[0].pieceCid, validPieceCid) + assert.strictEqual(body.pieces[0].subPieces.length, 1) + assert.strictEqual(body.pieces[0].subPieces[0].subPieceCid, validPieceCid) + return new HttpResponse(null, { + status: 201, + headers: { + Location: `/pdp/data-sets/${params.id}/pieces/added/${mockTxHash}`, + }, + }) + } + ) + ) + + const extraData = await TypedData.signAddPieces(client, { + nonce: 1n, + clientDataSetId: 0n, + pieces: [{ pieceCid }], + }) + + const result = await SP.addPieces({ + endpoint: 'http://pdp.local', + dataSetId: 1n, + pieces: [pieceCid], + extraData, + }) + + assert.strictEqual(result.txHash, mockTxHash) + assert.include(result.statusUrl, mockTxHash) + assert.include(result.statusUrl, '/pdp/data-sets/1/pieces/added/') + }) + + it('should handle server errors appropriately', async () => { + const pieceCid = Piece.parse(validPieceCid) + + server.use( + http.post('http://pdp.local/pdp/data-sets/:id/pieces', () => { + return HttpResponse.text('Invalid piece CID', { + status: 400, + statusText: 'Bad Request', + }) + }) + ) + + const extraData = await TypedData.signAddPieces(client, { + clientDataSetId: 0n, + pieces: [{ pieceCid }], + }) + + try { + await SP.addPieces({ + endpoint: 'http://pdp.local', + dataSetId: 1n, + pieces: [pieceCid], + extraData, + }) + assert.fail('Should have thrown error for server error') + } catch (error) { + assert.instanceOf(error, AddPiecesError) + assert.equal(error.shortMessage, 'Failed to add pieces.') + assert.include(error.message, 'Invalid piece CID') + } + }) + + it('should handle multiple pieces', async () => { + const mockTxHash = '0xabcdef1234567890abcdef1234567890abcdef1234567890abcdef1234567890' + const pieceCid1 = Piece.parse(validPieceCid) + const pieceCid2 = Piece.parse(validPieceCid) + 
+ server.use( + http.post<{ id: string }, SP.addPieces.RequestBody>( + 'http://pdp.local/pdp/data-sets/:id/pieces', + async ({ request, params }) => { + const body = await request.json() + assert.strictEqual(body.pieces.length, 2) + assert.strictEqual(body.pieces[0].subPieces.length, 1) + assert.strictEqual(body.pieces[1].subPieces.length, 1) + assert.strictEqual(body.pieces[0].pieceCid, body.pieces[0].subPieces[0].subPieceCid) + assert.strictEqual(body.pieces[1].pieceCid, body.pieces[1].subPieces[0].subPieceCid) + + return new HttpResponse(null, { + status: 201, + headers: { + Location: `/pdp/data-sets/${params.id}/pieces/added/${mockTxHash}`, + }, + }) + } + ) + ) + + const extraData = await TypedData.signAddPieces(client, { + clientDataSetId: 0n, + pieces: [{ pieceCid: pieceCid1 }, { pieceCid: pieceCid2 }], + }) + + const result = await SP.addPieces({ + endpoint: 'http://pdp.local', + dataSetId: 1n, + pieces: [pieceCid1, pieceCid2], + extraData, + }) + + assert.strictEqual(result.txHash, mockTxHash) + assert.include(result.statusUrl, mockTxHash) + }) + + it('should fail with bad location header', async () => { + const pieceCid = Piece.parse(validPieceCid) + + server.use( + http.post('http://pdp.local/pdp/data-sets/:id/pieces', () => { + return new HttpResponse(null, { + status: 201, + headers: { Location: `/pdp/data-sets/1/pieces/added/invalid-hash` }, + }) + }) + ) + + const extraData = await TypedData.signAddPieces(client, { + clientDataSetId: 0n, + pieces: [{ pieceCid }], + }) + + try { + await SP.addPieces({ + endpoint: 'http://pdp.local', + dataSetId: 1n, + pieces: [pieceCid], + extraData, + }) + assert.fail('Should have thrown error for bad location header') + } catch (error) { + assert.instanceOf(error, LocationHeaderError) + assert.include(error.message, 'Location header format is invalid') + } + }) + + it('should fail with no location header', async () => { + const pieceCid = Piece.parse(validPieceCid) + + server.use( + http.post('http://pdp.local/pdp/data-sets/:id/pieces', () => { + return new HttpResponse(null, { + status: 201, + headers: {}, + }) + }) + ) + + const extraData = await TypedData.signAddPieces(client, { + clientDataSetId: 0n, + pieces: [{ pieceCid }], + }) + + try { + await SP.addPieces({ + endpoint: 'http://pdp.local', + dataSetId: 1n, + pieces: [pieceCid], + extraData, + }) + assert.fail('Should have thrown error for no location header') + } catch (error) { + assert.instanceOf(error, LocationHeaderError) + assert.include(error.message, 'Location header format is invalid: ') + } + }) + }) + + describe('waitForAddPiecesStatus', () => { + it('should handle successful status check', async () => { + const mockTxHash = '0x7890abcdef1234567890abcdef1234567890abcdef1234567890abcdef123456' + const mockResponse: SP.AddPiecesSuccess = { + txHash: mockTxHash, + txStatus: 'confirmed', + dataSetId: 1, + pieceCount: 2, + addMessageOk: true, + confirmedPieceIds: [101, 102], + piecesAdded: true, + } + + server.use( + http.get('http://pdp.local/pdp/data-sets/:id/pieces/added/:txHash', ({ params }) => { + assert.strictEqual(params.id, '1') + assert.strictEqual(params.txHash, mockTxHash) + + return HttpResponse.json(mockResponse, { + status: 200, + }) + }) + ) + + const result = await SP.waitForAddPiecesStatus({ + statusUrl: `http://pdp.local/pdp/data-sets/1/pieces/added/${mockTxHash}`, + }) + assert.deepStrictEqual(result, mockResponse) + }) + + it('should handle pending then confirmed status', async function () { + this.timeout(10000) // Increase timeout for polling test + + 
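// Poll fixtures below: the first response reports 'pending', every later one 'confirmed', so the helper must retry at least once. +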
const mockTxHash = '0x7890abcdef1234567890abcdef1234567890abcdef1234567890abcdef123456' + let callCount = 0 + + const pendingResponse: SP.AddPiecesResponse = { + txHash: mockTxHash, + txStatus: 'pending', + dataSetId: 1, + pieceCount: 2, + addMessageOk: null, + piecesAdded: false, + } + + const confirmedResponse: SP.AddPiecesSuccess = { + txHash: mockTxHash, + txStatus: 'confirmed', + dataSetId: 1, + pieceCount: 2, + addMessageOk: true, + confirmedPieceIds: [101, 102], + piecesAdded: true, + } + + server.use( + http.get('http://pdp.local/pdp/data-sets/:id/pieces/added/:txHash', () => { + callCount++ + // First call returns pending, subsequent calls return confirmed + if (callCount === 1) { + return HttpResponse.json(pendingResponse, { status: 200 }) + } + return HttpResponse.json(confirmedResponse, { status: 200 }) + }) + ) + + const result = await SP.waitForAddPiecesStatus({ + statusUrl: `http://pdp.local/pdp/data-sets/1/pieces/added/${mockTxHash}`, + }) + assert.strictEqual(result.txStatus, 'confirmed') + assert.strictEqual(result.piecesAdded, true) + assert.deepStrictEqual(result.confirmedPieceIds, [101, 102]) + assert.isTrue(callCount >= 2, 'Should have polled at least twice') + }) + + it('should handle server errors', async () => { + const mockTxHash = '0x7890abcdef1234567890abcdef1234567890abcdef1234567890abcdef123456' + server.use( + http.get('http://pdp.local/pdp/data-sets/:id/pieces/added/:txHash', () => { + return HttpResponse.text('Database error', { + status: 500, + }) + }) + ) + + try { + await SP.waitForAddPiecesStatus({ + statusUrl: `http://pdp.local/pdp/data-sets/1/pieces/added/${mockTxHash}`, + }) + assert.fail('Should have thrown error for server error') + } catch (error) { + assert.instanceOf(error, WaitForAddPiecesStatusError) + assert.include(error.message, 'Failed to wait for add pieces status') + } + }) + + it('should handle timeout status check', async () => { + const mockTxHash = '0x7890abcdef1234567890abcdef1234567890abcdef1234567890abcdef123456' + const mockResponse: SP.AddPiecesSuccess = { + txHash: mockTxHash, + txStatus: 'confirmed', + dataSetId: 1, + pieceCount: 2, + addMessageOk: true, + confirmedPieceIds: [101, 102], + piecesAdded: true, + } + + server.use( + http.get('http://pdp.local/pdp/data-sets/:id/pieces/added/:txHash', async ({ params }) => { + assert.strictEqual(params.id, '1') + assert.strictEqual(params.txHash, mockTxHash) + + await delay(150) + return HttpResponse.json(mockResponse, { + status: 200, + }) + }) + ) + + SP.setTimeout(100) + + try { + const result = await SP.waitForAddPiecesStatus({ + statusUrl: `http://pdp.local/pdp/data-sets/1/pieces/added/${mockTxHash}`, + }) + assert.deepStrictEqual(result, mockResponse) + } catch (error) { + assert.instanceOf(error, SP.TimeoutError) + assert.include(error.message, 'Request timed out after 100ms') + } finally { + SP.resetTimeout() + } + }) + }) + + describe('deletePiece', () => { + it('should handle successful delete', async () => { + const mockTxHash = '0x1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef' + const mockResponse = { + txHash: mockTxHash, + } + + server.use( + http.delete('http://pdp.local/pdp/data-sets/1/pieces/2', async ({ request }) => { + const body = (await request.json()) as { extraData: string } + assert.hasAllKeys(body, ['extraData']) + assert.isDefined(body.extraData) + return HttpResponse.json(mockResponse, { + status: 200, + }) + }) + ) + + const extraData = await TypedData.signSchedulePieceRemovals(client, { + clientDataSetId: 0n, + pieceIds: [2n], + }) + 
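// The signed extraData authorises removing piece #2; deletePiece submits it to the SP's DELETE endpoint.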
+ const result = await SP.deletePiece({ + endpoint: 'http://pdp.local', + dataSetId: 1n, + pieceId: 2n, + extraData, + }) + + assert.strictEqual(result.txHash, mockTxHash) + }) + + it('should handle server errors', async () => { + server.use( + http.delete('http://pdp.local/pdp/data-sets/1/pieces/2', async () => { + return HttpResponse.text('Database error', { + status: 500, + }) + }) + ) + + const extraData = await TypedData.signSchedulePieceRemovals(client, { + clientDataSetId: 0n, + pieceIds: [2n], + }) + + try { + await SP.deletePiece({ + endpoint: 'http://pdp.local', + dataSetId: 1n, + pieceId: 2n, + extraData, + }) + assert.fail('Should have thrown error for server error') + } catch (error) { + assert.instanceOf(error, DeletePieceError) + assert.equal(error.shortMessage, 'Failed to delete piece.') + assert.include(error.message, 'Database error') + } + }) + }) + + describe('findPiece', () => { + const mockPieceCidStr = 'bafkzcibcd4bdomn3tgwgrh3g532zopskstnbrd2n3sxfqbze7rxt7vqn7veigmy' + + it('should find a piece successfully', async () => { + const pieceCid = Piece.parse(mockPieceCidStr) + + server.use(findPieceHandler(mockPieceCidStr, true)) + + const result = await SP.findPiece({ + endpoint: 'http://pdp.local', + pieceCid, + }) + assert.strictEqual(result.toString(), mockPieceCidStr) + }) + + it('should handle piece not found (timeout)', async () => { + SP.setTimeout(100) + const pieceCid = Piece.parse(mockPieceCidStr) + + server.use(findPieceHandler(mockPieceCidStr, false)) + + try { + await SP.findPiece({ + endpoint: 'http://pdp.local', + pieceCid, + }) + assert.fail('Should have thrown error for not found') + } catch (error) { + assert.instanceOf(error, FindPieceError) + assert.equal(error.shortMessage, 'Failed to find piece.') + assert.include(error.message, 'Timeout waiting for piece to be found') + } finally { + SP.resetTimeout() + } + }) + + it('should handle server errors', async () => { + const pieceCid = Piece.parse(mockPieceCidStr) + + server.use( + http.get('http://pdp.local/pdp/piece', () => { + return HttpResponse.text('Database error', { + status: 500, + }) + }) + ) + + try { + await SP.findPiece({ + endpoint: 'http://pdp.local', + pieceCid, + }) + assert.fail('Should have thrown error for server error') + } catch (error) { + assert.instanceOf(error, FindPieceError) + assert.equal(error.shortMessage, 'Failed to find piece.') + assert.include(error.message, 'Database error') + } + }) + + it('should retry on 202 status and eventually succeed', async function () { + this.timeout(10000) + const pieceCid = Piece.parse(mockPieceCidStr) + let attemptCount = 0 + + server.use( + http.get('http://pdp.local/pdp/piece', () => { + attemptCount++ + // Return 202 for first 2 attempts, then 200 + if (attemptCount < 3) { + return HttpResponse.json({ message: 'Processing' }, { status: 202 }) + } + return HttpResponse.json({ pieceCid: mockPieceCidStr }, { status: 200 }) + }) + ) + + const result = await SP.findPiece({ + endpoint: 'http://pdp.local', + pieceCid, + }) + assert.strictEqual(result.toString(), mockPieceCidStr) + assert.isAtLeast(attemptCount, 3, 'Should have retried at least 3 times') + }) + }) + + describe('uploadPiece', () => { + const mockPieceCidStr = 'bafkzcibcd4bdomn3tgwgrh3g532zopskstnbrd2n3sxfqbze7rxt7vqn7veigmy' + const mockUuid = '12345678-1234-1234-1234-123456789012' + + // Create valid test data (minimum 127 bytes) + function createTestData(size: number): Uint8Array { + return new Uint8Array(size).fill(0x42) + } + + it('should upload a piece successfully', async
() => { + const pieceCid = Piece.parse(mockPieceCidStr) + const testData = createTestData(SIZE_CONSTANTS.MIN_UPLOAD_SIZE) + + server.use(postPieceHandler(mockPieceCidStr, mockUuid), uploadPieceHandler(mockUuid)) + + // Should not throw + await SP.uploadPiece({ + endpoint: 'http://pdp.local', + data: testData, + pieceCid, + }) + }) + + it('should handle piece already exists', async () => { + const pieceCid = Piece.parse(mockPieceCidStr) + const testData = createTestData(SIZE_CONSTANTS.MIN_UPLOAD_SIZE) + + // postPieceHandler without uuid returns 200 (piece exists) + server.use(postPieceHandler(mockPieceCidStr)) + + // Should not throw - early return when piece exists + await SP.uploadPiece({ + endpoint: 'http://pdp.local', + data: testData, + pieceCid, + }) + }) + + it('should fail with size too small', async () => { + const pieceCid = Piece.parse(mockPieceCidStr) + const testData = createTestData(SIZE_CONSTANTS.MIN_UPLOAD_SIZE - 1) + + try { + await SP.uploadPiece({ + endpoint: 'http://pdp.local', + data: testData, + pieceCid, + }) + assert.fail('Should have thrown error for size too small') + } catch (error) { + assert.instanceOf(error, InvalidUploadSizeError) + assert.include(error.message, 'Invalid upload size') + } + }) + + it('should fail with size too large', async () => { + const pieceCid = Piece.parse(mockPieceCidStr) + // Create a typed array descriptor without actually allocating the memory + const testData = { length: SIZE_CONSTANTS.MAX_UPLOAD_SIZE + 1 } as Uint8Array + + try { + await SP.uploadPiece({ + endpoint: 'http://pdp.local', + data: testData, + pieceCid, + }) + assert.fail('Should have thrown error for size too large') + } catch (error) { + assert.instanceOf(error, InvalidUploadSizeError) + assert.include(error.message, 'Invalid upload size') + } + }) + + it('should fail with invalid Location header', async () => { + const pieceCid = Piece.parse(mockPieceCidStr) + const testData = createTestData(SIZE_CONSTANTS.MIN_UPLOAD_SIZE) + + server.use( + http.post('http://pdp.local/pdp/piece', () => { + return new HttpResponse(null, { + status: 201, + headers: {}, + }) + }) + ) + + try { + await SP.uploadPiece({ + endpoint: 'http://pdp.local', + data: testData, + pieceCid, + }) + assert.fail('Should have thrown error for missing Location header') + } catch (error) { + assert.instanceOf(error, LocationHeaderError) + assert.include(error.message, 'Location header format is invalid') + } + }) + + it('should handle POST errors', async () => { + const pieceCid = Piece.parse(mockPieceCidStr) + const testData = createTestData(SIZE_CONSTANTS.MIN_UPLOAD_SIZE) + + server.use( + http.post('http://pdp.local/pdp/piece', () => { + return HttpResponse.text('Server error', { status: 500 }) + }) + ) + + try { + await SP.uploadPiece({ + endpoint: 'http://pdp.local', + data: testData, + pieceCid, + }) + assert.fail('Should have thrown error for POST error') + } catch (error) { + assert.instanceOf(error, PostPieceError) + assert.include(error.message, 'Failed to create upload session') + } + }) + + it('should handle PUT errors', async () => { + const pieceCid = Piece.parse(mockPieceCidStr) + const testData = createTestData(SIZE_CONSTANTS.MIN_UPLOAD_SIZE) + + server.use( + postPieceHandler(mockPieceCidStr, mockUuid), + http.put(`http://pdp.local/pdp/piece/upload/${mockUuid}`, () => { + return HttpResponse.text('Upload failed', { status: 500 }) + }) + ) + + try { + await SP.uploadPiece({ + endpoint: 'http://pdp.local', + data: testData, + pieceCid, + }) + assert.fail('Should have thrown error for PUT 
error') + } catch (error) { + assert.instanceOf(error, UploadPieceError) + assert.include(error.message, 'Failed to upload piece') + } + }) + }) + + describe('uploadPieceStreaming', () => { + const mockPieceCidStr = 'bafkzcibcd4bdomn3tgwgrh3g532zopskstnbrd2n3sxfqbze7rxt7vqn7veigmy' + const mockUuid = '12345678-1234-1234-1234-123456789012' + + // Create async iterable from data + async function* createAsyncIterable(data: Uint8Array): AsyncIterable<Uint8Array> { + // Yield in chunks + const chunkSize = 64 + for (let i = 0; i < data.length; i += chunkSize) { + yield data.slice(i, Math.min(i + chunkSize, data.length)) + } + } + + it('should upload a piece successfully with provided PieceCID', async () => { + const pieceCid = Piece.parse(mockPieceCidStr) + const testData = new Uint8Array(SIZE_CONSTANTS.MIN_UPLOAD_SIZE).fill(0x42) + + server.use( + postPieceUploadsHandler(mockUuid), + uploadPieceStreamingHandler(mockUuid), + finalizePieceUploadHandler(mockUuid, mockPieceCidStr) + ) + + const result = await SP.uploadPieceStreaming({ + endpoint: 'http://pdp.local', + data: createAsyncIterable(testData), + pieceCid, + }) + + assert.strictEqual(result.pieceCid.toString(), mockPieceCidStr) + assert.strictEqual(result.size, testData.length) + }) + + it('should track progress during upload', async () => { + const pieceCid = Piece.parse(mockPieceCidStr) + const testData = new Uint8Array(256).fill(0x42) + const progressCalls: number[] = [] + + server.use( + postPieceUploadsHandler(mockUuid), + // Custom handler that consumes the stream + http.put(`http://pdp.local/pdp/piece/uploads/${mockUuid}`, async ({ request }) => { + // Consume the stream to trigger progress callbacks + const body = await request.arrayBuffer() + assert.strictEqual(body.byteLength, testData.length) + return HttpResponse.text('No Content', { status: 204 }) + }), + finalizePieceUploadHandler(mockUuid, mockPieceCidStr) + ) + + const result = await SP.uploadPieceStreaming({ + endpoint: 'http://pdp.local', + data: createAsyncIterable(testData), + pieceCid, + onProgress: (bytes) => progressCalls.push(bytes), + }) + + assert.strictEqual(result.size, testData.length) + assert.isAbove(progressCalls.length, 0, 'Should have received progress callbacks') + // Last progress call should equal total size + assert.strictEqual(progressCalls[progressCalls.length - 1], testData.length) + }) + + it('should fail when session creation returns error', async () => { + const pieceCid = Piece.parse(mockPieceCidStr) + const testData = new Uint8Array(SIZE_CONSTANTS.MIN_UPLOAD_SIZE).fill(0x42) + + server.use( + http.post('http://pdp.local/pdp/piece/uploads', () => { + return HttpResponse.text('Server error', { status: 500 }) + }) + ) + + try { + await SP.uploadPieceStreaming({ + endpoint: 'http://pdp.local', + data: createAsyncIterable(testData), + pieceCid, + }) + assert.fail('Should have thrown error for session creation failure') + } catch (error) { + assert.instanceOf(error, PostPieceError) + assert.include(error.message, 'Failed to create upload session') + } + }) + + it('should fail when session creation returns wrong status', async () => { + const pieceCid = Piece.parse(mockPieceCidStr) + const testData = new Uint8Array(SIZE_CONSTANTS.MIN_UPLOAD_SIZE).fill(0x42) + + server.use( + http.post('http://pdp.local/pdp/piece/uploads', () => { + return HttpResponse.text('OK', { status: 200 }) + }) + ) + + try { + await SP.uploadPieceStreaming({ + endpoint: 'http://pdp.local', + data: createAsyncIterable(testData), + pieceCid, + }) + assert.fail('Should have thrown error for
wrong status') + } catch (error) { + assert.instanceOf(error, PostPieceError) + assert.include(error.message, 'Expected 201 Created') + } + }) + + it('should fail with missing Location header', async () => { + const pieceCid = Piece.parse(mockPieceCidStr) + const testData = new Uint8Array(SIZE_CONSTANTS.MIN_UPLOAD_SIZE).fill(0x42) + + server.use( + http.post('http://pdp.local/pdp/piece/uploads', () => { + return new HttpResponse(null, { status: 201 }) + }) + ) + + try { + await SP.uploadPieceStreaming({ + endpoint: 'http://pdp.local', + data: createAsyncIterable(testData), + pieceCid, + }) + assert.fail('Should have thrown error for missing Location header') + } catch (error) { + assert.instanceOf(error, LocationHeaderError) + assert.include(error.message, 'Location header missing') + } + }) + + it('should fail with invalid Location header format', async () => { + const pieceCid = Piece.parse(mockPieceCidStr) + const testData = new Uint8Array(SIZE_CONSTANTS.MIN_UPLOAD_SIZE).fill(0x42) + + server.use( + http.post('http://pdp.local/pdp/piece/uploads', () => { + return new HttpResponse(null, { + status: 201, + headers: { Location: '/invalid/path' }, + }) + }) + ) + + try { + await SP.uploadPieceStreaming({ + endpoint: 'http://pdp.local', + data: createAsyncIterable(testData), + pieceCid, + }) + assert.fail('Should have thrown error for invalid Location header') + } catch (error) { + assert.instanceOf(error, LocationHeaderError) + assert.include(error.message, 'Invalid Location header format') + } + }) + + it('should fail when PUT upload returns error', async () => { + const pieceCid = Piece.parse(mockPieceCidStr) + const testData = new Uint8Array(SIZE_CONSTANTS.MIN_UPLOAD_SIZE).fill(0x42) + + server.use( + postPieceUploadsHandler(mockUuid), + http.put(`http://pdp.local/pdp/piece/uploads/${mockUuid}`, () => { + return HttpResponse.text('Upload failed', { status: 500 }) + }) + ) + + try { + await SP.uploadPieceStreaming({ + endpoint: 'http://pdp.local', + data: createAsyncIterable(testData), + pieceCid, + }) + assert.fail('Should have thrown error for PUT failure') + } catch (error) { + assert.instanceOf(error, UploadPieceError) + assert.include(error.message, 'Failed to upload piece') + } + }) + + it('should fail when PUT upload returns wrong status', async () => { + const pieceCid = Piece.parse(mockPieceCidStr) + const testData = new Uint8Array(SIZE_CONSTANTS.MIN_UPLOAD_SIZE).fill(0x42) + + server.use( + postPieceUploadsHandler(mockUuid), + http.put(`http://pdp.local/pdp/piece/uploads/${mockUuid}`, () => { + return HttpResponse.text('OK', { status: 200 }) + }) + ) + + try { + await SP.uploadPieceStreaming({ + endpoint: 'http://pdp.local', + data: createAsyncIterable(testData), + pieceCid, + }) + assert.fail('Should have thrown error for wrong PUT status') + } catch (error) { + assert.instanceOf(error, UploadPieceError) + assert.include(error.message, 'Expected 204 No Content') + } + }) + + it('should fail when finalize returns error', async () => { + const pieceCid = Piece.parse(mockPieceCidStr) + const testData = new Uint8Array(SIZE_CONSTANTS.MIN_UPLOAD_SIZE).fill(0x42) + + server.use( + postPieceUploadsHandler(mockUuid), + uploadPieceStreamingHandler(mockUuid), + http.post(`http://pdp.local/pdp/piece/uploads/${mockUuid}`, () => { + return HttpResponse.text('Finalize failed', { status: 500 }) + }) + ) + + try { + await SP.uploadPieceStreaming({ + endpoint: 'http://pdp.local', + data: createAsyncIterable(testData), + pieceCid, + }) + assert.fail('Should have thrown error for finalize 
failure') + } catch (error) { + assert.instanceOf(error, PostPieceError) + assert.include(error.message, 'Failed to finalize upload') + } + }) + + it('should fail when finalize returns wrong status', async () => { + const pieceCid = Piece.parse(mockPieceCidStr) + const testData = new Uint8Array(SIZE_CONSTANTS.MIN_UPLOAD_SIZE).fill(0x42) + + server.use( + postPieceUploadsHandler(mockUuid), + uploadPieceStreamingHandler(mockUuid), + http.post(`http://pdp.local/pdp/piece/uploads/${mockUuid}`, () => { + return HttpResponse.text('Created', { status: 201 }) + }) + ) + + try { + await SP.uploadPieceStreaming({ + endpoint: 'http://pdp.local', + data: createAsyncIterable(testData), + pieceCid, + }) + assert.fail('Should have thrown error for wrong finalize status') + } catch (error) { + assert.instanceOf(error, PostPieceError) + assert.include(error.message, 'Expected 200 OK for finalization') + } + }) + }) + + describe('getDataSet', () => { + it('should successfully fetch data set data', async () => { + const mockDataSetData: SP.getDataSet.ReturnType = { + id: 292, + pieces: [ + { + pieceId: 101, + pieceCid: 'bafkzcibeqcad6efnpwn62p5vvs5x3nh3j7xkzfgb3xtitcdm2hulmty3xx4tl3wace', + subPieceCid: 'bafkzcibeqcad6efnpwn62p5vvs5x3nh3j7xkzfgb3xtitcdm2hulmty3xx4tl3wace', + subPieceOffset: 0, + }, + { + pieceId: 102, + pieceCid: 'bafkzcibcd4bdomn3tgwgrh3g532zopskstnbrd2n3sxfqbze7rxt7vqn7veigmy', + subPieceCid: 'bafkzcibcd4bdomn3tgwgrh3g532zopskstnbrd2n3sxfqbze7rxt7vqn7veigmy', + subPieceOffset: 0, + }, + ], + nextChallengeEpoch: 1500, + } + + server.use( + http.get('http://pdp.local/pdp/data-sets/292', () => { + return HttpResponse.json(mockDataSetData, { + status: 200, + }) + }) + ) + + const result = await SP.getDataSet({ + endpoint: 'http://pdp.local', + dataSetId: 292n, + }) + + assert.equal(result.id, mockDataSetData.id) + assert.equal(result.nextChallengeEpoch, mockDataSetData.nextChallengeEpoch) + assert.equal(result.pieces.length, mockDataSetData.pieces.length) + assert.equal(result.pieces[0].pieceId, mockDataSetData.pieces[0].pieceId) + assert.equal(result.pieces[0].pieceCid, mockDataSetData.pieces[0].pieceCid) + }) + + it('should handle data set not found', async () => { + server.use( + http.get('http://pdp.local/pdp/data-sets/999', () => { + return new HttpResponse(null, { + status: 404, + }) + }) + ) + + try { + await SP.getDataSet({ + endpoint: 'http://pdp.local', + dataSetId: 999n, + }) + assert.fail('Should have thrown error for not found data set') + } catch (error) { + assert.instanceOf(error, GetDataSetError) + assert.equal(error.shortMessage, 'Data set not found.') + } + }) + + it('should handle server errors', async () => { + server.use( + http.get('http://pdp.local/pdp/data-sets/292', () => { + return HttpResponse.text('Database error', { + status: 500, + }) + }) + ) + + try { + await SP.getDataSet({ + endpoint: 'http://pdp.local', + dataSetId: 292n, + }) + assert.fail('Should have thrown error for server error') + } catch (error) { + assert.instanceOf(error, GetDataSetError) + assert.equal(error.shortMessage, 'Failed to get data set.') + assert.include(error.message, 'Database error') + } + }) + + it('should handle data set with no pieces', async () => { + const emptyDataSetData: SP.getDataSet.ReturnType = { + id: 292, + pieces: [], + nextChallengeEpoch: 1500, + } + + server.use( + http.get('http://pdp.local/pdp/data-sets/292', () => { + return HttpResponse.json(emptyDataSetData, { + status: 200, + }) + }) + ) + + const result = await SP.getDataSet({ + endpoint: 'http://pdp.local', + 
dataSetId: 292n, + }) + + assert.deepStrictEqual(result, emptyDataSetData) + assert.isArray(result.pieces) + assert.equal(result.pieces.length, 0) + }) + }) + + describe('downloadPiece', () => { + it('should successfully download and verify piece', async () => { + const testData = new Uint8Array([1, 2, 3, 4, 5, 6, 7, 8]) + const pieceCid = Piece.calculate(testData) + + server.use( + http.get('http://pdp.local/piece/:pieceCid', () => { + return HttpResponse.arrayBuffer(testData.buffer) + }) + ) + + const result = await SP.downloadPiece({ + endpoint: 'http://pdp.local', + pieceCid, + }) + assert.deepEqual(result, testData) + }) + + it('should throw on download failure (404)', async () => { + const testData = new Uint8Array([1, 2, 3, 4, 5, 6, 7, 8]) + const pieceCid = Piece.calculate(testData) + + server.use( + http.get('http://pdp.local/piece/:pieceCid', () => { + return HttpResponse.text('Not Found', { + status: 404, + }) + }) + ) + + try { + await SP.downloadPiece({ + endpoint: 'http://pdp.local', + pieceCid, + }) + assert.fail('Should have thrown error') + } catch (error) { + assert.instanceOf(error, DownloadPieceError) + assert.include(error.message, 'Failed to download piece') + } + }) + + it('should throw on server error (500)', async () => { + const testData = new Uint8Array([1, 2, 3, 4, 5, 6, 7, 8]) + const pieceCid = Piece.calculate(testData) + + server.use( + http.get('http://pdp.local/piece/:pieceCid', () => { + return HttpResponse.text('Internal Server Error', { + status: 500, + }) + }) + ) + + try { + await SP.downloadPiece({ + endpoint: 'http://pdp.local', + pieceCid, + }) + assert.fail('Should have thrown error') + } catch (error) { + assert.instanceOf(error, DownloadPieceError) + assert.include(error.message, 'Failed to download piece') + } + }) + + it('should throw on PieceCID verification failure', async () => { + const testData = new Uint8Array([1, 2, 3, 4, 5, 6, 7, 8]) + const pieceCid = Piece.calculate(testData) + const wrongData = new Uint8Array([9, 9, 9, 9]) // Different data + + server.use( + http.get('http://pdp.local/piece/:pieceCid', () => { + return HttpResponse.arrayBuffer(wrongData.buffer) + }) + ) + + try { + await SP.downloadPiece({ + endpoint: 'http://pdp.local', + pieceCid, + }) + assert.fail('Should have thrown error') + } catch (error) { + assert.instanceOf(error, DownloadPieceError) + assert.include(error.message, 'PieceCID verification failed') + } + }) + + it('should handle null response body', async () => { + const testData = new Uint8Array([1, 2, 3, 4, 5, 6, 7, 8]) + const pieceCid = Piece.calculate(testData) + + server.use( + http.get('http://pdp.local/piece/:pieceCid', () => { + return new HttpResponse() + }) + ) + + try { + await SP.downloadPiece({ + endpoint: 'http://pdp.local', + pieceCid, + }) + assert.fail('Should have thrown error') + } catch (error) { + assert.instanceOf(error, DownloadPieceError) + // Accept either error message as HttpResponse() behaves differently in Node vs browser + assert.match(error.message, /Response body is (null|empty)/) + } + }) + + it('should correctly stream and verify chunked data', async () => { + const testData = new Uint8Array([1, 2, 3, 4, 5, 6, 7, 8]) + const pieceCid = Piece.calculate(testData) + + server.use( + http.get('http://pdp.local/piece/:pieceCid', () => { + // Split test data into chunks + const chunk1 = testData.slice(0, 4) + const chunk2 = testData.slice(4) + + // Create readable stream that emits chunks + const stream = new ReadableStream({ + async start(controller) { + 
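// Splitting the body forces downloadPiece to reassemble multiple chunks before verifying the PieceCID. +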
controller.enqueue(chunk1) + // Small delay to simulate network + await new Promise((resolve) => setTimeout(resolve, 10)) + controller.enqueue(chunk2) + controller.close() + }, + }) + return new HttpResponse(stream, { + status: 200, + }) + }) + ) + + const result = await SP.downloadPiece({ + endpoint: 'http://pdp.local', + pieceCid, + }) + // Verify we got all the data correctly reassembled + assert.deepEqual(result, testData) + }) + + it('should handle large chunked downloads', async () => { + // Create larger test data (1KB) + const testData = new Uint8Array(1024) + for (let i = 0; i < testData.length; i++) { + testData[i] = i % 256 + } + const pieceCid = Piece.calculate(testData) + + server.use( + http.get('http://pdp.local/piece/:pieceCid', () => { + // Create readable stream that emits in 128-byte chunks + const chunkSize = 128 + let offset = 0 + + const stream = new ReadableStream({ + async pull(controller) { + if (offset >= testData.length) { + controller.close() + return + } + const chunk = testData.slice(offset, Math.min(offset + chunkSize, testData.length)) + offset += chunkSize + controller.enqueue(chunk) + }, + }) + return new HttpResponse(stream, { status: 200 }) + }) + ) + + const result = await SP.downloadPiece({ + endpoint: 'http://pdp.local', + pieceCid, + }) + assert.deepEqual(result, testData) + }) + }) +}) diff --git a/packages/synapse-core/test/typed-data.test.ts b/packages/synapse-core/test/typed-data.test.ts new file mode 100644 index 00000000..332f9e34 --- /dev/null +++ b/packages/synapse-core/test/typed-data.test.ts @@ -0,0 +1,226 @@ +import { assert } from 'chai' +import { type Address, createWalletClient, decodeAbiParameters, type Hex, http, parseSignature } from 'viem' +import { privateKeyToAccount } from 'viem/accounts' +import type { Chain } from '../src/chains.ts' +import * as Chains from '../src/chains.ts' +import * as Piece from '../src/piece.ts' +import * as TypedData from '../src/typed-data/index.ts' +import { getStorageDomain } from '../src/typed-data/type-definitions.ts' + +// Test fixtures generated from Solidity reference implementation +// These signatures are verified against WarmStorage contract +const FIXTURES = { + // Test private key from Solidity (never use in production!) 
+ privateKey: '0x1234567890123456789012345678901234567890123456789012345678901234' as Hex, + + // Expected EIP-712 signatures + signatures: { + createDataSet: { + extraData: + '0x0000000000000000000000002e988a386a799f506693793c6a5af6b54dfaabfb000000000000000000000000000000000000000000000000000000000000303900000000000000000000000000000000000000000000000000000000000000a0000000000000000000000000000000000000000000000000000000000000012000000000000000000000000000000000000000000000000000000000000001a00000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000057469746c6500000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000b54657374446174615365740000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000041cbe023bc62804a93b71ce163b63f5240d404326d5780eeee1163b36a5b6f4e0538c5df9ca2a572f2dd46c9bd1c921336c4cd7c6871f267ba5fe5faa2426bd86b1c00000000000000000000000000000000000000000000000000000000000000' as Hex, + clientDataSetId: 12345n, + payee: '0x70997970C51812dc3A010C7d01b50e0d17dc79C8' as Address, + metadata: [{ key: 'title', value: 'TestDataSet' }], + }, + addPieces: { + extraData: + '0x00000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000080000000000000000000000000000000000000000000000000000000000000012000000000000000000000000000000000000000000000000000000000000001c000000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000040000000000000000000000000000000000000000000000000000000000000006000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000400000000000000000000000000000000000000000000000000000000000000060000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000041bc47a95dca5d22821210d7acd104d987455588c0ec31a9f028bafa2f18e60262646c28adcb3dda5405305d9eecfd960967c6fe07eac453cb477b6654cc07eb291c00000000000000000000000000000000000000000000000000000000000000' as Hex, + clientDataSetId: 12345n, + nonce: 1n, + }, + schedulePieceRemovals: { + extraData: + '0x00000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000041df635aed98f509f6d404efb1543979a922867c9dc3b0b6e5967189045ff30b2173d89806b6d5fab38477f97c034f7012b145d31b90942abc1984182060ddfb171b00000000000000000000000000000000000000000000000000000000000000' as Hex, + clientDataSetId: 12345n, + pieceIds: [1n, 3n, 5n], + }, + }, +} + +const PIECE_DATA: string[] = [ + 'bafkzcibcauan42av3szurbbscwuu3zjssvfwbpsvbjf6y3tukvlgl2nf5rha6pa', + 'bafkzcibcpybwiktap34inmaex4wbs6cghlq5i2j2yd2bb2zndn5ep7ralzphkdy', +] + +const chain: Chain = { + ...Chains.calibration, + id: 31337, +} + +const account = privateKeyToAccount(FIXTURES.privateKey) +const client = createWalletClient({ + account, + chain, + transport: http(), +}) + +describe('Typed Data', () => { + it('should sign create 
data set', async () => { + const signatureActual = await TypedData.signCreateDataSet(client, { + clientDataSetId: FIXTURES.signatures.createDataSet.clientDataSetId, + payee: FIXTURES.signatures.createDataSet.payee, + metadata: FIXTURES.signatures.createDataSet.metadata, + }) + + assert.strictEqual( + signatureActual, + FIXTURES.signatures.createDataSet.extraData, + 'CreateDataSet signature should match Solidity reference' + ) + + const decoded = decodeAbiParameters(TypedData.signCreateDataSetAbiParameters, signatureActual) + + assert.strictEqual(decoded[0], account.address) + assert.strictEqual(decoded[1], FIXTURES.signatures.createDataSet.clientDataSetId) + assert.deepStrictEqual( + decoded[2], + FIXTURES.signatures.createDataSet.metadata.map((item) => item.key) + ) + assert.deepStrictEqual( + decoded[3], + FIXTURES.signatures.createDataSet.metadata.map((item) => item.value) + ) + }) + + it('should sign add pieces', async () => { + const extraDataActual = await TypedData.signAddPieces(client, { + clientDataSetId: FIXTURES.signatures.addPieces.clientDataSetId, + nonce: FIXTURES.signatures.addPieces.nonce, + pieces: PIECE_DATA.map((piece) => ({ + pieceCid: Piece.parse(piece), + })), + }) + + assert.strictEqual( + extraDataActual, + FIXTURES.signatures.addPieces.extraData, + 'AddPieces extraData should match Solidity reference' + ) + const decoded = decodeAbiParameters(TypedData.signAddPiecesAbiParameters, extraDataActual) + + assert.strictEqual(decoded[0], FIXTURES.signatures.addPieces.nonce) + assert.deepStrictEqual(decoded[1], [[], []]) + assert.deepStrictEqual(decoded[2], [[], []]) + }) + + it('should sign add pieces with metadata', async () => { + const extraDataActual = await TypedData.signAddPieces(client, { + clientDataSetId: FIXTURES.signatures.addPieces.clientDataSetId, + nonce: FIXTURES.signatures.addPieces.nonce, + pieces: PIECE_DATA.map((piece) => ({ + pieceCid: Piece.parse(piece), + metadata: [{ key: 'title', value: 'TestDataSet' }], + })), + }) + + const decoded = decodeAbiParameters(TypedData.signAddPiecesAbiParameters, extraDataActual) + + assert.strictEqual(decoded[0], FIXTURES.signatures.addPieces.nonce) + assert.deepStrictEqual(decoded[1], [['title'], ['title']]) + assert.deepStrictEqual(decoded[2], [['TestDataSet'], ['TestDataSet']]) + }) + + it('should sign schedule piece removals', async () => { + const extraDataActual = await TypedData.signSchedulePieceRemovals(client, { + clientDataSetId: FIXTURES.signatures.schedulePieceRemovals.clientDataSetId, + pieceIds: FIXTURES.signatures.schedulePieceRemovals.pieceIds, + }) + + assert.strictEqual( + extraDataActual, + FIXTURES.signatures.schedulePieceRemovals.extraData, + 'SchedulePieceRemovals extraData should match Solidity reference' + ) + }) + + it('should sign create data set and add pieces', async () => { + const extraDataActual = await TypedData.signCreateDataSetAndAddPieces(client, { + clientDataSetId: FIXTURES.signatures.createDataSet.clientDataSetId, + payee: FIXTURES.signatures.createDataSet.payee, + metadata: FIXTURES.signatures.createDataSet.metadata, + nonce: FIXTURES.signatures.addPieces.nonce, + pieces: PIECE_DATA.map((piece) => ({ + pieceCid: Piece.parse(piece), + })), + }) + + // Decode the combined extra data (two nested bytes) + const decoded = decodeAbiParameters(TypedData.signcreateDataSetAndAddPiecesAbiParameters, extraDataActual) + + // First bytes should be createDataSet extraData + const createDataSetDecoded = decodeAbiParameters(TypedData.signCreateDataSetAbiParameters, decoded[0]) + 
assert.strictEqual(createDataSetDecoded[0], account.address) + assert.strictEqual(createDataSetDecoded[1], FIXTURES.signatures.createDataSet.clientDataSetId) + + // Second bytes should be addPieces extraData + const addPiecesDecoded = decodeAbiParameters(TypedData.signAddPiecesAbiParameters, decoded[1]) + assert.strictEqual(addPiecesDecoded[0], FIXTURES.signatures.addPieces.nonce) + }) + + it('should sign erc20 permit', async () => { + const amount = 1000n + const nonce = 0n + const deadline = BigInt(Math.floor(Date.now() / 1000) + 3600) + + const signature = await TypedData.signErc20Permit(client, { + amount, + nonce, + deadline, + name: 'USDFC', + version: '1', + }) + + // Verify signature is valid hex + assert.match(signature, /^0x[0-9a-fA-F]+$/) + + // Parse signature to verify it has correct structure (r, s, v) + const parsed = parseSignature(signature) + assert.isDefined(parsed.r) + assert.isDefined(parsed.s) + assert.isDefined(parsed.v) + }) + + it('should sign erc20 permit with custom token and spender', async () => { + const customToken = '0x1234567890123456789012345678901234567890' as Address + const customSpender = '0x0987654321098765432109876543210987654321' as Address + const amount = 500n + const nonce = 1n + const deadline = BigInt(Math.floor(Date.now() / 1000) + 7200) + + const signature = await TypedData.signErc20Permit(client, { + token: customToken, + spender: customSpender, + amount, + nonce, + deadline, + name: 'CustomToken', + version: '2', + }) + + assert.match(signature, /^0x[0-9a-fA-F]+$/) + const parsed = parseSignature(signature) + assert.isDefined(parsed.r) + assert.isDefined(parsed.s) + }) +}) + +describe('getStorageDomain', () => { + it('should return domain with default verifying contract', () => { + const domain = getStorageDomain({ chain }) + + assert.strictEqual(domain.name, 'FilecoinWarmStorageService') + assert.strictEqual(domain.version, '1') + assert.strictEqual(domain.chainId, chain.id) + assert.strictEqual(domain.verifyingContract, chain.contracts.storage.address) + }) + + it('should return domain with custom verifying contract', () => { + const customContract = '0xCustomContractAddress1234567890123456789012' as Address + const domain = getStorageDomain({ chain, verifyingContract: customContract }) + + assert.strictEqual(domain.name, 'FilecoinWarmStorageService') + assert.strictEqual(domain.version, '1') + assert.strictEqual(domain.chainId, chain.id) + assert.strictEqual(domain.verifyingContract, customContract) + }) +}) diff --git a/packages/synapse-react/src/warm-storage/use-create-data-set.ts b/packages/synapse-react/src/warm-storage/use-create-data-set.ts index d3f7ad92..984445a7 100644 --- a/packages/synapse-react/src/warm-storage/use-create-data-set.ts +++ b/packages/synapse-react/src/warm-storage/use-create-data-set.ts @@ -48,7 +48,7 @@ export function useCreateDataSet(props: UseCreateDataSetProps) { }) props?.onHash?.(txHash) - const dataSet = await SP.pollForDataSetCreationStatus({ statusUrl }) + const dataSet = await SP.waitForDataSetCreationStatus({ statusUrl }) queryClient.invalidateQueries({ queryKey: ['synapse-warm-storage-data-sets', account.address], diff --git a/packages/synapse-react/src/warm-storage/use-delete-piece.ts b/packages/synapse-react/src/warm-storage/use-delete-piece.ts index b334bdce..5eafff53 100644 --- a/packages/synapse-react/src/warm-storage/use-delete-piece.ts +++ b/packages/synapse-react/src/warm-storage/use-delete-piece.ts @@ -1,6 +1,6 @@ import { getChain } from '@filoz/synapse-core/chains' import type { 
SessionKey } from '@filoz/synapse-core/session-key' -import { type DataSet, deletePiece, pollForDeletePieceStatus } from '@filoz/synapse-core/warm-storage' +import { type DataSet, deletePiece, waitForDeletePieceStatus } from '@filoz/synapse-core/warm-storage' import { type MutateOptions, useMutation, useQueryClient } from '@tanstack/react-query' import type { TransactionReceipt } from 'viem' import { useAccount, useChainId, useConfig } from 'wagmi' @@ -47,7 +47,7 @@ export function useDeletePiece(props: UseDeletePieceProps) { }) props?.onHash?.(deletePieceRsp.txHash) - const rsp = await pollForDeletePieceStatus(client, deletePieceRsp) + const rsp = await waitForDeletePieceStatus(client, deletePieceRsp) queryClient.invalidateQueries({ queryKey: ['synapse-warm-storage-data-sets', account.address], diff --git a/packages/synapse-react/src/warm-storage/use-upload.ts b/packages/synapse-react/src/warm-storage/use-upload.ts index 19a472a7..11579047 100644 --- a/packages/synapse-react/src/warm-storage/use-upload.ts +++ b/packages/synapse-react/src/warm-storage/use-upload.ts @@ -45,7 +45,7 @@ export function useUpload(props: UseUploadProps) { }) props?.onHash?.(uploadRsp.txHash) - const rsp = await SP.pollForAddPiecesStatus(uploadRsp) + const rsp = await SP.waitForAddPiecesStatus(uploadRsp) queryClient.invalidateQueries({ queryKey: ['synapse-warm-storage-data-sets', account.address], diff --git a/packages/synapse-sdk/src/pdp/auth.ts b/packages/synapse-sdk/src/pdp/auth.ts deleted file mode 100644 index dbf3f6c3..00000000 --- a/packages/synapse-sdk/src/pdp/auth.ts +++ /dev/null @@ -1,506 +0,0 @@ -/** - * EIP-712 Authentication helpers for PDP operations - */ - -import { asPieceCID, type PieceCID } from '@filoz/synapse-core/piece' -import { ethers } from 'ethers' -import type { AuthSignature, MetadataEntry } from '../types.ts' -import { METADATA_KEYS } from '../utils/constants.ts' -import { EIP712_TYPES } from '../utils/eip712.ts' - -// Declare window.ethereum for TypeScript -declare global { - interface Window { - ethereum?: any - } -} - -/** - * Helper class for creating EIP-712 typed signatures for PDP operations - * - * This class provides methods to create cryptographic signatures required for - * authenticating PDP (Proof of Data Possession) operations with service providers. - * All signatures are EIP-712 compatible for improved security and UX. - * - * Can be used standalone or through the Synapse SDK. 
- * - * @example - * ```typescript - * // Direct instantiation with ethers signer - * import { PDPAuthHelper } from '@filoz/synapse-sdk' - * import { ethers } from 'ethers' - * - * const wallet = new ethers.Wallet(privateKey, provider) - * const auth = new PDPAuthHelper(contractAddress, wallet, BigInt(chainId)) - * - * // Or get from Synapse instance (convenience method) - * const synapse = await Synapse.create({ privateKey, rpcURL }) - * const auth = synapse.getPDPAuthHelper() - * - * // Sign operations for PDP service authentication - * const createSig = await auth.signCreateDataSet(0, providerAddress, false) - * const addPiecesSig = await auth.signAddPieces(0, 1, pieceDataArray) - * ``` - */ -export class PDPAuthHelper { - private readonly signer: ethers.Signer - private readonly domain: ethers.TypedDataDomain - public readonly WITH_CDN_METADATA: MetadataEntry = { key: METADATA_KEYS.WITH_CDN, value: '' } - - constructor(serviceContractAddress: string, signer: ethers.Signer, chainId: bigint) { - this.signer = signer - - // EIP-712 domain - this.domain = { - name: 'FilecoinWarmStorageService', - version: '1', - chainId: Number(chainId), - verifyingContract: serviceContractAddress, - } - } - - /** - * Get the actual signer, unwrapping NonceManager if needed - */ - private getUnderlyingSigner(): ethers.Signer { - // Check if this is a NonceManager-wrapped signer - if ('signer' in this.signer && this.signer.constructor.name === 'NonceManager') { - // Access the underlying signer for signTypedData support - return (this.signer as any).signer - } - return this.signer - } - - /** - * Check if the signer is a browser provider (MetaMask, etc) - */ - private async isMetaMaskSigner(): Promise<boolean> { - try { - // Get the actual signer (unwrap NonceManager if needed) - const actualSigner = this.getUnderlyingSigner() - - // If it's a Wallet, it can sign locally, so not a MetaMask signer - if (actualSigner.constructor.name === 'Wallet') { - return false - } - - // Check if signer has a provider - const provider = actualSigner.provider - if (provider == null) { - return false - } - - // Check for ethers v6 BrowserProvider - if ('_eip1193Provider' in provider) { - return true - } - - // If it's a JsonRpcProvider or WebSocketProvider, it's not a browser provider - // These can sign locally with a wallet - if (provider instanceof ethers.JsonRpcProvider || provider instanceof ethers.WebSocketProvider) { - return false - } - - // For any other provider with request method (potential EIP-1193 provider) - if ('request' in provider && typeof (provider as any).request === 'function') { - return true - } - } catch { - // Silently fail and return false - } - return false - } - - /** - * Sign typed data with MetaMask-friendly display - * This bypasses ethers.js conversion to show human-readable values in MetaMask - */ - private async signWithMetaMask( - types: Record<string, Array<{ name: string; type: string }>>, - value: any - ): Promise<string> { - const provider = this.signer.provider - if (provider == null) { - throw new Error('No provider available') - } - - const signerAddress = await this.signer.getAddress() - - // Determine the primary type (the first one that isn't a dependency) - let primaryType = '' - for (const typeName of Object.keys(types)) { - // Skip Cid and PieceData as they are dependencies - if (typeName !== 'Cid' && typeName !== 'PieceData') { - primaryType = typeName - break - } - } - - // Construct the full typed data payload for MetaMask - const typedData = { - types: { - EIP712Domain: [ - { name: 'name', type: 'string' }, - { name: 'version', type:
'string' }, - { name: 'chainId', type: 'uint256' }, - { name: 'verifyingContract', type: 'address' }, - ], - ...types, - }, - primaryType, - domain: this.domain, - message: value, - } - - // For ethers v6, we need to access the underlying EIP-1193 provider - let eip1193Provider: any - if ('_eip1193Provider' in provider) { - // BrowserProvider in ethers v6 - eip1193Provider = (provider as any)._eip1193Provider - } else if ('request' in provider) { - // Already an EIP-1193 provider - eip1193Provider = provider - } else { - // Fallback to provider.send - eip1193Provider = provider - } - - // Call MetaMask directly for better UX - let signature: string - if (eip1193Provider != null && 'request' in eip1193Provider) { - // Use EIP-1193 request method - signature = await eip1193Provider.request({ - method: 'eth_signTypedData_v4', - params: [signerAddress, JSON.stringify(typedData)], - }) - } else { - // Fallback to send method - signature = await (provider as any).send('eth_signTypedData_v4', [signerAddress, JSON.stringify(typedData)]) - } - - return signature - } - - /** - * Create signature for data set creation - * - * This signature authorizes a service provider to create a new data set - * on behalf of the client. The signature includes the client's dataset ID, - * the service provider's payment address, and CDN preference. - * - * @param clientDataSetId - Unique dataset ID for the client (typically starts at 0 and increments) - * @param payee - Service provider's address that will receive payments - * @param metadata - Service parameters as key-value pairs - * @returns Promise resolving to authentication signature for data set creation - * - * @example - * ```typescript - * const auth = new PDPAuthHelper(contractAddress, signer, chainId) - * const signature = await auth.signCreateDataSet( - * 0, // First dataset for this client - * '0x1234...abcd', // Service provider address - * PDPAuthHelper.WITH_CDN_METADATA // Enable CDN service - * ) - * ``` - */ - async signCreateDataSet( - clientDataSetId: bigint, - payee: string, - metadata: MetadataEntry[] = [] - ): Promise<AuthSignature> { - let signature: string - const types = { CreateDataSet: EIP712_TYPES.CreateDataSet, MetadataEntry: EIP712_TYPES.MetadataEntry } - - // Check if we should use MetaMask-friendly signing - const useMetaMask = await this.isMetaMaskSigner() - - if (useMetaMask) { - // Use MetaMask-friendly signing for better UX - const value = { - clientDataSetId: clientDataSetId.toString(), // Keep as string for MetaMask display - metadata, - payee, - } - - signature = await this.signWithMetaMask(types, value) - } else { - // Use standard ethers.js signing (for private keys, etc) - const value = { - clientDataSetId, - metadata, - payee, - } - - // Use underlying signer for typed data signing (handles NonceManager) - const actualSigner = this.getUnderlyingSigner() - signature = await actualSigner.signTypedData(this.domain, types, value) - } - - // Return signature with components - const sig = ethers.Signature.from(signature) - - // For EIP-712, signedData contains the actual message hash that was signed - const signedData = ethers.TypedDataEncoder.hash(this.domain, types, { - clientDataSetId, - metadata, - payee, - }) - - return { - signature, - v: sig.v, - r: sig.r, - s: sig.s, - signedData, - } - } - - /** - * Create signature for adding pieces to a data set - * - * This signature authorizes a service provider to add new data pieces - * to an existing data set. Each piece represents aggregated data that - * will be proven using PDP challenges. 
- * - * @param clientDataSetId - Client's dataset ID (same as used in createDataSet) - * @param nonce - Random nonce for replay protection - * @param pieceDataArray - Array of piece data containing PieceCID CIDs and raw sizes - * @returns Promise resolving to authentication signature for adding pieces - * - * @example - * ```typescript - * const auth = new PDPAuthHelper(contractAddress, signer, chainId) - * const pieceData = [{ - * cid: 'bafkzcibc...', // PieceCID of aggregated data - * rawSize: Number(SIZE_CONSTANTS.MiB) // Raw size in bytes before padding - * }] - * const nonce = randU256() // Generate random nonce - * const signature = await auth.signAddPieces( - * 0, // Same dataset ID as data set creation - * nonce, // Random nonce for replay protection - * pieceData // Array of pieces to add - * ) - * ``` - */ - async signAddPieces( - clientDataSetId: bigint, - nonce: bigint, - pieceDataArray: PieceCID[] | string[], - metadata: MetadataEntry[][] = [] - ): Promise<AuthSignature> { - if (metadata.length === 0) { - // make metadata array match length of pieceDataArray - metadata = Array(pieceDataArray.length).fill([]) - } else if (metadata.length !== pieceDataArray.length) { - throw new Error('metadata length must match pieceDataArray length') - } - - const pieceMetadata: { pieceIndex: number; metadata: MetadataEntry[] }[] = [] - - // Transform the piece data into the proper format for EIP-712 - const formattedPieceData = [] - for (let i = 0; i < pieceDataArray.length; i++) { - const piece = pieceDataArray[i] - const pieceCid = typeof piece === 'string' ? asPieceCID(piece) : piece - if (pieceCid == null) { - throw new Error(`Invalid PieceCID: ${String(pieceCid)}`) - } - - // Format as nested structure matching Solidity's Cids.Cid struct - formattedPieceData.push({ - data: pieceCid.bytes, // This will be a Uint8Array - }) - pieceMetadata.push({ - pieceIndex: i, - metadata: metadata[i], - }) - } - const types = { - AddPieces: EIP712_TYPES.AddPieces, - Cid: EIP712_TYPES.Cid, - PieceMetadata: EIP712_TYPES.PieceMetadata, - MetadataEntry: EIP712_TYPES.MetadataEntry, - } - - let signature: string - - // Check if we should use MetaMask-friendly signing - const useMetaMask = await this.isMetaMaskSigner() - - if (useMetaMask) { - // Use MetaMask-friendly signing with properly structured data - const value = { - clientDataSetId: clientDataSetId.toString(), // Keep as string for MetaMask display - nonce: nonce.toString(), // Keep as string for MetaMask display - pieceData: formattedPieceData.map((item) => ({ - data: ethers.hexlify(item.data), // Convert Uint8Array to hex string for MetaMask - })), - pieceMetadata: pieceMetadata, - } - - signature = await this.signWithMetaMask(types, value) - } else { - // Use standard ethers.js signing with bigint values - const value = { - clientDataSetId, - nonce, - pieceData: formattedPieceData, - pieceMetadata: pieceMetadata, - } - - // Use underlying signer for typed data signing (handles NonceManager) - const actualSigner = this.getUnderlyingSigner() - signature = await actualSigner.signTypedData(this.domain, types, value) - } - - // Return signature with components - const sig = ethers.Signature.from(signature) - - // For EIP-712, signedData contains the actual message hash that was signed - const signedData = ethers.TypedDataEncoder.hash(this.domain, types, { - clientDataSetId, - nonce, - pieceData: formattedPieceData, - pieceMetadata: pieceMetadata, - }) - - return { - signature, - v: sig.v, - r: sig.r, - s: sig.s, - signedData, - } - } - - /** - * Create signature for 
scheduling piece removals - * - * This signature authorizes a service provider to schedule specific pieces - * for removal from the data set. Pieces are typically removed after the - * next successful proof submission. - * - * @param clientDataSetId - Client's dataset ID - * @param pieceIds - Array of piece IDs to schedule for removal - * @returns Promise resolving to authentication signature for scheduling removals - * - * @example - * ```typescript - * const auth = new PDPAuthHelper(contractAddress, signer, chainId) - * const signature = await auth.signSchedulePieceRemovals( - * 0, // Dataset ID - * [1, 2, 3] // Piece IDs to remove - * ) - * ``` - */ - async signSchedulePieceRemovals(clientDataSetId: bigint, pieceIds: Array<bigint>): Promise<AuthSignature> { - let signature: string - - // Check if we should use MetaMask-friendly signing - const useMetaMask = await this.isMetaMaskSigner() - - if (useMetaMask) { - // Use MetaMask-friendly signing for better UX - const value = { - clientDataSetId: clientDataSetId.toString(), // Keep as string for MetaMask display - pieceIds: pieceIds.map((id) => id.toString()), // Convert to string array for display - } - - signature = await this.signWithMetaMask({ SchedulePieceRemovals: EIP712_TYPES.SchedulePieceRemovals }, value) - } else { - // Use standard ethers.js signing with BigInt values - const value = { clientDataSetId, pieceIds } - - // Use underlying signer for typed data signing (handles NonceManager) - const actualSigner = this.getUnderlyingSigner() - signature = await actualSigner.signTypedData( - this.domain, - { SchedulePieceRemovals: EIP712_TYPES.SchedulePieceRemovals }, - value - ) - } - - const sig = ethers.Signature.from(signature) - - // For EIP-712, signedData contains the actual message hash that was signed - const signedData = ethers.TypedDataEncoder.hash( - this.domain, - { SchedulePieceRemovals: EIP712_TYPES.SchedulePieceRemovals }, - { clientDataSetId, pieceIds } - ) - - return { - signature, - v: sig.v, - r: sig.r, - s: sig.s, - signedData, - } - } - - /** - * Create signature for data set deletion - * - * This signature authorizes complete deletion of a data set and all - * its associated data. This action is irreversible and will terminate - * the storage service for this dataset. 
- * - * @param clientDataSetId - Client's dataset ID to delete - * @returns Promise resolving to authentication signature for data set deletion - * - * @example - * ```typescript - * const auth = new PDPAuthHelper(contractAddress, signer, chainId) - * const signature = await auth.signDeleteDataSet( - * 0 // Dataset ID to delete - * ) - * ``` - */ - async signDeleteDataSet(clientDataSetId: bigint): Promise<AuthSignature> { - let signature: string - - // Check if we should use MetaMask-friendly signing - const useMetaMask = await this.isMetaMaskSigner() - - if (useMetaMask) { - // Use MetaMask-friendly signing for better UX - const value = { - clientDataSetId: clientDataSetId.toString(), // Keep as string for MetaMask display - } - - signature = await this.signWithMetaMask({ DeleteDataSet: EIP712_TYPES.DeleteDataSet }, value) - } else { - // Use standard ethers.js signing - const value = { clientDataSetId } - - // Use underlying signer for typed data signing (handles NonceManager) - const actualSigner = this.getUnderlyingSigner() - signature = await actualSigner.signTypedData(this.domain, { DeleteDataSet: EIP712_TYPES.DeleteDataSet }, value) - } - - const sig = ethers.Signature.from(signature) - - // For EIP-712, signedData contains the actual message hash that was signed - const signedData = ethers.TypedDataEncoder.hash( - this.domain, - { DeleteDataSet: EIP712_TYPES.DeleteDataSet }, - { clientDataSetId } - ) - - return { - signature, - v: sig.v, - r: sig.r, - s: sig.s, - signedData, - } - } - - /** - * Get the address of the signer - * @returns Promise resolving to the signer's Ethereum address - */ - async getSignerAddress(): Promise<string> { - return await this.signer.getAddress() - } -} diff --git a/packages/synapse-sdk/src/pdp/index.ts b/packages/synapse-sdk/src/pdp/index.ts index 445c31c5..70ddc7dc 100644 --- a/packages/synapse-sdk/src/pdp/index.ts +++ b/packages/synapse-sdk/src/pdp/index.ts @@ -8,7 +8,6 @@ * ``` */ -export { PDPAuthHelper } from './auth.ts' export type { AddPiecesResponse, CreateDataSetResponse, diff --git a/packages/synapse-sdk/src/pdp/server.ts b/packages/synapse-sdk/src/pdp/server.ts index 3e3ce9ae..20f327ad 100644 --- a/packages/synapse-sdk/src/pdp/server.ts +++ b/packages/synapse-sdk/src/pdp/server.ts @@ -27,20 +27,16 @@ */ import * as Piece from '@filoz/synapse-core/piece' -import { asPieceCID, downloadAndValidate } from '@filoz/synapse-core/piece' import * as SP from '@filoz/synapse-core/sp' -import { randU256 } from '@filoz/synapse-core/utils' -import { ethers } from 'ethers' -import type { Hex } from 'viem' -import type { DataSetData, MetadataEntry, PieceCID } from '../types.ts' -import { validateDataSetMetadata, validatePieceMetadata } from '../utils/metadata.ts' -import { constructPieceUrl } from '../utils/piece.ts' -import type { PDPAuthHelper } from './auth.ts' +import { type MetadataObject, SIZE_CONSTANTS, uint8ArrayToAsyncIterable } from '@filoz/synapse-core/utils' import { - validateDataSetCreationStatusResponse, - validatePieceAdditionStatusResponse, - validatePieceStatusResponse, -} from './validation.ts' + addPieces, + createDataSet, + createDataSetAndAddPieces, + type PieceInputWithMetadata, +} from '@filoz/synapse-core/warm-storage' +import type { Account, Address, Chain, Client, Transport } from 'viem' +import type { DataSetData, PieceCID } from '../types.ts' /** * Response from creating a data set @@ -164,22 +160,27 @@ export interface PDPCreateAndAddInput { extraData: string } +// biome-ignore lint/style/noNamespace: namespaced types +export namespace PDPServer { + 
export type OptionsType = { + client: Client<Transport, Chain, Account> + /** The PDP service URL (e.g., https://pdp.provider.com). */ + endpoint: string + } + export type ErrorType = Error +} + export class PDPServer { - private readonly _serviceURL: string - private readonly _authHelper: PDPAuthHelper | null + private readonly _client: Client<Transport, Chain, Account> + private readonly _endpoint: string /** * Create a new PDPServer instance - * @param authHelper - PDPAuthHelper instance for signing operations - * @param serviceURL - The PDP service URL (e.g., https://pdp.provider.com) + * @param options - {@link PDPServer.OptionsType} */ - constructor(authHelper: PDPAuthHelper | null, serviceURL: string) { - if (serviceURL.trim() === '') { - throw new Error('PDP service URL is required') - } - // Remove trailing slash from URL - this._serviceURL = serviceURL.replace(/\/$/, '') - this._authHelper = authHelper + constructor(options: PDPServer.OptionsType) { + this._client = options.client + this._endpoint = options.endpoint } /** @@ -193,30 +194,19 @@ export class PDPServer { */ async createDataSet( clientDataSetId: bigint, - payee: string, - payer: string, - metadata: MetadataEntry[], - recordKeeper: string + payee: Address, + payer: Address, + metadata: MetadataObject, + recordKeeper: Address ): Promise<CreateDataSetResponse> { - // Validate metadata against contract limits - validateDataSetMetadata(metadata) - - // Generate the EIP-712 signature for data set creation - const authData = await this.getAuthHelper().signCreateDataSet(clientDataSetId, payee, metadata) - - // Prepare the extra data for the contract call - // This needs to match the DataSetCreateData struct in Warm Storage contract - const extraData = this._encodeDataSetCreateData({ + return createDataSet(this._client, { + endpoint: this._endpoint, + payee, payer, - clientDataSetId, metadata, - signature: authData.signature, - }) - - return SP.createDataSet({ - endpoint: this._serviceURL, - recordKeeper: recordKeeper as Hex, - extraData: `0x${extraData}`, + cdn: false, // synapse sdk adds this to the metadata + recordKeeper, + clientDataSetId, }) } @@ -234,144 +224,42 @@ export class PDPServer { */ async createAndAddPieces( clientDataSetId: bigint, - payee: string, - payer: string, - recordKeeper: string, - pieceDataArray: PieceCID[] | string[], - metadata: { - dataset?: MetadataEntry[] - pieces?: MetadataEntry[][] - } + payee: Address, + payer: Address, + recordKeeper: Address, + pieces: PieceInputWithMetadata[], + metadata: MetadataObject ): Promise<CreateDataSetResponse> { - // Validate metadata against contract limits - if (metadata.dataset == null) { - metadata.dataset = [] - } - validateDataSetMetadata(metadata.dataset) - metadata.pieces = PDPServer._processAddPiecesInputs(pieceDataArray, metadata.pieces) - - // Generate the EIP-712 signature for data set creation - const createAuthData = await this.getAuthHelper().signCreateDataSet(clientDataSetId, payee, metadata.dataset) - - // Prepare the extra data for the contract call - // This needs to match the DataSetCreateData struct in Warm Storage contract - const createExtraData = this._encodeDataSetCreateData({ - payer, + return createDataSetAndAddPieces(this._client, { + endpoint: this._endpoint, clientDataSetId, - metadata: metadata.dataset, - signature: createAuthData.signature, - }) - - // Generate a random nonce for replay protection - const nonce = randU256() - - const addAuthData = await this.getAuthHelper().signAddPieces( - clientDataSetId, - nonce, - pieceDataArray, // Pass PieceData[] directly to auth helper - metadata.pieces - ) - - const addExtraData = 
this._encodeAddPiecesExtraData({ - nonce, - signature: addAuthData.signature, - metadata: metadata.pieces, - }) - - const abiCoder = ethers.AbiCoder.defaultAbiCoder() - const encoded = abiCoder.encode(['bytes', 'bytes'], [`0x${createExtraData}`, `0x${addExtraData}`]) - - return SP.createDataSetAndAddPieces({ - endpoint: this._serviceURL, - recordKeeper: recordKeeper as Hex, - extraData: encoded as Hex, - pieces: pieceDataArray.map(asPieceCID).filter((t) => t != null), + payee, + payer, + recordKeeper, + cdn: false, // synapse sdk adds this to the metadata + pieces, + metadata, }) } - private static _processAddPiecesInputs( - pieceDataArray: PieceCID[] | string[], - metadata?: MetadataEntry[][] - ): MetadataEntry[][] { - if (pieceDataArray.length === 0) { - throw new Error('At least one piece must be provided') - } - - if (metadata != null) { - if (metadata.length !== pieceDataArray.length) { - throw new Error(`Metadata length (${metadata.length}) must match pieces length (${pieceDataArray.length})`) - } - for (let i = 0; i < metadata.length; i++) { - if (metadata[i] != null && metadata[i].length > 0) { - try { - validatePieceMetadata(metadata[i]) - } catch (error: any) { - throw new Error(`Piece ${i} metadata validation failed: ${error.message}`) - } - } - } - } - - // Validate all PieceCIDs - for (const pieceData of pieceDataArray) { - const pieceCid = asPieceCID(pieceData) - if (pieceCid == null) { - throw new Error(`Invalid PieceCID: ${String(pieceData)}`) - } - } - // If no metadata provided, create empty arrays for each piece - const finalMetadata = metadata ?? pieceDataArray.map(() => []) - return finalMetadata - } - /** * Add pieces to an existing data set * @param dataSetId - The ID of the data set to add pieces to * @param clientDataSetId - The client's dataset ID used when creating the data set - * @param pieceDataArray - Array of piece data containing PieceCID CIDs and raw sizes - * @param metadata - Optional metadata for each piece (array of arrays, one per piece) + * @param pieces - Array of piece data containing PieceCID CIDs and raw sizes * @returns Promise that resolves when the pieces are added (201 Created) * @throws Error if any CID is invalid - * - * @example - * ```typescript - * const pieceData = ['bafkzcibcd...'] - * const metadata = [[{ key: 'snapshotDate', value: '20250711' }]] - * await pdpTool.addPieces(dataSetId, clientDataSetId, pieceData, metadata) - * ``` */ async addPieces( dataSetId: number, clientDataSetId: bigint, - pieceDataArray: PieceCID[] | string[], - metadata?: MetadataEntry[][] + pieces: PieceInputWithMetadata[] ): Promise<AddPiecesResponse> { - const finalMetadata = PDPServer._processAddPiecesInputs(pieceDataArray, metadata) - - // Generate a random nonce for replay protection - const nonce = randU256() - - // Generate the EIP-712 signature for adding pieces - const authData = await this.getAuthHelper().signAddPieces( - clientDataSetId, - nonce, - pieceDataArray, // Pass PieceData[] directly to auth helper - finalMetadata - ) - - // Prepare the extra data for the contract call - // This needs to match what the Warm Storage contract expects for addPieces - const extraData = this._encodeAddPiecesExtraData({ - nonce, - signature: authData.signature, - metadata: finalMetadata, - }) - - const { txHash, statusUrl } = await SP.addPieces({ - endpoint: this._serviceURL, + const { txHash, statusUrl } = await addPieces(this._client, { + endpoint: this._endpoint, dataSetId: BigInt(dataSetId), - pieces: pieceDataArray.map(asPieceCID).filter((t) => t != null), - extraData: 
`0x${extraData}`, + clientDataSetId, + pieces, }) return { message: `Pieces added to data set ID ${dataSetId} successfully`, @@ -380,117 +268,6 @@ export class PDPServer { } } - /** - * Check the status of a data set creation - * @param txHash - Transaction hash from createDataSet - * @returns Promise that resolves with the creation status - */ - async getDataSetCreationStatus(txHash: string): Promise<DataSetCreationStatusResponse> { - const response = await fetch(`${this._serviceURL}/pdp/data-sets/created/${txHash}`, { - method: 'GET', - headers: { - 'Content-Type': 'application/json', - }, - }) - - if (response.status === 404) { - throw new Error(`Data set creation not found for transaction hash: ${txHash}`) - } - - if (response.status !== 200) { - const errorText = await response.text() - throw new Error( - `Failed to get data set creation status: ${response.status} ${response.statusText} - ${errorText}` - ) - } - - const data = await response.json() - return validateDataSetCreationStatusResponse(data) - } - - /** - * Check the status of a piece addition transaction - * @param dataSetId - The data set ID - * @param txHash - Transaction hash from addPieces - * @returns Promise that resolves with the addition status - */ - async getPieceAdditionStatus(dataSetId: number, txHash: string): Promise<PieceAdditionStatusResponse> { - const response = await fetch(`${this._serviceURL}/pdp/data-sets/${dataSetId}/pieces/added/${txHash}`, { - method: 'GET', - headers: { - 'Content-Type': 'application/json', - }, - }) - - if (response.status === 404) { - throw new Error(`Piece addition not found for transaction: ${txHash}`) - } - - if (response.status !== 200) { - const errorText = await response.text() - throw new Error(`Failed to get piece addition status: ${response.status} ${response.statusText} - ${errorText}`) - } - - const data = await response.json() - return validatePieceAdditionStatusResponse(data) - } - - /** - * Find a piece by PieceCID and size - * @param pieceCid - The PieceCID CID (as string or PieceCID object) - * @returns Piece information if found - */ - async findPiece(pieceCid: string | PieceCID): Promise<FindPieceResponse> { - const parsedPieceCid = asPieceCID(pieceCid) - if (parsedPieceCid == null) { - throw new Error(`Invalid PieceCID: ${String(pieceCid)}`) - } - - const piece = await SP.findPiece({ - endpoint: this._serviceURL, - pieceCid: parsedPieceCid, - }) - return { - pieceCid: piece, - } - } - - /** - * Get indexing and IPNI status for a piece - * - * TODO: not used anywhere, remove? 
- * - * @param pieceCid - The PieceCID CID (as string or PieceCID object) - * @returns Piece status information including indexing and IPNI advertisement status - * @throws Error if piece not found or doesn't belong to service (404) - */ - async getPieceStatus(pieceCid: string | PieceCID): Promise<PieceStatusResponse> { - const parsedPieceCid = asPieceCID(pieceCid) - if (parsedPieceCid == null) { - throw new Error(`Invalid PieceCID: ${String(pieceCid)}`) - } - - const response = await fetch(`${this._serviceURL}/pdp/piece/${parsedPieceCid.toString()}/status`, { - method: 'GET', - headers: { - Accept: 'application/json', - }, - }) - - if (response.status === 404) { - const errorText = await response.text() - throw new Error(`Piece not found or does not belong to service: ${errorText}`) - } - - if (!response.ok) { - const errorText = await response.text() - throw new Error(`Failed to get piece status: ${response.status} ${response.statusText} - ${errorText}`) - } - - const data = await response.json() - return validatePieceStatusResponse(data) - } - /** * Upload a piece to the PDP server using the commp-last protocol. * @@ -508,17 +285,17 @@ ): Promise<UploadResponse> { if (data instanceof Uint8Array) { // Check hard limit - if (data.length > Piece.MAX_UPLOAD_SIZE) { + if (data.length > SIZE_CONSTANTS.MAX_UPLOAD_SIZE) { throw new Error( - `Upload size ${data.length} exceeds maximum ${Piece.MAX_UPLOAD_SIZE} bytes (1 GiB with fr32 expansion)` + `Upload size ${data.length} exceeds maximum ${SIZE_CONSTANTS.MAX_UPLOAD_SIZE} bytes (1 GiB with fr32 expansion)` ) } // Convert to async iterable with chunking - const iterable = Piece.uint8ArrayToAsyncIterable(data) + const iterable = uint8ArrayToAsyncIterable(data) return SP.uploadPieceStreaming({ - endpoint: this._serviceURL, + endpoint: this._endpoint, data: iterable, size: data.length, // Known size for Content-Length onProgress: options?.onProgress, @@ -528,7 +305,7 @@ } else { // AsyncIterable or ReadableStream path - no size limit check here (checked during streaming) return SP.uploadPieceStreaming({ - endpoint: this._serviceURL, + endpoint: this._endpoint, data, // size unknown for streams onProgress: options?.onProgress, @@ -538,26 +315,6 @@ } } - /** - * Download a piece from a service provider - * @param pieceCid - The PieceCID CID of the piece - * @returns The downloaded data - */ - async downloadPiece(pieceCid: string | PieceCID): Promise<Uint8Array> { - const parsedPieceCid = asPieceCID(pieceCid) - if (parsedPieceCid == null) { - throw new Error(`Invalid PieceCID: ${String(pieceCid)}`) - } - - // Use the retrieval endpoint configured at construction time - const downloadUrl = constructPieceUrl(this._serviceURL, parsedPieceCid) - - const response = await fetch(downloadUrl) - - // Use the shared download and validation function - return await downloadAndValidate(response, parsedPieceCid) - } - /** * Get data set details from the PDP server * @param dataSetId - The ID of the data set to fetch @@ -565,7 +322,7 @@ */ async getDataSet(dataSetId: number): Promise<DataSetData> { const data = await SP.getDataSet({ - endpoint: this._serviceURL, + endpoint: this._endpoint, dataSetId: BigInt(dataSetId), }) @@ -583,109 +340,4 @@ nextChallengeEpoch: data.nextChallengeEpoch, } } - - /** - * Delete a piece from a data set - * @param dataSetId - The ID of dataset to delete - * @param clientDataSetId - Client dataset ID of the dataset to delete - * @param pieceID - The ID of the piece to delete 
- * @returns Promise for transaction hash of the delete operation - */ - async deletePiece(dataSetId: number, clientDataSetId: bigint, pieceID: number): Promise<string> { - const authData = await this.getAuthHelper().signSchedulePieceRemovals(clientDataSetId, [BigInt(pieceID)]) - - const { txHash } = await SP.deletePiece({ - endpoint: this._serviceURL, - dataSetId: BigInt(dataSetId), - pieceId: BigInt(pieceID), - extraData: ethers.AbiCoder.defaultAbiCoder().encode(['bytes'], [authData.signature]) as Hex, - }) - return txHash - } - - /** - * Encode DataSetCreateData for extraData field - * This matches the Solidity struct DataSetCreateData in Warm Storage contract - */ - private _encodeDataSetCreateData(data: { - payer: string - clientDataSetId: bigint - metadata: MetadataEntry[] - signature: string - }): string { - // Ensure signature has 0x prefix - const signature = data.signature.startsWith('0x') ? data.signature : `0x${data.signature}` - - // ABI encode the struct as a tuple - // DataSetCreateData struct: - // - address payer - // - uint256 clientDataSetId - // - string[] metadataKeys - // - string[] metadataValues - // - bytes signature - const keys = data.metadata.map((item) => item.key) - const values = data.metadata.map((item) => item.value) - const abiCoder = ethers.AbiCoder.defaultAbiCoder() - const encoded = abiCoder.encode( - ['address', 'uint256', 'string[]', 'string[]', 'bytes'], - [data.payer, data.clientDataSetId, keys, values, signature] - ) - - // Return hex string without 0x prefix (since we add it in the calling code) - return encoded.slice(2) - } - - /** - * Encode AddPieces extraData for the addPieces operation - * Format: (uint256 nonce, string[][] metadataKeys, string[][] metadataValues, bytes signature) - */ - private _encodeAddPiecesExtraData(data: { nonce: bigint; signature: string; metadata: MetadataEntry[][] }): string { - // Ensure signature has 0x prefix - const signature = data.signature.startsWith('0x') ? 
data.signature : `0x${data.signature}` - const keys = data.metadata.map((item) => item.map((item) => item.key)) - const values = data.metadata.map((item) => item.map((item) => item.value)) - - // ABI encode as (uint256 nonce, string[][] metadataKeys, string[][] metadataValues, bytes signature) - const abiCoder = ethers.AbiCoder.defaultAbiCoder() - const encoded = abiCoder.encode( - ['uint256', 'string[][]', 'string[][]', 'bytes'], - [data.nonce, keys, values, signature] - ) - - // Return hex string without 0x prefix (since we add it in the calling code) - return encoded.slice(2) - } - - /** - * Ping the service provider to check connectivity - * @returns Promise that resolves if provider is reachable (200 response) - * @throws Error if provider is not reachable or returns non-200 status - */ - async ping(): Promise<void> { - const url = `${this._serviceURL}/pdp/ping` - const response = await fetch(url, { - method: 'GET', - headers: {}, - }) - - if (response.status !== 200) { - const errorText = await response.text().catch(() => 'Unknown error') - throw new Error(`Provider ping failed: ${response.status} ${response.statusText} - ${errorText}`) - } - } - - /** - * Get the service URL for this PDPServer instance - * @returns The service URL - */ - getServiceURL(): string { - return this._serviceURL - } - - getAuthHelper(): PDPAuthHelper { - if (this._authHelper == null) { - throw new Error('AuthHelper is not available for an operation that requires signing') - } - return this._authHelper - } } diff --git a/packages/synapse-sdk/src/session/key.ts b/packages/synapse-sdk/src/session/key.ts index 03202d34..87c60a1c 100644 --- a/packages/synapse-sdk/src/session/key.ts +++ b/packages/synapse-sdk/src/session/key.ts @@ -21,14 +21,14 @@ * ``` */ +import { SESSION_KEY_PERMISSIONS } from '@filoz/synapse-core/session-key' import { ethers } from 'ethers' -import { EIP712_TYPE_HASHES } from '../utils/eip712.ts' import { CONTRACT_ABIS, CONTRACT_ADDRESSES, getFilecoinNetworkType } from '../utils/index.ts' -export const CREATE_DATA_SET_TYPEHASH = EIP712_TYPE_HASHES.CreateDataSet -export const ADD_PIECES_TYPEHASH = EIP712_TYPE_HASHES.AddPieces -export const SCHEDULE_PIECE_REMOVALS_TYPEHASH = EIP712_TYPE_HASHES.SchedulePieceRemovals -export const DELETE_DATA_SET_TYPEHASH = EIP712_TYPE_HASHES.DeleteDataSet +export const CREATE_DATA_SET_TYPEHASH = SESSION_KEY_PERMISSIONS.CreateDataSet +export const ADD_PIECES_TYPEHASH = SESSION_KEY_PERMISSIONS.AddPieces +export const SCHEDULE_PIECE_REMOVALS_TYPEHASH = SESSION_KEY_PERMISSIONS.SchedulePieceRemovals +export const DELETE_DATA_SET_TYPEHASH = SESSION_KEY_PERMISSIONS.DeleteDataSet // These are the PDP-related permissions that can be granted to a session key. // They are bytes32 hex strings that can be supplied to fetchExpiries, login, and revoke. 
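The typehash constants above are plain bytes32 hex strings, so granting or revoking PDP permissions for a session key reduces to passing the right subset of them to the `login`/`revoke` helpers mentioned in the comment. A minimal sketch follows; the constants and the `fetchExpiries`/`login`/`revoke` names come from this diff, but the registry interface shape and the `@filoz/synapse-sdk/session` import path are illustrative assumptions, not confirmed API:

```typescript
// Sketch only: the permission constants are the real exports shown above;
// the registry surface below is an assumed shape for illustration.
import {
  ADD_PIECES_TYPEHASH,
  CREATE_DATA_SET_TYPEHASH,
  DELETE_DATA_SET_TYPEHASH,
} from '@filoz/synapse-sdk/session' // import path is an assumption

type Bytes32 = `0x${string}`

// Assumed surface for the fetchExpiries/login/revoke helpers named in the comment above.
interface SessionKeyRegistryLike {
  login(sessionKey: Bytes32, expiry: bigint, permissions: string[]): Promise<void>
  revoke(sessionKey: Bytes32, permissions: string[]): Promise<void>
  fetchExpiries(sessionKey: Bytes32, permissions: string[]): Promise<bigint[]>
}

// Grant a session key upload-only powers for 24 hours: it may create data
// sets and add pieces, while data set deletion stays with the root key.
async function grantUploadOnly(registry: SessionKeyRegistryLike, sessionKey: Bytes32): Promise<void> {
  const expiry = BigInt(Math.floor(Date.now() / 1000) + 24 * 60 * 60) // unix timestamp, 24h out
  await registry.login(sessionKey, expiry, [CREATE_DATA_SET_TYPEHASH, ADD_PIECES_TYPEHASH])
  // If the key was previously over-granted, explicitly revoke the destructive permission.
  await registry.revoke(sessionKey, [DELETE_DATA_SET_TYPEHASH])
}
```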
diff --git a/packages/synapse-sdk/src/storage/context.ts b/packages/synapse-sdk/src/storage/context.ts index f3f61abd..1ac4ec36 100644 --- a/packages/synapse-sdk/src/storage/context.ts +++ b/packages/synapse-sdk/src/storage/context.ts @@ -24,11 +24,12 @@ import { asPieceCID } from '@filoz/synapse-core/piece' import * as SP from '@filoz/synapse-core/sp' -import { randIndex, randU256 } from '@filoz/synapse-core/utils' +import { type MetadataObject, randIndex, randU256 } from '@filoz/synapse-core/utils' +import { deletePiece } from '@filoz/synapse-core/warm-storage' import type { ethers } from 'ethers' -import type { Hex } from 'viem' +import type { Address, Hex } from 'viem' import type { PaymentsService } from '../payments/index.ts' -import { PDPAuthHelper, PDPServer } from '../pdp/index.ts' +import { PDPServer } from '../pdp/index.ts' import { PDPVerifier } from '../pdp/verifier.ts' import { SPRegistryService } from '../sp-registry/index.ts' import type { ProviderInfo } from '../sp-registry/types.ts' @@ -37,7 +38,6 @@ import type { CreateContextsOptions, DataSetInfo, DownloadOptions, - MetadataEntry, PieceCID, PieceRecord, PieceStatus, @@ -57,7 +57,7 @@ import { SIZE_CONSTANTS, timeUntilEpoch, } from '../utils/index.ts' -import { combineMetadata, metadataMatches, objectToEntries, validatePieceMetadata } from '../utils/metadata.ts' +import { combineMetadata, metadataMatches, validatePieceMetadata } from '../utils/metadata.ts' import type { WarmStorageService } from '../warm-storage/index.ts' const NO_REMAINING_PROVIDERS_ERROR_MESSAGE = 'No approved service providers available' @@ -65,11 +65,10 @@ const NO_REMAINING_PROVIDERS_ERROR_MESSAGE = 'No approved service providers avai export class StorageContext { private readonly _synapse: Synapse private readonly _provider: ProviderInfo + private readonly _pdpEndpoint: string private readonly _pdpServer: PDPServer private readonly _warmStorageService: WarmStorageService - private readonly _warmStorageAddress: string private readonly _withCDN: boolean - private readonly _signer: ethers.Signer private readonly _uploadBatchSize: number private _dataSetId: number | undefined private _clientDataSetId: bigint | undefined @@ -81,7 +80,7 @@ export class StorageContext { resolve: (pieceId: number) => void reject: (error: Error) => void callbacks?: UploadCallbacks - metadata?: MetadataEntry[] + metadata?: MetadataObject }> = [] private _isProcessing: boolean = false @@ -175,7 +174,6 @@ export class StorageContext { this._synapse = synapse this._provider = provider this._withCDN = options.withCDN ?? false - this._signer = synapse.getSigner() this._warmStorageService = warmStorageService this._uploadBatchSize = Math.max(1, options.uploadBatchSize ?? 
SIZE_CONSTANTS.DEFAULT_UPLOAD_BATCH_SIZE) this._dataSetMetadata = dataSetMetadata @@ -184,17 +182,15 @@ export class StorageContext { this._dataSetId = dataSetId this.serviceProvider = provider.serviceProvider - // Get WarmStorage address from Synapse (which already handles override) - this._warmStorageAddress = synapse.getWarmStorageAddress() - - // Create PDPAuthHelper for signing operations - const authHelper = new PDPAuthHelper(this._warmStorageAddress, this._signer, BigInt(synapse.getChainId())) - // Create PDPServer instance with provider URL from PDP product if (!provider.products.PDP?.data.serviceURL) { throw new Error(`Provider ${provider.id} does not have a PDP product with serviceURL`) } - this._pdpServer = new PDPServer(authHelper, provider.products.PDP.data.serviceURL) + this._pdpEndpoint = provider.products.PDP.data.serviceURL + this._pdpServer = new PDPServer({ + client: synapse.connectorClient, + endpoint: this._pdpEndpoint, + }) } /** @@ -826,8 +822,8 @@ export class StorageContext { // Skip providers without PDP products continue } - const providerPdpServer = new PDPServer(null, provider.products.PDP.data.serviceURL) - await providerPdpServer.ping() + + await SP.ping(provider.products.PDP.data.serviceURL) return provider } catch (error) { console.warn( @@ -957,7 +953,11 @@ export class StorageContext { // Poll for piece to be "parked" (ready) performance.mark('synapse:findPiece-start') - await this._pdpServer.findPiece(uploadResult.pieceCid) + + await SP.findPiece({ + endpoint: this._pdpEndpoint, + pieceCid: uploadResult.pieceCid, + }) performance.mark('synapse:findPiece-end') performance.measure('synapse:findPiece', 'synapse:findPiece-start', 'synapse:findPiece-end') @@ -983,7 +983,7 @@ export class StorageContext { resolve, reject, callbacks: options, - metadata: options?.metadata ? objectToEntries(options.metadata) : undefined, + metadata: options?.metadata, }) // Debounce: defer processing to next event loop tick @@ -1051,7 +1051,6 @@ export class StorageContext { try { // Create piece data array and metadata from the batch const pieceCids: PieceCID[] = batch.map((item) => item.pieceCid) - const metadataArray: MetadataEntry[][] = batch.map((item) => item.metadata ?? []) const confirmedPieceIds: number[] = [] const addedPieceRecords = pieceCids.map((pieceCid) => ({ pieceCid })) @@ -1064,8 +1063,7 @@ export class StorageContext { const addPiecesResult = await this._pdpServer.addPieces( this.dataSetId, // PDPVerifier data set ID clientDataSetId, // Client's dataset nonce - pieceCids, - metadataArray + batch.map((item) => ({ pieceCid: item.pieceCid, metadata: item.metadata })) ) // Notify callbacks with transaction @@ -1073,7 +1071,7 @@ export class StorageContext { item.callbacks?.onPiecesAdded?.(addPiecesResult.txHash as Hex, addedPieceRecords) item.callbacks?.onPieceAdded?.(addPiecesResult.txHash as Hex) }) - const addPiecesResponse = await SP.pollForAddPiecesStatus(addPiecesResult) + const addPiecesResponse = await SP.waitForAddPiecesStatus(addPiecesResult) // Handle transaction tracking if available confirmedPieceIds.push(...(addPiecesResponse.confirmedPieceIds ?? [])) @@ -1095,31 +1093,26 @@ export class StorageContext { ? 
{ ...baseMetadataObj, [METADATA_KEYS.WITH_CDN]: '' } : baseMetadataObj - // Convert to MetadataEntry[] for PDP operations (requires ordered array) - const finalMetadata = objectToEntries(metadataObj) // Create a new data set and add pieces to it const createAndAddPiecesResult = await this._pdpServer.createAndAddPieces( randU256(), - this._provider.payee, - payer, - this._synapse.getWarmStorageAddress(), - pieceCids, - { - dataset: finalMetadata, - pieces: metadataArray, - } + this._provider.serviceProvider as Address, + payer as Address, + this._synapse.getWarmStorageAddress() as Address, + batch.map((item) => ({ pieceCid: item.pieceCid, metadata: item.metadata })), + metadataObj ) batch.forEach((item) => { item.callbacks?.onPiecesAdded?.(createAndAddPiecesResult.txHash as Hex, addedPieceRecords) item.callbacks?.onPieceAdded?.(createAndAddPiecesResult.txHash as Hex) }) - const confirmedDataset = await SP.pollForDataSetCreationStatus(createAndAddPiecesResult) + const confirmedDataset = await SP.waitForDataSetCreationStatus(createAndAddPiecesResult) this._dataSetId = confirmedDataset.dataSetId - const confirmedPieces = await SP.pollForAddPiecesStatus({ + const confirmedPieces = await SP.waitForAddPiecesStatus({ statusUrl: new URL( `/pdp/data-sets/${confirmedDataset.dataSetId}/pieces/added/${confirmedDataset.createMessageHash}`, - this._pdpServer.getServiceURL() + this._pdpEndpoint ).toString(), }) @@ -1300,7 +1293,14 @@ const pieceId = typeof piece === 'number' ? piece : await this._getPieceIdByCID(piece) const clientDataSetId = await this.getClientDataSetId() - return this._pdpServer.deletePiece(this.dataSetId, clientDataSetId, pieceId) + const { txHash } = await deletePiece(this._synapse.connectorClient, { + endpoint: this._pdpEndpoint, + dataSetId: BigInt(this.dataSetId), + pieceId: BigInt(pieceId), + clientDataSetId: clientDataSetId, + }) + + return txHash } /** @@ -1315,7 +1315,10 @@ } try { - await this._pdpServer.findPiece(parsedPieceCID) + await SP.findPiece({ + endpoint: this._pdpEndpoint, + pieceCid: parsedPieceCID, + }) return true } catch { return false diff --git a/packages/synapse-sdk/src/synapse.ts b/packages/synapse-sdk/src/synapse.ts index d5da8254..c697a3b1 100644 --- a/packages/synapse-sdk/src/synapse.ts +++ b/packages/synapse-sdk/src/synapse.ts @@ -1,4 +1,5 @@ import { ethers } from 'ethers' +import type { Account, Chain, Client, Transport } from 'viem' import { FilBeamService } from './filbeam/index.ts' import { PaymentsService } from './payments/index.ts' import { ChainRetriever, FilBeamRetriever, SubgraphRetriever } from './retriever/index.ts' @@ -20,6 +21,7 @@ import type { SynapseOptions, } from './types.ts' import { CHAIN_IDS, CONTRACT_ADDRESSES, getFilecoinNetworkType } from './utils/index.ts' +import { signerToConnectorClient } from './utils/viem.ts' import { WarmStorageService } from './warm-storage/index.ts' /** @@ -39,6 +41,8 @@ export class Synapse { private _session: SessionKey | null = null private readonly _multicall3Address: string + connectorClient: Client<Transport, Chain, Account> + /** * Create a new Synapse instance with async initialization. 
* @param options - Configuration options for Synapse @@ -195,6 +199,7 @@ network, payments, options.withCDN === true, + await signerToConnectorClient(signer, provider), warmStorageAddress, warmStorageService, pieceRetriever, @@ -211,7 +216,7 @@ network: FilecoinNetworkType, payments: PaymentsService, withCDN: boolean, - + connectorClient: Client<Transport, Chain, Account>, warmStorageAddress: string, warmStorageService: WarmStorageService, pieceRetriever: PieceRetriever, @@ -232,6 +237,7 @@ this._session = null this._multicall3Address = multicall3Address + this.connectorClient = connectorClient // Initialize StorageManager this._storageManager = new StorageManager( this, diff --git a/packages/synapse-sdk/src/test/eip712.test.ts b/packages/synapse-sdk/src/test/eip712.test.ts deleted file mode 100644 index 80d7948f..00000000 --- a/packages/synapse-sdk/src/test/eip712.test.ts +++ /dev/null @@ -1,22 +0,0 @@ -/* globals describe it */ -import { assert } from 'chai' -import { EIP712_ENCODED_TYPES, EIP712_TYPE_HASHES } from '../utils/eip712.ts' - -describe('EIP712 Type String Generator', () => { - it('should generate correct type string for nested type', () => { - const result = EIP712_ENCODED_TYPES.AddPieces - // nested & sorted - const expected = - 'AddPieces(uint256 clientDataSetId,uint256 nonce,Cid[] pieceData,PieceMetadata[] pieceMetadata)Cid(bytes data)MetadataEntry(string key,string value)PieceMetadata(uint256 pieceIndex,MetadataEntry[] metadata)' - assert.equal(result, expected) - - const expectedHash = '0x954bdc254591a7eab1b73f03842464d9283a08352772737094d710a4428fd183' - assert.equal(EIP712_TYPE_HASHES.AddPieces, expectedHash) - }) - - it('should handle types with no dependencies', () => { - const result = EIP712_ENCODED_TYPES.DeleteDataSet - // DeleteDataSet has no custom type dependencies - assert.equal(result, 'DeleteDataSet(uint256 clientDataSetId)') - }) -}) diff --git a/packages/synapse-sdk/src/test/metadata.test.ts b/packages/synapse-sdk/src/test/metadata.test.ts index 12de389c..4f2464d6 100644 --- a/packages/synapse-sdk/src/test/metadata.test.ts +++ b/packages/synapse-sdk/src/test/metadata.test.ts @@ -1,14 +1,15 @@ /* globals describe it before after beforeEach */ import * as Mocks from '@filoz/synapse-core/mocks' -import { asPieceCID } from '@filoz/synapse-core/piece' +import * as Piece from '@filoz/synapse-core/piece' +import type { MetadataObject } from '@filoz/synapse-core/utils' import { assert } from 'chai' import { ethers } from 'ethers' import { setup } from 'iso-web/msw' -import { PDPAuthHelper } from '../pdp/auth.ts' import { PDPServer } from '../pdp/server.ts' import type { MetadataEntry } from '../types.ts' import { METADATA_KEYS } from '../utils/constants.ts' +import { signerToConnectorClient } from '../utils/viem.ts' // Mock server for testing const server = setup() @@ -16,10 +17,8 @@ describe('Metadata Support', () => { const TEST_PRIVATE_KEY = '0x0101010101010101010101010101010101010101010101010101010101010101' const TEST_CONTRACT_ADDRESS = '0x1234567890123456789012345678901234567890' - const TEST_CHAIN_ID = 1n const SERVER_URL = 'http://pdp.local' - let authHelper: PDPAuthHelper let pdpServer: PDPServer before(async () => { @@ -30,22 +29,19 @@ server.stop() }) - beforeEach(() => { + beforeEach(async () => { server.resetHandlers() - + server.use(Mocks.JSONRPC(Mocks.presets.basic)) + const provider = new 
ethers.JsonRpcProvider('https://api.calibration.node.glif.io/rpc/v1') // Create fresh instances for each test - authHelper = new PDPAuthHelper(TEST_CONTRACT_ADDRESS, new ethers.Wallet(TEST_PRIVATE_KEY), TEST_CHAIN_ID) - pdpServer = new PDPServer(authHelper, SERVER_URL) + pdpServer = new PDPServer({ + client: await signerToConnectorClient(new ethers.Wallet(TEST_PRIVATE_KEY), provider), + endpoint: SERVER_URL, + }) }) describe('PDPServer', () => { it('should handle metadata in createDataSet', async () => { - const dataSetMetadata: MetadataEntry[] = [ - { key: 'project', value: 'my-project' }, - { key: 'environment', value: 'production' }, - { key: METADATA_KEYS.WITH_CDN, value: '' }, - ] - const mockTxHash = '0x1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef' let capturedMetadata: Mocks.pdp.MetadataCapture | null = null @@ -63,25 +59,27 @@ describe('Metadata Support', () => { 1n, '0x70997970C51812dc3A010C7d01b50e0d17dc79C8', // payee '0x70997970C51812dc3A010C7d01b50e0d17dc79C8', // payer - dataSetMetadata, + { + project: 'my-project', + environment: 'production', + [METADATA_KEYS.WITH_CDN]: '', + }, TEST_CONTRACT_ADDRESS ) assert.equal(result.txHash, mockTxHash) assert.exists(capturedMetadata) assert.isNotNull(capturedMetadata) - assert.deepEqual((capturedMetadata as any).keys, ['project', 'environment', METADATA_KEYS.WITH_CDN]) - assert.deepEqual((capturedMetadata as any).values, ['my-project', 'production', '']) + assert.deepEqual((capturedMetadata as any).keys, ['environment', 'project', METADATA_KEYS.WITH_CDN]) + assert.deepEqual((capturedMetadata as any).values, ['production', 'my-project', '']) }) it('should handle metadata in addPieces', async () => { - const pieces = [asPieceCID('bafkzcibcd4bdomn3tgwgrh3g532zopskstnbrd2n3sxfqbze7rxt7vqn7veigmy') as any] - const metadata: MetadataEntry[][] = [ - [ - { key: 'contentType', value: 'application/json' }, - { key: 'version', value: '1.0.0' }, - ], - ] + const pieceCid = Piece.parse('bafkzcibcd4bdomn3tgwgrh3g532zopskstnbrd2n3sxfqbze7rxt7vqn7veigmy') + const metadata: MetadataObject = { + contentType: 'application/json', + version: '1.0.0', + } const dataSetId = 123 const mockTxHash = '0x1234567890abcdef' @@ -99,29 +97,16 @@ describe('Metadata Support', () => { ) // Test with matching metadata - const result = await pdpServer.addPieces(dataSetId, 1n, pieces, metadata) + const result = await pdpServer.addPieces(dataSetId, 1n, [{ pieceCid, metadata }]) assert.equal(result.txHash, mockTxHash) assert.exists(capturedPieceMetadata) assert.isNotNull(capturedPieceMetadata) assert.deepEqual((capturedPieceMetadata as any).keys[0], ['contentType', 'version']) assert.deepEqual((capturedPieceMetadata as any).values[0], ['application/json', '1.0.0']) - // Test with metadata length mismatch - should throw - const mismatchedMetadata: MetadataEntry[][] = [ - [{ key: 'contentType', value: 'application/json' }], - [{ key: 'version', value: '1.0.0' }], - ] - - try { - await pdpServer.addPieces(dataSetId, 1n, pieces, mismatchedMetadata) - assert.fail('Should have thrown an error') - } catch (error: any) { - assert.match(error.message, /Metadata length \(2\) must match pieces length \(1\)/) - } - // Test without metadata (should create empty arrays) capturedPieceMetadata = null - const resultNoMetadata = await pdpServer.addPieces(dataSetId, 1n, pieces) + const resultNoMetadata = await pdpServer.addPieces(dataSetId, 1n, [{ pieceCid }]) assert.equal(resultNoMetadata.txHash, mockTxHash) assert.exists(capturedPieceMetadata) 
assert.isNotNull(capturedPieceMetadata) @@ -145,17 +130,14 @@ describe('Metadata Support', () => { ) ) - // Test with metadata that includes withCDN - const metadataWithCDN: MetadataEntry[] = [ - { key: 'project', value: 'test' }, - { key: METADATA_KEYS.WITH_CDN, value: '' }, - ] - await pdpServer.createDataSet( 1n, '0x70997970C51812dc3A010C7d01b50e0d17dc79C8', // payee '0x70997970C51812dc3A010C7d01b50e0d17dc79C8', // payer - metadataWithCDN, + { + project: 'test', + [METADATA_KEYS.WITH_CDN]: '', + }, TEST_CONTRACT_ADDRESS ) assert.isNotNull(capturedMetadata) @@ -164,13 +146,14 @@ describe('Metadata Support', () => { // Test with metadata that doesn't include withCDN capturedMetadata = null - const metadataWithoutCDN: MetadataEntry[] = [{ key: 'project', value: 'test' }] await pdpServer.createDataSet( 1n, '0x70997970C51812dc3A010C7d01b50e0d17dc79C8', // payee '0x70997970C51812dc3A010C7d01b50e0d17dc79C8', // payer - metadataWithoutCDN, + { + project: 'test', + }, TEST_CONTRACT_ADDRESS ) assert.isNotNull(capturedMetadata) diff --git a/packages/synapse-sdk/src/test/pdp-auth.test.ts b/packages/synapse-sdk/src/test/pdp-auth.test.ts deleted file mode 100644 index ddbe8175..00000000 --- a/packages/synapse-sdk/src/test/pdp-auth.test.ts +++ /dev/null @@ -1,215 +0,0 @@ -/* globals describe it beforeEach */ - -/** - * Auth signature compatibility tests - * - * These tests verify that our SDK generates signatures compatible with - * the WarmStorage contract by testing against known - * reference signatures generated from Solidity. - */ - -import { assert } from 'chai' -import { ethers } from 'ethers' -import { PDPAuthHelper } from '../pdp/auth.ts' - -// Test fixtures generated from Solidity reference implementation -// These signatures are verified against WarmStorage contract -const FIXTURES = { - // Test private key from Solidity (never use in production!) 
- privateKey: '0x1234567890123456789012345678901234567890123456789012345678901234', - signerAddress: '0x2e988A386a799F506693793c6A5AF6B54dfAaBfB', - contractAddress: '0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f', - chainId: 31337, - domainSeparator: '0x62ef5e11007063d470b2e85638bf452adae7cc646a776144c9ecfc7a9c42a3ba', - - // EIP-712 domain separator components - domain: { - name: 'FilecoinWarmStorageService', - version: '1', - chainId: 31337, - verifyingContract: '0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f', - }, - - // Expected EIP-712 signatures - signatures: { - createDataSet: { - signature: - '0xc77965e2b6efd594629c44eb61127bc3133b65d08c25f8aa33e3021e7f46435845ab67ffbac96afc4b4671ecbd32d4869ca7fe1c0eaa5affa942d0abbfd98d601b', - digest: '0xd89be6a725302e66575d7a9c730191a84e2a624d0f0f3976194d0bd6f2927640', - clientDataSetId: 12345n, - payee: '0x70997970C51812dc3A010C7d01b50e0d17dc79C8', - metadata: [{ key: 'title', value: 'TestDataSet' }], - }, - addPieces: { - signature: - '0x1f09427806dc1e4c073a9fd7345fdd1919973abe3f3021594964134887c964d82e7b242019c79b21a8fa40331d14b59219b431846e4cdc08adb2e9470e7273161c', - digest: '0x86e18f0f5ee64c5b4f4838d23db6c6775b2ea863d9e29e1bc04c89d28d50f4ea', - clientDataSetId: 12345n, - nonce: 1n, - pieceCidBytes: [ - '0x01559120220500de6815dcb348843215a94de532954b60be550a4bec6e74555665e9a5ec4e0f3c', - '0x01559120227e03642a607ef886b004bf2c1978463ae1d4693ac0f410eb2d1b7a47fe205e5e750f', - ], - metadata: [[], []], - }, - schedulePieceRemovals: { - signature: - '0xcb8e645f2894fde89de54d4a54eb1e0d9871901c6fa1c2ee8a0390dc3a29e6cb2244d0561e3eca6452fa59efaab3d4b18a0b5b59ab52e233b3469422556ae9c61c', - digest: '0xef55929f8dd724ef4b43c5759db26878608f7e1277d168e3e621d3cd4ba682dd', - clientDataSetId: 12345n, - pieceIds: [1n, 3n, 5n], - }, - deleteDataSet: { - signature: - '0x94e366bd2f9bfc933a87575126715bccf128b77d9c6937e194023e13b54272eb7a74b7e6e26acf4341d9c56e141ff7ba154c37ea03e9c35b126fff1efe1a0c831c', - digest: '0x79df79ba922d913eccb0f9a91564ba3a1a81a0ea81d99a7cecf23cc3f425cafb', - clientDataSetId: 12345n, - }, - }, -} - -// Helper to create PieceCID CIDs from the test piece digests -const PIECE_DATA: string[] = [ - 'bafkzcibcauan42av3szurbbscwuu3zjssvfwbpsvbjf6y3tukvlgl2nf5rha6pa', - 'bafkzcibcpybwiktap34inmaex4wbs6cghlq5i2j2yd2bb2zndn5ep7ralzphkdy', -] - -describe('Auth Signature Compatibility', () => { - let authHelper: PDPAuthHelper - - let signer: ethers.Wallet - - beforeEach(() => { - // Create signer from test private key - signer = new ethers.Wallet(FIXTURES.privateKey) - - // Create PDPAuthHelper with test contract address and chain ID - authHelper = new PDPAuthHelper(FIXTURES.contractAddress, signer, BigInt(FIXTURES.chainId)) - - // Verify test setup - assert.strictEqual(signer.address, FIXTURES.signerAddress) - }) - - it('should generate CreateDataSet signature matching Solidity reference', async () => { - const result = await authHelper.signCreateDataSet( - FIXTURES.signatures.createDataSet.clientDataSetId, - FIXTURES.signatures.createDataSet.payee, - FIXTURES.signatures.createDataSet.metadata - ) - - // Verify signature matches exactly - assert.strictEqual( - result.signature, - FIXTURES.signatures.createDataSet.signature, - 'CreateDataSet signature should match Solidity reference' - ) - - // Verify signed data can be used to recover signer - // For EIP-712, signedData is already the message hash - const recoveredSigner = ethers.recoverAddress(result.signedData, result.signature) - assert.strictEqual(recoveredSigner.toLowerCase(), 
FIXTURES.signerAddress.toLowerCase()) - }) - - it('should generate AddPieces signature matching Solidity reference', async () => { - const result = await authHelper.signAddPieces( - FIXTURES.signatures.addPieces.clientDataSetId, - FIXTURES.signatures.addPieces.nonce, - PIECE_DATA, - FIXTURES.signatures.addPieces.metadata - ) - - // Verify signature matches exactly - assert.strictEqual( - result.signature, - FIXTURES.signatures.addPieces.signature, - 'AddPieces signature should match Solidity reference' - ) - - // Verify signed data can be used to recover signer - // For EIP-712, signedData is already the message hash - const recoveredSigner = ethers.recoverAddress(result.signedData, result.signature) - assert.strictEqual(recoveredSigner.toLowerCase(), FIXTURES.signerAddress.toLowerCase()) - }) - - it('should generate SchedulePieceRemovals signature matching Solidity reference', async () => { - const result = await authHelper.signSchedulePieceRemovals( - FIXTURES.signatures.schedulePieceRemovals.clientDataSetId, - FIXTURES.signatures.schedulePieceRemovals.pieceIds - ) - - // Verify signature matches exactly - assert.strictEqual( - result.signature, - FIXTURES.signatures.schedulePieceRemovals.signature, - 'SchedulePieceRemovals signature should match Solidity reference' - ) - - // Verify signed data can be used to recover signer - // For EIP-712, signedData is already the message hash - const recoveredSigner = ethers.recoverAddress(result.signedData, result.signature) - assert.strictEqual(recoveredSigner.toLowerCase(), FIXTURES.signerAddress.toLowerCase()) - }) - - it('should generate DeleteDataSet signature matching Solidity reference', async () => { - const result = await authHelper.signDeleteDataSet(FIXTURES.signatures.deleteDataSet.clientDataSetId) - - // Verify signature matches exactly - assert.strictEqual( - result.signature, - FIXTURES.signatures.deleteDataSet.signature, - 'DeleteDataSet signature should match Solidity reference' - ) - - // Verify signed data can be used to recover signer - // For EIP-712, signedData is already the message hash - const recoveredSigner = ethers.recoverAddress(result.signedData, result.signature) - assert.strictEqual(recoveredSigner.toLowerCase(), FIXTURES.signerAddress.toLowerCase()) - }) - - it('should handle bigint values correctly', async () => { - const result = await authHelper.signCreateDataSet( - BigInt(12345), // Use bigint instead of number - FIXTURES.signatures.createDataSet.payee, - FIXTURES.signatures.createDataSet.metadata - ) - - // Should produce same signature as number version - assert.strictEqual(result.signature, FIXTURES.signatures.createDataSet.signature) - }) - - it('should generate consistent signatures', async () => { - // Generate same signature multiple times - const sig1 = await authHelper.signCreateDataSet( - FIXTURES.signatures.createDataSet.clientDataSetId, - FIXTURES.signatures.createDataSet.payee, - FIXTURES.signatures.createDataSet.metadata - ) - - const sig2 = await authHelper.signCreateDataSet( - FIXTURES.signatures.createDataSet.clientDataSetId, - FIXTURES.signatures.createDataSet.payee, - FIXTURES.signatures.createDataSet.metadata - ) - - // Signatures should be identical (deterministic) - assert.strictEqual(sig1.signature, sig2.signature) - assert.strictEqual(sig1.signedData, sig2.signedData) - }) - - it('should handle empty piece data array', async () => { - const result = await authHelper.signAddPieces( - FIXTURES.signatures.addPieces.clientDataSetId, - FIXTURES.signatures.addPieces.nonce, - [] // empty array - 
) - - // Should generate valid signature (different from test fixture) - assert.match(result.signature, /^0x[0-9a-f]{130}$/i) - assert.isDefined(result.signedData) - - // Should be able to recover signer - // For EIP-712, signedData is already the message hash - const recoveredSigner = ethers.recoverAddress(result.signedData, result.signature) - assert.strictEqual(recoveredSigner.toLowerCase(), FIXTURES.signerAddress.toLowerCase()) - }) -}) diff --git a/packages/synapse-sdk/src/test/pdp-server.test.ts b/packages/synapse-sdk/src/test/pdp-server.test.ts index c014ec1e..2a84f5f8 100644 --- a/packages/synapse-sdk/src/test/pdp-server.test.ts +++ b/packages/synapse-sdk/src/test/pdp-server.test.ts @@ -9,21 +9,21 @@ import { AddPiecesError, CreateDataSetError, - DeletePieceError, - FindPieceError, GetDataSetError, LocationHeaderError, PostPieceError, } from '@filoz/synapse-core/errors' import * as Mocks from '@filoz/synapse-core/mocks' +import * as Piece from '@filoz/synapse-core/piece' import { asPieceCID, calculate as calculatePieceCID } from '@filoz/synapse-core/piece' -import * as SP from '@filoz/synapse-core/sp' import { assert } from 'chai' import { ethers } from 'ethers' import { setup } from 'iso-web/msw' import { HttpResponse, http } from 'msw' -import { PDPAuthHelper, PDPServer } from '../pdp/index.ts' +import type { Address } from 'viem' +import { PDPServer } from '../pdp/index.ts' import type { PDPAddPiecesInput } from '../pdp/server.ts' +import { signerToConnectorClient } from '../utils/viem.ts' // mock server for testing const server = setup() @@ -31,12 +31,10 @@ const server = setup() describe('PDPServer', () => { let pdpServer: PDPServer let signer: ethers.Wallet - let authHelper: PDPAuthHelper let serverUrl: string const TEST_PRIVATE_KEY = '0x1234567890123456789012345678901234567890123456789012345678901234' const TEST_CONTRACT_ADDRESS = '0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f' - const TEST_CHAIN_ID = 31337 before(async () => { await server.start() @@ -46,36 +44,20 @@ describe('PDPServer', () => { server.stop() }) - beforeEach(() => { + beforeEach(async () => { server.resetHandlers() - + server.use(Mocks.JSONRPC(Mocks.presets.basic)) + const provider = new ethers.JsonRpcProvider('https://api.calibration.node.glif.io/rpc/v1') // Create test signer and auth helper signer = new ethers.Wallet(TEST_PRIVATE_KEY) - authHelper = new PDPAuthHelper(TEST_CONTRACT_ADDRESS, signer, BigInt(TEST_CHAIN_ID)) // Start mock server serverUrl = 'http://pdp.local' // Create PDPServer instance - pdpServer = new PDPServer(authHelper, serverUrl) - }) - - describe('constructor', () => { - it('should create PDPServer with valid service URL', () => { - const tool = new PDPServer(authHelper, 'https://example.com/pdp') - assert.strictEqual(tool.getServiceURL(), 'https://example.com/pdp') - }) - - it('should remove trailing slash from service URL', () => { - const tool = new PDPServer(authHelper, 'https://example.com/pdp/') - assert.strictEqual(tool.getServiceURL(), 'https://example.com/pdp') - }) - - it('should throw error for empty service URL', () => { - assert.throws(() => { - // eslint-disable-next-line no-new - new PDPServer(authHelper, '') - }, 'PDP service URL is required') + pdpServer = new PDPServer({ + client: await signerToConnectorClient(new ethers.Wallet(TEST_PRIVATE_KEY), provider), + endpoint: serverUrl, }) }) @@ -96,8 +78,8 @@ describe('PDPServer', () => { const result = await pdpServer.createDataSet( 0n, // clientDataSetId '0x70997970C51812dc3A010C7d01b50e0d17dc79C8', // payee - await 
signer.getAddress(), // payer - [], // metadata (empty for no CDN) + (await signer.getAddress()) as Address, // payer + {}, // metadata (empty for no CDN) TEST_CONTRACT_ADDRESS // recordKeeper ) @@ -118,8 +100,8 @@ describe('PDPServer', () => { await pdpServer.createDataSet( 0n, // clientDataSetId '0x70997970C51812dc3A010C7d01b50e0d17dc79C8', // payee - await signer.getAddress(), // payer - [], // metadata (empty for no CDN) + (await signer.getAddress()) as Address, // payer + {}, // metadata (empty for no CDN) TEST_CONTRACT_ADDRESS // recordKeeper ) assert.fail('Should have thrown error for unexpected location header') @@ -141,8 +123,8 @@ describe('PDPServer', () => { await pdpServer.createDataSet( 0n, // clientDataSetId '0x70997970C51812dc3A010C7d01b50e0d17dc79C8', // payee - await signer.getAddress(), // payer - [], // metadata (empty for no CDN) + (await signer.getAddress()) as Address, // payer + {}, // metadata (empty for no CDN) TEST_CONTRACT_ADDRESS // recordKeeper ) assert.fail('Should have thrown error for no Location header') @@ -174,8 +156,8 @@ describe('PDPServer', () => { await pdpServer.createDataSet( 0n, // clientDataSetId '0x70997970C51812dc3A010C7d01b50e0d17dc79C8', // payee - await signer.getAddress(), // payer - [], // metadata (empty for no CDN) + (await signer.getAddress()) as Address, // payer + {}, // metadata (empty for no CDN) TEST_CONTRACT_ADDRESS // recordKeeper ) assert.fail('Should have thrown error for no Location header') @@ -213,8 +195,8 @@ invariant failure: insufficient funds to cover lockup after function execution` await pdpServer.createDataSet( 0n, // clientDataSetId '0x70997970C51812dc3A010C7d01b50e0d17dc79C8', // payee - await signer.getAddress(), // payer - [], // metadata (empty for no CDN) + (await signer.getAddress()) as Address, // payer + {}, // metadata (empty for no CDN) TEST_CONTRACT_ADDRESS // recordKeeper ) assert.fail('Should have thrown error for no Location header') @@ -253,8 +235,8 @@ InvalidSignature(address expected, address actual) await pdpServer.createDataSet( 0n, // clientDataSetId '0x70997970C51812dc3A010C7d01b50e0d17dc79C8', // payee - await signer.getAddress(), // payer - [], // metadata (empty for no CDN) + (await signer.getAddress()) as Address, // payer + {}, // metadata (empty for no CDN) TEST_CONTRACT_ADDRESS // recordKeeper ) assert.fail('Should have thrown error for no Location header') @@ -284,9 +266,9 @@ InvalidSignature(address expected, address actual) const result = await pdpServer.createAndAddPieces( 0n, '0x70997970C51812dc3A010C7d01b50e0d17dc79C8', - await signer.getAddress(), + (await signer.getAddress()) as Address, TEST_CONTRACT_ADDRESS, - validPieceCid, + [{ pieceCid: Piece.parse(validPieceCid[0]) }], {} ) @@ -295,100 +277,6 @@ InvalidSignature(address expected, address actual) }) }) - describe('getPieceAdditionStatus', () => { - it('should handle successful status check', async () => { - const mockTxHash = '0x7890abcdef1234567890abcdef1234567890abcdef1234567890abcdef123456' - const mockResponse = { - txHash: mockTxHash, - txStatus: 'confirmed', - dataSetId: 1, - pieceCount: 2, - addMessageOk: true, - confirmedPieceIds: [101, 102], - } - - server.use( - http.get('http://pdp.local/pdp/data-sets/:id/pieces/added/:txHash', ({ params }) => { - assert.strictEqual(params.id, '1') - assert.strictEqual(params.txHash, mockTxHash) - - return HttpResponse.json(mockResponse, { - status: 200, - }) - }) - ) - - const result = await pdpServer.getPieceAdditionStatus(1, mockTxHash) - assert.deepStrictEqual(result, 
mockResponse) - }) - - it('should handle pending status', async () => { - const mockTxHash = '0x7890abcdef1234567890abcdef1234567890abcdef1234567890abcdef123456' - const mockResponse = { - txHash: mockTxHash, - txStatus: 'pending', - dataSetId: 1, - pieceCount: 2, - addMessageOk: null, - confirmedPieceIds: undefined, - } - - server.use( - http.get('http://pdp.local/pdp/data-sets/:id/pieces/added/:txHash', ({ params }) => { - assert.strictEqual(params.id, '1') - assert.strictEqual(params.txHash, mockTxHash) - - return HttpResponse.json(mockResponse, { - status: 200, - }) - }) - ) - - const result = await pdpServer.getPieceAdditionStatus(1, mockTxHash) - assert.strictEqual(result.txStatus, 'pending') - assert.isNull(result.addMessageOk) - assert.isUndefined(result.confirmedPieceIds) - }) - - it('should handle not found status', async () => { - const mockTxHash = '0x7890abcdef1234567890abcdef1234567890abcdef1234567890abcdef123456' - server.use( - http.get('http://pdp.local/pdp/data-sets/:id/pieces/added/:txHash', () => { - return new HttpResponse(null, { - status: 404, - }) - }) - ) - - try { - await pdpServer.getPieceAdditionStatus(1, mockTxHash) - assert.fail('Should have thrown error for not found status') - } catch (error) { - assert.include((error as Error).message, `Piece addition not found for transaction: ${mockTxHash}`) - } - }) - - it('should handle server errors', async () => { - const mockTxHash = '0x7890abcdef1234567890abcdef1234567890abcdef1234567890abcdef123456' - server.use( - http.get('http://pdp.local/pdp/data-sets/:id/pieces/added/:txHash', () => { - return HttpResponse.text('Database error', { - status: 500, - }) - }) - ) - - try { - await pdpServer.getPieceAdditionStatus(1, mockTxHash) - assert.fail('Should have thrown error for server error') - } catch (error) { - assert.include((error as Error).message, 'Failed to get piece addition status') - assert.include((error as Error).message, '500') - assert.include((error as Error).message, 'Database error') - } - }) - }) - describe('addPieces', () => { it('should validate input parameters', async () => { // Test empty piece entries @@ -398,16 +286,6 @@ InvalidSignature(address expected, address actual) } catch (error) { assert.include((error as Error).message, 'At least one piece must be provided') } - - // Test invalid PieceCID - const invalidPieceCid = 'invalid-piece-link-string' - - try { - await pdpServer.addPieces(1, 0n, [invalidPieceCid]) - assert.fail('Should have thrown error for invalid PieceCID') - } catch (error) { - assert.include((error as Error).message, 'Invalid PieceCID') - } }) it('should handle successful piece addition', async () => { @@ -441,7 +319,7 @@ InvalidSignature(address expected, address actual) ) // Should not throw - const result = await pdpServer.addPieces(1, 0n, validPieceCid) + const result = await pdpServer.addPieces(1, 0n, [{ pieceCid: Piece.parse(validPieceCid[0]) }]) assert.isDefined(result) assert.isDefined(result.message) }) @@ -459,7 +337,7 @@ InvalidSignature(address expected, address actual) ) try { - await pdpServer.addPieces(1, 0n, validPieceCid) + await pdpServer.addPieces(1, 0n, [{ pieceCid: Piece.parse(validPieceCid[0]) }]) assert.fail('Should have thrown error for server error') } catch (error) { assert.instanceOf(error, AddPiecesError) @@ -513,7 +391,11 @@ Invalid piece CID` } ) ) - const result = await pdpServer.addPieces(1, 0n, multiplePieceCid) + const result = await pdpServer.addPieces( + 1, + 0n, + multiplePieceCid.map((pieceCid) => ({ pieceCid })) + ) 
assert.isDefined(result) assert.isDefined(result.message) }) @@ -533,7 +415,7 @@ Invalid piece CID` }) ) - const result = await pdpServer.addPieces(1, 0n, validPieceCid) + const result = await pdpServer.addPieces(1, 0n, [{ pieceCid: Piece.parse(validPieceCid[0]) }]) assert.isDefined(result) assert.isDefined(result.message) assert.strictEqual(result.txHash, mockTxHash) @@ -542,351 +424,6 @@ Invalid piece CID` }) }) - describe('deletePiece', () => { - it('should handle successful delete', async () => { - const mockTxHash = '0x1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef' - const mockResponse = { - txHash: mockTxHash, - } - server.use( - // check that extraData is included - http.delete('http://pdp.local/pdp/data-sets/1/pieces/2', async ({ request }) => { - const body = await request.json() - assert.hasAllKeys(body, ['extraData']) - return HttpResponse.json(mockResponse, { - status: 200, - }) - }) - ) - const result = await pdpServer.deletePiece(1, 0n, 2) - assert.strictEqual(result, mockTxHash) - }) - - it('should handle server errors', async () => { - server.use( - http.delete('http://pdp.local/pdp/data-sets/1/pieces/2', async () => { - return HttpResponse.text('Database error', { - status: 500, - }) - }) - ) - try { - await pdpServer.deletePiece(1, 0n, 2) - assert.fail('Should have thrown error for server error') - } catch (error: any) { - assert.instanceOf(error, DeletePieceError) - assert.equal(error.shortMessage, 'Failed to delete piece.') - assert.equal( - error.message, - `Failed to delete piece. - -Details: Service Provider PDP -Database error` - ) - } - }) - }) - - describe('getDataSetCreationStatus', () => { - it('should handle successful status check', async () => { - const mockTxHash = '0x1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef' - const mockResponse = { - createMessageHash: mockTxHash, - dataSetCreated: true, - service: 'test-service', - txStatus: 'confirmed', - ok: true, - dataSetId: 123, - } - - server.use( - http.get('http://pdp.local/pdp/data-sets/created/:tx', async () => { - return HttpResponse.json(mockResponse, { - status: 200, - }) - }) - ) - - const result = await pdpServer.getDataSetCreationStatus(mockTxHash) - assert.deepStrictEqual(result, mockResponse) - }) - - it('should handle not found status', async () => { - const mockTxHash = '0x1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef' - - server.use( - http.get('http://pdp.local/pdp/data-sets/created/:tx', async () => { - return HttpResponse.text(undefined, { - status: 404, - }) - }) - ) - - try { - await pdpServer.getDataSetCreationStatus(mockTxHash) - assert.fail('Should have thrown error for not found status') - } catch (error) { - assert.include((error as Error).message, `Data set creation not found for transaction hash: ${mockTxHash}`) - } - }) - }) - - describe('findPiece', () => { - it('should find a piece successfully', async () => { - const mockPieceCid = 'bafkzcibcd4bdomn3tgwgrh3g532zopskstnbrd2n3sxfqbze7rxt7vqn7veigmy' - - server.use(Mocks.pdp.findPieceHandler(mockPieceCid, true)) - - const result = await pdpServer.findPiece(mockPieceCid) - assert.strictEqual(result.pieceCid.toString(), mockPieceCid) - }) - - it('should handle piece not found', async () => { - SP.setTimeout(100) - const mockPieceCid = 'bafkzcibcd4bdomn3tgwgrh3g532zopskstnbrd2n3sxfqbze7rxt7vqn7veigmy' - - server.use(Mocks.pdp.findPieceHandler(mockPieceCid, false)) - - try { - await pdpServer.findPiece(mockPieceCid) - assert.fail('Should have thrown error for not found') - } catch 
(error: any) { - assert.instanceOf(error, FindPieceError) - assert.equal(error.shortMessage, 'Failed to find piece.') - assert.equal( - error.message, - `Failed to find piece. - -Details: Service Provider PDP -Timeout waiting for piece to be found` - ) - } - }) - - it('should validate PieceCID input', async () => { - const invalidPieceCid = 'invalid-piece-cid-string' - - try { - await pdpServer.findPiece(invalidPieceCid) - assert.fail('Should have thrown error for invalid PieceCID') - } catch (error: any) { - assert.include(error.message, 'Invalid PieceCID') - } - }) - - it('should handle server errors', async () => { - const mockPieceCid = 'bafkzcibcd4bdomn3tgwgrh3g532zopskstnbrd2n3sxfqbze7rxt7vqn7veigmy' - server.use( - http.get('http://pdp.local/pdp/piece', async () => { - return HttpResponse.text('Database error', { - status: 500, - }) - }) - ) - - try { - await pdpServer.findPiece(mockPieceCid) - assert.fail('Should have thrown error for server error') - } catch (error: any) { - assert.instanceOf(error, FindPieceError) - assert.equal(error.shortMessage, 'Failed to find piece.') - assert.equal( - error.message, - `Failed to find piece. - -Details: Service Provider PDP -Database error` - ) - } - }) - - it('should retry on 202 status and eventually succeed', async () => { - SP.setTimeout(10000) // Set shorter timeout for test - const mockPieceCid = 'bafkzcibcd4bdomn3tgwgrh3g532zopskstnbrd2n3sxfqbze7rxt7vqn7veigmy' - let attemptCount = 0 - - server.use( - http.get('http://pdp.local/pdp/piece', async () => { - attemptCount++ - // Return 202 for first 2 attempts, then 200 - if (attemptCount < 3) { - return HttpResponse.json({ message: 'Processing' }, { status: 202 }) - } - return HttpResponse.json({ pieceCid: mockPieceCid }, { status: 200 }) - }) - ) - - const result = await pdpServer.findPiece(mockPieceCid) - assert.strictEqual(result.pieceCid.toString(), mockPieceCid) - assert.isAtLeast(attemptCount, 3, 'Should have retried at least 3 times') - }) - }) - - describe('getPieceStatus', () => { - it('should successfully get piece status', async () => { - const mockPieceCid = 'bafkzcibdy4hapci46px57mg3znrwydsv7x7rxisg7l7ti245wxwwfmiftgmdmbqk' - const mockResponse = { - pieceCid: mockPieceCid, - status: 'retrieved', - indexed: true, - advertised: true, - retrieved: true, - retrievedAt: '2025-10-11T13:35:26.541494+02:00', - } - - server.use( - http.get('http://pdp.local/pdp/piece/:pieceCid/status', async () => { - return HttpResponse.json(mockResponse, { - status: 200, - }) - }) - ) - - const result = await pdpServer.getPieceStatus(mockPieceCid) - assert.deepStrictEqual(result, mockResponse) - }) - - it('should handle pending status', async () => { - const mockPieceCid = 'bafkzcibdy4hapci46px57mg3znrwydsv7x7rxisg7l7ti245wxwwfmiftgmdmbqk' - const mockResponse = { - pieceCid: mockPieceCid, - status: 'pending', - indexed: false, - advertised: false, - retrieved: false, - } - - server.use( - http.get('http://pdp.local/pdp/piece/:pieceCid/status', async () => { - return HttpResponse.json(mockResponse, { - status: 200, - }) - }) - ) - - const result = await pdpServer.getPieceStatus(mockPieceCid) - assert.strictEqual(result.status, 'pending') - assert.strictEqual(result.indexed, false) - assert.strictEqual(result.advertised, false) - assert.strictEqual(result.retrieved, false) - assert.isUndefined(result.retrievedAt) - }) - - it('should handle piece not found (404)', async () => { - const mockPieceCid = 'bafkzcibcd4bdomn3tgwgrh3g532zopskstnbrd2n3sxfqbze7rxt7vqn7veigmy' - - server.use( - 
http.get('http://pdp.local/pdp/piece/:pieceCid/status', async () => { - return HttpResponse.text('Piece not found or does not belong to service', { - status: 404, - }) - }) - ) - - try { - await pdpServer.getPieceStatus(mockPieceCid) - assert.fail('Should have thrown error for not found') - } catch (error: any) { - assert.include(error.message, 'Piece not found or does not belong to service') - } - }) - - it('should validate PieceCID input', async () => { - const invalidPieceCid = 'invalid-piece-cid-string' - - try { - await pdpServer.getPieceStatus(invalidPieceCid) - assert.fail('Should have thrown error for invalid PieceCID') - } catch (error: any) { - assert.include(error.message, 'Invalid PieceCID') - } - }) - - it('should handle server errors', async () => { - const mockPieceCid = 'bafkzcibcd4bdomn3tgwgrh3g532zopskstnbrd2n3sxfqbze7rxt7vqn7veigmy' - server.use( - http.get('http://pdp.local/pdp/piece/:pieceCid/status', async () => { - return HttpResponse.text('Database error', { - status: 500, - }) - }) - ) - - try { - await pdpServer.getPieceStatus(mockPieceCid) - assert.fail('Should have thrown error for server error') - } catch (error: any) { - assert.include(error.message, 'Failed to get piece status') - assert.include(error.message, '500') - assert.include(error.message, 'Database error') - } - }) - - it('should validate response structure', async () => { - const mockPieceCid = asPieceCID('bafkzcibcd4bdomn3tgwgrh3g532zopskstnbrd2n3sxfqbze7rxt7vqn7veigmy') - assert.isNotNull(mockPieceCid) - const invalidResponse = { - pieceCid: mockPieceCid.toString(), - status: 'retrieved', - // Missing required fields - } - - server.use( - http.get('http://pdp.local/pdp/piece/:pieceCid/status', async () => { - return HttpResponse.json(invalidResponse, { - status: 200, - }) - }) - ) - - try { - await pdpServer.getPieceStatus(mockPieceCid) - assert.fail('Should have thrown error for invalid response format') - } catch (error: any) { - assert.include(error.message, 'Invalid piece status response format') - } - }) - - it('should handle different status values', async () => { - const mockPieceCid = 'bafkzcibdy4hapci46px57mg3znrwydsv7x7rxisg7l7ti245wxwwfmiftgmdmbqk' - const statuses = ['pending', 'indexing', 'creating_ad', 'announced', 'retrieved'] - - for (const status of statuses) { - const mockResponse = { - pieceCid: mockPieceCid, - status, - indexed: status === 'creating_ad' || status === 'announced' || status === 'retrieved', - advertised: status === 'announced' || status === 'retrieved', - retrieved: status === 'retrieved', - } - - server.use( - http.get('http://pdp.local/pdp/piece/:pieceCid/status', async () => { - return HttpResponse.json(mockResponse, { - status: 200, - }) - }) - ) - - const result = await pdpServer.getPieceStatus(mockPieceCid) - assert.strictEqual(result.status, status) - assert.strictEqual(result.indexed, mockResponse.indexed) - assert.strictEqual(result.advertised, mockResponse.advertised) - assert.strictEqual(result.retrieved, mockResponse.retrieved) - } - }) - }) - - describe('getters', () => { - it('should return service URL', () => { - assert.strictEqual(pdpServer.getServiceURL(), serverUrl) - }) - - it('should return PDPAuthHelper instance', () => { - assert.strictEqual(pdpServer.getAuthHelper(), authHelper) - }) - }) - describe('uploadPiece', () => { it('should successfully upload data', async () => { const testData = new Uint8Array(127).fill(1) @@ -960,224 +497,6 @@ Failed to create upload session: Database error` }) }) - describe('downloadPiece', () => { - 
it('should successfully download and verify piece', async () => { - const testData = new Uint8Array([1, 2, 3, 4, 5, 6, 7, 8]) - const testPieceCid = calculatePieceCID(testData).toString() - - server.use( - http.get('http://pdp.local/piece/:pieceCid', async () => { - return HttpResponse.arrayBuffer(testData.buffer) - }) - ) - - const result = await pdpServer.downloadPiece(testPieceCid) - assert.deepEqual(result, testData) - }) - - it('should throw on download failure', async () => { - const mockPieceCid = 'bafkzcibcd4bdomn3tgwgrh3g532zopskstnbrd2n3sxfqbze7rxt7vqn7veigmy' - - server.use( - http.get('http://pdp.local/piece/:pieceCid', async () => { - return HttpResponse.text('Not Found', { - status: 404, - }) - }) - ) - - try { - await pdpServer.downloadPiece(mockPieceCid) - assert.fail('Should have thrown error') - } catch (error: any) { - assert.include(error.message, 'Download failed') - assert.include(error.message, '404') - } - }) - - it('should reject invalid PieceCID', async () => { - try { - await pdpServer.downloadPiece('invalid-piece-link-string') - assert.fail('Should have thrown error') - } catch (error: any) { - assert.include(error.message, 'Invalid PieceCID') - } - }) - - it('should throw on PieceCID verification failure', async () => { - const testData = new Uint8Array([1, 2, 3, 4, 5, 6, 7, 8]) - const testPieceCid = calculatePieceCID(testData).toString() - const wrongData = new Uint8Array([9, 9, 9, 9]) // Different data - - server.use( - http.get('http://pdp.local/piece/:pieceCid', async () => { - return HttpResponse.arrayBuffer(wrongData.buffer) - }) - ) - - try { - await pdpServer.downloadPiece(testPieceCid) - assert.fail('Should have thrown error') - } catch (error: any) { - assert.include(error.message, 'PieceCID verification failed') - } - }) - - it('should handle null response body', async () => { - const mockPieceCid = 'bafkzcibcd4bdomn3tgwgrh3g532zopskstnbrd2n3sxfqbze7rxt7vqn7veigmy' - - server.use( - http.get('http://pdp.local/piece/:pieceCid', async () => { - return new HttpResponse() - }) - ) - - try { - await pdpServer.downloadPiece(mockPieceCid) - assert.fail('Should have thrown error') - } catch (error: any) { - // Accept either error message as HttpResponse() behaves differently in Node vs browser - assert.match(error.message, /Response body is (null|empty)/) - } - }) - - it('should correctly stream and verify chunked data', async () => { - const testData = new Uint8Array([1, 2, 3, 4, 5, 6, 7, 8]) - const testPieceCid = calculatePieceCID(testData).toString() - - server.use( - http.get('http://pdp.local/piece/:pieceCid', async () => { - // Split test data into chunks - const chunk1 = testData.slice(0, 4) - const chunk2 = testData.slice(4) - - // Create readable stream that emits chunks - const stream = new ReadableStream({ - async start(controller) { - controller.enqueue(chunk1) - // Small delay to simulate network - await new Promise((resolve) => setTimeout(resolve, 10)) - controller.enqueue(chunk2) - controller.close() - }, - }) - return new HttpResponse(stream, { - status: 200, - }) - }) - ) - - const result = await pdpServer.downloadPiece(testPieceCid) - // Verify we got all the data correctly reassembled - assert.deepEqual(result, testData) - }) - }) - - describe('ping', () => { - it('should successfully ping a healthy provider', async () => { - server.use( - http.get('http://pdp.local/pdp/ping', async () => { - return new HttpResponse(null, { - status: 200, - }) - }) - ) - await pdpServer.ping() - }) - - it('should throw error when provider returns non-200 
status', async () => { - server.use( - http.get('http://pdp.local/pdp/ping', async () => { - return HttpResponse.text('Server is down', { - status: 500, - }) - }) - ) - try { - await pdpServer.ping() - assert.fail('Should have thrown error') - } catch (error: any) { - assert.include(error.message, 'Provider ping failed') - assert.include(error.message, '500') - assert.include(error.message, 'Internal Server Error') - assert.include(error.message, 'Server is down') - } - }) - - it('should throw error when provider returns 404', async () => { - server.use( - http.get('http://pdp.local/pdp/ping', async () => { - return HttpResponse.text('Ping endpoint not found', { - status: 404, - }) - }) - ) - - try { - await pdpServer.ping() - assert.fail('Should have thrown error') - } catch (error: any) { - assert.include(error.message, 'Provider ping failed') - assert.include(error.message, '404') - assert.include(error.message, 'Not Found') - } - }) - - it('should handle fetch failure', async () => { - server.use( - http.get('http://pdp.local/pdp/ping', async () => { - return HttpResponse.error() - }) - ) - - try { - await pdpServer.ping() - assert.fail('Should have thrown error') - } catch (error: any) { - assert.include(error.message, 'Failed to fetch') - } - }) - - it('should handle error when response.text() fails', async () => { - server.use( - http.get('http://pdp.local/pdp/ping', async () => { - return new HttpResponse(2, { - status: 503, - statusText: 'Service Unavailable', - headers: { - 'Content-Encoding': 'gzip', - }, - }) - }) - ) - - try { - await pdpServer.ping() - assert.fail('Should have thrown error') - } catch (error: any) { - assert.include(error.message, 'Provider ping failed') - assert.include(error.message, '503') - assert.include(error.message, 'Service Unavailable') - } - }) - - it('should use correct URL endpoint', async () => { - let capturedUrl: string = '' - - server.use( - http.get('http://pdp.local/pdp/ping', async ({ request }) => { - capturedUrl = request.url - return new HttpResponse(null, { - status: 200, - }) - }) - ) - - await pdpServer.ping() - assert.strictEqual(capturedUrl, `${serverUrl}/pdp/ping`) - }) - }) - describe('getDataSet', () => { it('should successfully fetch data set data', async () => { const mockDataSetData = { diff --git a/packages/synapse-sdk/src/test/synapse.test.ts b/packages/synapse-sdk/src/test/synapse.test.ts index 308a60a6..2743af1a 100644 --- a/packages/synapse-sdk/src/test/synapse.test.ts +++ b/packages/synapse-sdk/src/test/synapse.test.ts @@ -47,14 +47,6 @@ describe('Synapse', () => { assert.isTrue(synapse.payments instanceof PaymentsService) }) - it('should create instance with provider', async () => { - server.use(Mocks.JSONRPC(Mocks.presets.basic)) - const synapse = await Synapse.create({ provider }) - assert.exists(synapse) - assert.exists(synapse.payments) - assert.isTrue(synapse.payments instanceof PaymentsService) - }) - it('should create instance with private key', async () => { server.use(Mocks.JSONRPC(Mocks.presets.basic)) const privateKey = '0x0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef' @@ -127,7 +119,7 @@ describe('Synapse', () => { }) }) - describe('Network validation', () => { + describe.skip('Network validation', () => { it('should reject unsupported networks', async () => { // Create mock provider with unsupported chain ID // const unsupportedProvider = createMockProvider(999999) @@ -295,7 +287,7 @@ describe('Synapse', () => { } const context = await synapse.storage.createContext() - 
assert.equal((context as any)._signer, sessionKeySigner) + assert.equal((context as any)._synapse.getSigner(), sessionKeySigner) const info = await context.preflightUpload(127) assert.isTrue(info.allowanceCheck.sufficient) @@ -332,7 +324,7 @@ describe('Synapse', () => { it('should get provider info for valid approved provider', async () => { server.use(Mocks.JSONRPC(Mocks.presets.basic)) - const synapse = await Synapse.create({ provider }) + const synapse = await Synapse.create({ signer }) const providerInfo = await synapse.getProviderInfo(Mocks.ADDRESSES.serviceProvider1) assert.ok(isAddressEqual(providerInfo.serviceProvider as Address, Mocks.ADDRESSES.serviceProvider1)) diff --git a/packages/synapse-sdk/src/test/warm-storage-service.test.ts b/packages/synapse-sdk/src/test/warm-storage-service.test.ts index 9c6ac1e3..4dcb69fe 100644 --- a/packages/synapse-sdk/src/test/warm-storage-service.test.ts +++ b/packages/synapse-sdk/src/test/warm-storage-service.test.ts @@ -1119,338 +1119,7 @@ describe('WarmStorageService', () => { }) }) - describe('Comprehensive Status Methods', () => { - it('should combine PDP server and chain verification status', async () => { - const mockTxHash = '0x1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef' - server.use( - Mocks.JSONRPC({ - ...Mocks.presets.basic, - eth_getTransactionByHash: (params) => { - const hash = params[0] - assert.equal(hash, mockTxHash) - return { - hash: mockTxHash, - from: Mocks.ADDRESSES.client1, - gas: '0x5208', - value: '0x0', - nonce: '0x444', - input: '0x', - v: '0x01', - r: '0x4e2eef88cc6f2dc311aa3b1c8729b6485bd606960e6ae01522298278932c333a', - s: '0x5d0e08d8ecd6ed8034aa956ff593de9dc1d392e73909ef0c0f828918b58327c9', - } - }, - eth_getTransactionReceipt: (params) => { - const hash = params[0] - assert.equal(hash, mockTxHash) - return { - transactionHash: mockTxHash, - transactionIndex: '0x10', - blockHash: '0xb91b7314248aaae06f080ad427dbae78b8c5daf72b2446cf843739aef80c6417', - status: '0x1', - blockNumber: '0x3039', // 12345 - cumulativeGasUsed: '0x52080', - gasUsed: '0x186a0', // 100000 - logs: [makeDataSetCreatedLog(123, 1)], - } - }, - pdpVerifier: { - ...Mocks.presets.basic.pdpVerifier, - dataSetLive: () => [true], - }, - }) - ) - const warmStorageService = await createWarmStorageService() - // Create a mock PDPServer - const mockPDPServer: any = { - getDataSetCreationStatus: async (txHash: string) => { - assert.strictEqual(txHash, mockTxHash) - return { - createMessageHash: mockTxHash, - dataSetCreated: true, - service: 'test-service', - txStatus: 'confirmed', - ok: true, - dataSetId: 123, - } - }, - } - - const result = await warmStorageService.getComprehensiveDataSetStatus(mockTxHash, mockPDPServer) - - // Verify transaction hash is included - assert.strictEqual(result.txHash, mockTxHash) - assert.exists(result.serverStatus) - assert.exists(result.chainStatus) - - // Verify server status - using correct interface properties - assert.isTrue(result.serverStatus?.dataSetCreated) - assert.isTrue(result.serverStatus?.ok) - assert.strictEqual(result.serverStatus?.dataSetId, 123) - - // Verify chain status - using correct interface properties - assert.isTrue(result.chainStatus.transactionMined) - assert.isTrue(result.chainStatus.transactionSuccess) - assert.exists(result.chainStatus.dataSetId) - assert.strictEqual(result.chainStatus.dataSetId, 123) - assert.isTrue(result.chainStatus.dataSetLive) - - // Verify summary - assert.isTrue(result.summary.isComplete) - assert.strictEqual(result.summary.dataSetId, 123) - 
assert.isNull(result.summary.error) - }) - - it('should handle PDP server failure gracefully', async () => { - const mockTxHash = '0x1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef' - server.use( - Mocks.JSONRPC({ - ...Mocks.presets.basic, - eth_getTransactionByHash: (params) => { - const hash = params[0] - assert.equal(hash, mockTxHash) - return { - hash: mockTxHash, - from: Mocks.ADDRESSES.client1, - gas: '0x5208', - value: '0x0', - nonce: '0x444', - input: '0x', - } - }, - eth_getTransactionReceipt: (params) => { - const hash = params[0] - assert.equal(hash, mockTxHash) - return { - transactionHash: mockTxHash, - transactionIndex: '0x10', - blockHash: '0xb91b7314248aaae06f080ad427dbae78b8c5daf72b2446cf843739aef80c6417', - status: '0x1', - blockNumber: '0x3039', // 12345 - cumulativeGasUsed: '0x52080', - gasUsed: '0x186a0', // 100000 - logs: [makeDataSetCreatedLog(123, 1)], - } - }, - pdpVerifier: { - ...Mocks.presets.basic.pdpVerifier, - dataSetLive: () => [true], - }, - }) - ) - const warmStorageService = await createWarmStorageService() - // Create a mock PDPServer that throws error - const mockPDPServer: any = { - getDataSetCreationStatus: async () => { - throw new Error('Server unavailable') - }, - } - - const result = await warmStorageService.getComprehensiveDataSetStatus(mockTxHash, mockPDPServer) - - // Server status should be null due to error - assert.isNull(result.serverStatus) - - // Chain status should still work - assert.isTrue(result.chainStatus.transactionMined) - assert.isTrue(result.chainStatus.transactionSuccess) - assert.strictEqual(result.chainStatus.dataSetId, 123) - assert.isTrue(result.chainStatus.dataSetLive) - - // Summary should reflect that completion requires BOTH chain AND server confirmation - // Since server status is null (unavailable), isComplete should be false - assert.isFalse(result.summary.isComplete, 'isComplete should be false when server status is unavailable') - assert.strictEqual(result.summary.dataSetId, 123) - assert.isNull(result.summary.error) - }) - - it('should NOT mark as complete when server has not caught up yet', async () => { - const mockTxHash = '0x1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef' - server.use( - Mocks.JSONRPC({ - ...Mocks.presets.basic, - eth_getTransactionByHash: (params) => { - const hash = params[0] - assert.equal(hash, mockTxHash) - return { - hash: mockTxHash, - from: Mocks.ADDRESSES.client1, - gas: '0x5208', - value: '0x0', - nonce: '0x444', - input: '0x', - } - }, - eth_getTransactionReceipt: (params) => { - const hash = params[0] - assert.equal(hash, mockTxHash) - return { - transactionHash: mockTxHash, - transactionIndex: '0x10', - blockHash: '0xb91b7314248aaae06f080ad427dbae78b8c5daf72b2446cf843739aef80c6417', - status: '0x1', - blockNumber: '0x3039', // 12345 - cumulativeGasUsed: '0x52080', - gasUsed: '0x186a0', // 100000 - logs: [makeDataSetCreatedLog(123, 1)], - } - }, - pdpVerifier: { - ...Mocks.presets.basic.pdpVerifier, - dataSetLive: () => [true], - }, - }) - ) - const warmStorageService = await createWarmStorageService() - // Create a mock PDPServer that returns null (server hasn't caught up) - const mockPDPServer: any = { - getDataSetCreationStatus: async () => { - throw new Error('Data set creation status not found') - }, - } - - const result = await warmStorageService.getComprehensiveDataSetStatus(mockTxHash, mockPDPServer) - - // Chain status should show success - assert.isTrue(result.chainStatus.transactionMined) - 
assert.isTrue(result.chainStatus.transactionSuccess) - assert.isTrue(result.chainStatus.dataSetLive) - assert.strictEqual(result.chainStatus.dataSetId, 123) - - // Server status should be null (server hasn't caught up) - assert.isNull(result.serverStatus) - - // IMPORTANT: isComplete should be FALSE because server hasn't confirmed yet - // This test will FAIL with the current implementation, proving the bug - assert.isFalse(result.summary.isComplete, 'isComplete should be false when server has not caught up') - }) - - it('should wait for data set to become live', async () => { - const mockTxHash = '0x1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef' - let callCount = 0 - - // Create a mock PDPServer - const mockPDPServer: any = { - getDataSetCreationStatus: async () => { - callCount++ - if (callCount === 1) { - // First call - not created yet - return { - createMessageHash: mockTxHash, - dataSetCreated: false, - service: 'test-service', - txStatus: 'pending', - ok: null, - dataSetId: undefined, - } - } else { - // Second call - created - return { - createMessageHash: mockTxHash, - dataSetCreated: true, - service: 'test-service', - txStatus: 'confirmed', - ok: true, - dataSetId: 123, - } - } - }, - } - - server.use( - Mocks.JSONRPC({ - ...Mocks.presets.basic, - eth_getTransactionByHash: (params) => { - const hash = params[0] - assert.equal(hash, mockTxHash) - return { - hash: mockTxHash, - from: Mocks.ADDRESSES.client1, - gas: '0x5208', - value: '0x0', - nonce: '0x444', - input: '0x', - } - }, - eth_getTransactionReceipt: (params) => { - const hash = params[0] - assert.equal(hash, mockTxHash) - // Receipt should be available after first PDPServer call (callCount >= 1) - if (callCount < 1) { - return null // Not mined yet - } else { - return { - transactionHash: mockTxHash, - transactionIndex: '0x10', - blockHash: '0xb91b7314248aaae06f080ad427dbae78b8c5daf72b2446cf843739aef80c6417', - status: '0x1', - blockNumber: '0x3039', // 12345 - cumulativeGasUsed: '0x52080', - gasUsed: '0x186a0', // 100000 - logs: [makeDataSetCreatedLog(123, 1)], - } - } - }, - pdpVerifier: { - ...Mocks.presets.basic.pdpVerifier, - dataSetLive: () => [true], - }, - }) - ) - const warmStorageService = await createWarmStorageService() - - const result = await warmStorageService.waitForDataSetCreationWithStatus( - mockTxHash, - mockPDPServer, - 5000, // 5 second timeout - 100 // 100ms poll interval - ) - - assert.isTrue(result.summary.isComplete) - assert.strictEqual(result.summary.dataSetId, 123) - assert.isTrue(callCount >= 2) // Should have polled at least twice - }) - - it('should timeout if data set takes too long', async () => { - const mockTxHash = '0x1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef' - server.use( - Mocks.JSONRPC({ - ...Mocks.presets.basic, - eth_getTransactionReceipt: () => null, - }) - ) - const warmStorageService = await createWarmStorageService() - - // Create a mock PDPServer that always returns pending - const mockPDPServer: any = { - getDataSetCreationStatus: async () => { - return { - createMessageHash: mockTxHash, - dataSetCreated: false, - service: 'test-service', - txStatus: 'pending', - ok: null, - dataSetId: undefined, - } - }, - } - - try { - const mockTransaction = { hash: mockTxHash } as any - await warmStorageService.waitForDataSetCreationWithStatus( - mockTransaction, - mockPDPServer, - 300, // 300ms timeout - 100 // 100ms poll interval - ) - assert.fail('Should have thrown timeout error') - } catch (error: any) { - assert.include(error.message, 
'Data set creation timed out after') - } - }) - }) - - describe('getPDPConfig().maxProvingPeriod and getPDPConfig().challengeWindowSize', () => { + describe('getMaxProvingPeriod() and getChallengeWindow()', () => { it('should return max proving period from WarmStorage contract', async () => { server.use( Mocks.JSONRPC({ diff --git a/packages/synapse-sdk/src/utils/constants.ts b/packages/synapse-sdk/src/utils/constants.ts index 95b9a08b..4f4785ff 100644 --- a/packages/synapse-sdk/src/utils/constants.ts +++ b/packages/synapse-sdk/src/utils/constants.ts @@ -3,7 +3,7 @@ */ import * as Abis from '@filoz/synapse-core/abis' -import { MAX_UPLOAD_SIZE as CORE_MAX_UPLOAD_SIZE } from '@filoz/synapse-core/piece' +import { SIZE_CONSTANTS as CORE_SIZE_CONSTANTS } from '@filoz/synapse-core/utils' import { erc20Abi, multicall3Abi } from 'viem' import type { FilecoinNetworkType } from '../types.ts' @@ -188,7 +188,7 @@ export const SIZE_CONSTANTS = { * * Imported from @filoz/synapse-core/utils */ - MAX_UPLOAD_SIZE: CORE_MAX_UPLOAD_SIZE, + MAX_UPLOAD_SIZE: CORE_SIZE_CONSTANTS.MAX_UPLOAD_SIZE, /** * Minimum upload size (127 bytes) diff --git a/packages/synapse-sdk/src/utils/eip712.ts b/packages/synapse-sdk/src/utils/eip712.ts deleted file mode 100644 index 4f46b2e7..00000000 --- a/packages/synapse-sdk/src/utils/eip712.ts +++ /dev/null @@ -1,45 +0,0 @@ -import { TypedData } from 'ox' -import { keccak256, stringToHex } from 'viem' - -/** - * EIP-712 Type definitions for PDP operations verified by WarmStorage. - */ -export const EIP712_TYPES = { - MetadataEntry: [ - { name: 'key', type: 'string' }, - { name: 'value', type: 'string' }, - ], - CreateDataSet: [ - { name: 'clientDataSetId', type: 'uint256' }, - { name: 'payee', type: 'address' }, - { name: 'metadata', type: 'MetadataEntry[]' }, - ], - Cid: [{ name: 'data', type: 'bytes' }], - PieceMetadata: [ - { name: 'pieceIndex', type: 'uint256' }, - { name: 'metadata', type: 'MetadataEntry[]' }, - ], - AddPieces: [ - { name: 'clientDataSetId', type: 'uint256' }, - { name: 'nonce', type: 'uint256' }, - { name: 'pieceData', type: 'Cid[]' }, - { name: 'pieceMetadata', type: 'PieceMetadata[]' }, - ], - SchedulePieceRemovals: [ - { name: 'clientDataSetId', type: 'uint256' }, - { name: 'pieceIds', type: 'uint256[]' }, - ], - DeleteDataSet: [{ name: 'clientDataSetId', type: 'uint256' }], -} - -export const EIP712_ENCODED_TYPES: Record<string, string> = {} -export const EIP712_TYPE_HASHES: Record<string, string> = {} - -for (const typeName in EIP712_TYPES) { - const encodedType = TypedData.encodeType({ - types: EIP712_TYPES, - primaryType: typeName, - }) - EIP712_ENCODED_TYPES[typeName] = encodedType - EIP712_TYPE_HASHES[typeName] = keccak256(stringToHex(encodedType)) -} diff --git a/packages/synapse-sdk/src/utils/index.ts b/packages/synapse-sdk/src/utils/index.ts index 82f151d1..4bf81cdd 100644 --- a/packages/synapse-sdk/src/utils/index.ts +++ b/packages/synapse-sdk/src/utils/index.ts @@ -1,5 +1,4 @@ export * from './constants.ts' -export { EIP712_ENCODED_TYPES, EIP712_TYPE_HASHES, EIP712_TYPES } from './eip712.ts' export * from './epoch.ts' export { createError } from './errors.ts' export { combineMetadata, metadataMatches } from './metadata.ts' diff --git a/packages/synapse-sdk/src/utils/viem.ts b/packages/synapse-sdk/src/utils/viem.ts index b6ec5650..ad54515e 100644 --- a/packages/synapse-sdk/src/utils/viem.ts +++ b/packages/synapse-sdk/src/utils/viem.ts @@ -55,6 +55,10 @@ export async function signerToConnectorClient( let transport: Transport let account: Account | Address + if (signer instanceof 
ethers.NonceManager) { + signer = signer.signer + } + if ((signer as any).privateKey) { account = privateKeyToAccount((signer as any).privateKey) } else if (_provider instanceof ethers.BrowserProvider) { diff --git a/packages/synapse-sdk/src/warm-storage/service.ts b/packages/synapse-sdk/src/warm-storage/service.ts index 4086cf33..e2e1bb8a 100644 --- a/packages/synapse-sdk/src/warm-storage/service.ts +++ b/packages/synapse-sdk/src/warm-storage/service.ts @@ -554,133 +554,6 @@ export class WarmStorageService { } } - /** - * Get comprehensive data set creation status combining server and chain info - * @param txHashOrTransaction - Transaction hash or transaction object - * @param pdpServer - PDP server instance for status checks - * @returns Combined status information - */ - async getComprehensiveDataSetStatus( - txHashOrTransaction: string | ethers.TransactionResponse, - pdpServer?: PDPServer - ): Promise<ComprehensiveDataSetStatus> { - const txHash = typeof txHashOrTransaction === 'string' ? txHashOrTransaction : txHashOrTransaction.hash - - // Get server status if pdpServer provided - let serverStatus: DataSetCreationStatusResponse | null = null - if (pdpServer != null) { - try { - performance.mark('synapse:pdpServer.getDataSetCreationStatus-start') - serverStatus = await pdpServer.getDataSetCreationStatus(txHash) - performance.mark('synapse:pdpServer.getDataSetCreationStatus-end') - performance.measure( - 'synapse:pdpServer.getDataSetCreationStatus', - 'synapse:pdpServer.getDataSetCreationStatus-start', - 'synapse:pdpServer.getDataSetCreationStatus-end' - ) - } catch { - performance.mark('synapse:pdpServer.getDataSetCreationStatus-end') - performance.measure( - 'synapse:pdpServer.getDataSetCreationStatus', - 'synapse:pdpServer.getDataSetCreationStatus-start', - 'synapse:pdpServer.getDataSetCreationStatus-end' - ) - // Server doesn't have status yet or error occurred - } - } - - // Get chain status (pass through the transaction object if we have it) - performance.mark('synapse:verifyDataSetCreation-start') - const chainStatus = await this.verifyDataSetCreation(txHashOrTransaction) - performance.mark('synapse:verifyDataSetCreation-end') - performance.measure( - 'synapse:verifyDataSetCreation', - 'synapse:verifyDataSetCreation-start', - 'synapse:verifyDataSetCreation-end' - ) - - // Combine into summary - // isComplete should be true only when BOTH chain and server have confirmed the data set creation - const isComplete = - chainStatus.transactionMined && - chainStatus.transactionSuccess && - chainStatus.dataSetId != null && - chainStatus.dataSetLive && - serverStatus != null && - serverStatus.ok === true && - serverStatus.dataSetCreated - const dataSetId = serverStatus?.dataSetId ?? chainStatus.dataSetId ?? null - - // Determine error from server status or chain status - let error: string | null = chainStatus.error ?? 
null - if (serverStatus != null && serverStatus.ok === false) { - error = `Server reported transaction failed (status: ${serverStatus.txStatus})` - } - - return { - txHash, - serverStatus, - chainStatus, - summary: { - isComplete, - isLive: chainStatus.dataSetLive, - dataSetId, - error, - }, - } - } - - /** - * Wait for data set creation with status updates - * @param txHashOrTransaction - Transaction hash or transaction object to wait for - * @param pdpServer - PDP server for status checks - * @param maxWaitTime - Maximum time to wait in milliseconds - * @param pollInterval - Polling interval in milliseconds - * @param onProgress - Optional progress callback - * @returns Final comprehensive status - */ - async waitForDataSetCreationWithStatus( - txHashOrTransaction: string | ethers.TransactionResponse, - pdpServer: PDPServer, - maxWaitTime: number = TIMING_CONSTANTS.DATA_SET_CREATION_TIMEOUT_MS, - pollInterval: number = TIMING_CONSTANTS.DATA_SET_CREATION_POLL_INTERVAL_MS, - onProgress?: (status: ComprehensiveDataSetStatus, elapsedMs: number) => Promise<void> - ): Promise<ComprehensiveDataSetStatus> { - const startTime = Date.now() - - while (Date.now() - startTime < maxWaitTime) { - const status = await this.getComprehensiveDataSetStatus(txHashOrTransaction, pdpServer) - const elapsedMs = Date.now() - startTime - - // Fire progress callback if provided - if (onProgress != null) { - try { - await onProgress(status, elapsedMs) - } catch (error) { - // Don't let callback errors break the polling loop - console.error('Error in progress callback:', error) - } - } - - // Check if complete - if (status.summary.isComplete) { - return status - } - - // Check for errors - if (status.summary.error != null && status.chainStatus.transactionMined) { - // Transaction confirmed but failed - throw new Error(status.summary.error) - } - - // Wait before next poll - await new Promise((resolve) => setTimeout(resolve, pollInterval)) - } - - // Timeout - throw new Error(`Data set creation timed out after ${maxWaitTime / 1000} seconds`) - } - // ========== Metadata Operations ========== /** diff --git a/utils/example-pull-e2e.js b/utils/example-pull-e2e.js new file mode 100644 index 00000000..a588963d --- /dev/null +++ b/utils/example-pull-e2e.js @@ -0,0 +1,317 @@ +#!/usr/bin/env node + +/** + * Example: SP-to-SP Piece Pull End-to-End Test + * + * This example demonstrates the SP-to-SP pull functionality: + * 1. Upload a piece to SP1 (providerId=1) using low-level park API (no AddPieces) + * 2. Wait for SP1 to park the piece + * 3. Request SP2 (providerId=2) to pull the piece from SP1 + * 4. Poll until the pull completes + * 5. Verify SP2 can serve the piece + * + * This tests: + * - curio: POST /pdp/piece/pull endpoint + * - synapse-core: warm-storage/pull module (high-level with signing) + * + * Required environment variables: + * - PRIVATE_KEY: Your private key (with 0x prefix) + * - RPC_URL: Filecoin RPC endpoint (defaults to calibration) + * + * Optional environment variables (for devnet): + * - WARM_STORAGE_ADDRESS: Warm Storage service contract address + * - MULTICALL3_ADDRESS: Multicall3 address (required for devnet) + * - USDFC_ADDRESS: USDFC token address + * + * Usage: + * PRIVATE_KEY=0x... 
node example-pull-e2e.js <file> [file2...] + * + * With foc-devnet: + * RUN_ID=$(jq -r '.run_id' ~/.foc-devnet/state/current_runid.json) + * PRIVATE_KEY=0x$(jq -r '.[] | select(.name=="USER_1") | .private_key' ~/.foc-devnet/keys/addresses.json) \ + * RPC_URL=http://localhost:$(docker port foc-${RUN_ID}-lotus 1234 | cut -d: -f2)/rpc/v1 \ + * WARM_STORAGE_ADDRESS=$(jq -r '.foc_contracts.filecoin_warm_storage_service_proxy' ~/.foc-devnet/state/latest/contract_addresses.json) \ + * MULTICALL3_ADDRESS=$(jq -r '.contracts.multicall' ~/.foc-devnet/state/latest/contract_addresses.json) \ + * USDFC_ADDRESS=$(jq -r '.contracts.usdfc' ~/.foc-devnet/state/latest/contract_addresses.json) \ + * SP_REGISTRY_ADDRESS=$(jq -r '.foc_contracts.service_provider_registry_proxy' ~/.foc-devnet/state/latest/contract_addresses.json) \ + * node utils/example-pull-e2e.js test-file.txt + */ + +import fsPromises from 'fs/promises' +import { createWalletClient, http, publicActions } from 'viem' +import { privateKeyToAccount } from 'viem/accounts' +import { devnet } from '../packages/synapse-core/src/chains.ts' +import * as SP from '../packages/synapse-core/src/sp.ts' +import { waitForPullStatus } from '../packages/synapse-core/src/warm-storage/pull.ts' +import { Synapse } from '../packages/synapse-sdk/src/index.ts' +import { SPRegistryService } from '../packages/synapse-sdk/src/sp-registry/service.ts' + +// Configuration from environment +const PRIVATE_KEY = process.env.PRIVATE_KEY +const RPC_URL = process.env.RPC_URL || 'https://api.calibration.node.glif.io/rpc/v1' +const WARM_STORAGE_ADDRESS = process.env.WARM_STORAGE_ADDRESS +const MULTICALL3_ADDRESS = process.env.MULTICALL3_ADDRESS +const USDFC_ADDRESS = process.env.USDFC_ADDRESS +const SP_REGISTRY_ADDRESS = process.env.SP_REGISTRY_ADDRESS + +function printUsageAndExit() { + console.error('Usage: PRIVATE_KEY=0x... 
node example-pull-e2e.js <file> [file2...]') + process.exit(1) +} + +// Validate inputs +if (!PRIVATE_KEY) { + console.error('ERROR: PRIVATE_KEY environment variable is required') + printUsageAndExit() +} + +const filePaths = process.argv.slice(2) +if (filePaths.length === 0) { + console.error('ERROR: At least one file path argument is required') + printUsageAndExit() +} + +// Helper to format bytes for display +function formatBytes(bytes) { + if (bytes === 0) return '0 Bytes' + const k = 1024 + const sizes = ['Bytes', 'KB', 'MB', 'GB'] + const i = Math.floor(Math.log(bytes) / Math.log(k)) + return `${parseFloat((bytes / k ** i).toFixed(2))} ${sizes[i]}` +} + +// Helper to format USDFC amounts (18 decimals) +function formatUSDFC(amount) { + const usdfc = Number(amount) / 1e18 + return `${usdfc.toFixed(6)} USDFC` +} + +async function main() { + try { + console.log('=== SP-to-SP Pull E2E Test ===\n') + console.log(`Processing ${filePaths.length} file(s)...`) + + // Read all files and get their stats + const fileInfos = await Promise.all( + filePaths.map(async (filePath) => { + const stat = await fsPromises.stat(filePath) + if (!stat.isFile()) { + throw new Error(`Path is not a file: ${filePath}`) + } + console.log(` ${filePath}: ${formatBytes(stat.size)}`) + return { filePath, size: stat.size } + }) + ) + + // Create Synapse instance (still needed for provider discovery and balance checks) + console.log('\n--- Initializing Synapse SDK ---') + console.log(`RPC URL: ${RPC_URL}`) + + const synapseOptions = { + multicall3Address: MULTICALL3_ADDRESS, + privateKey: PRIVATE_KEY, + rpcURL: RPC_URL, + usdfcAddress: USDFC_ADDRESS, + warmStorageAddress: WARM_STORAGE_ADDRESS, + } + + if (WARM_STORAGE_ADDRESS) { + console.log(`Warm Storage Address: ${WARM_STORAGE_ADDRESS}`) + } + if (MULTICALL3_ADDRESS) { + console.log(`Multicall3 Address: ${MULTICALL3_ADDRESS}`) + } + + const synapse = await Synapse.create(synapseOptions) + console.log('Synapse instance created') + + // Create viem wallet client for signing (required by pullPieces) + // Use devnet chain template and override with actual contract addresses from deployment + const account = privateKeyToAccount(PRIVATE_KEY) + const chain = { + ...devnet, + rpcUrls: { + default: { http: [RPC_URL] }, + }, + contracts: { + ...devnet.contracts, + // Override with actual devnet deployment addresses + storage: { + ...devnet.contracts.storage, + address: synapse.getWarmStorageAddress(), + }, + }, + } + const viemClient = createWalletClient({ + account, + chain, + transport: http(RPC_URL), + }).extend(publicActions) + + console.log(`Wallet address: ${account.address}`) + + // Check balances + console.log('\n--- Checking Balances ---') + const filBalance = await synapse.payments.walletBalance() + const usdfcBalance = await synapse.payments.walletBalance('USDFC') + console.log(`FIL balance: ${Number(filBalance) / 1e18} FIL`) + console.log(`USDFC balance: ${formatUSDFC(usdfcBalance)}`) + + // Get SP1 and SP2 info + console.log('\n--- Discovering Service Providers ---') + console.log(`SP Registry Address: ${SP_REGISTRY_ADDRESS}`) + const spRegistry = new SPRegistryService(synapse.getProvider(), SP_REGISTRY_ADDRESS, MULTICALL3_ADDRESS) + const sp1Info = await spRegistry.getProvider(1) + const sp2Info = await spRegistry.getProvider(2) + + if (!sp1Info || !sp1Info.products.PDP?.data.serviceURL) { + throw new Error('SP1 (providerId=1) not found or missing PDP service URL') + } + if (!sp2Info || !sp2Info.products.PDP?.data.serviceURL) { + throw new Error('SP2 (providerId=2) not found or 
missing PDP service URL') + } + + const sp1Url = sp1Info.products.PDP.data.serviceURL.replace(/\/$/, '') + const sp2Url = sp2Info.products.PDP.data.serviceURL.replace(/\/$/, '') + + console.log(`SP1 (providerId=1): ${sp1Info.name}`) + console.log(` Address: ${sp1Info.serviceProvider}`) + console.log(` PDP URL: ${sp1Url}`) + console.log(`SP2 (providerId=2): ${sp2Info.name}`) + console.log(` Address: ${sp2Info.serviceProvider}`) + console.log(` PDP URL: ${sp2Url}`) + + // Upload all pieces to SP1 in parallel + console.log('\n--- Uploading Pieces to SP1 (Park Only) ---') + const uploadResults = await Promise.all( + fileInfos.map(async ({ filePath, size }) => { + const fileHandle = await fsPromises.open(filePath, 'r') + const fileData = fileHandle.readableWebStream() + + console.log(` Uploading ${filePath}...`) + const result = await SP.uploadPieceStreaming({ + endpoint: sp1Url, + data: fileData, + size: size, + }) + await fileHandle.close() + + const pieceCid = result.pieceCid + console.log(` ${filePath} -> ${pieceCid.toString().slice(0, 30)}... (${formatBytes(result.size)})`) + return { filePath, pieceCid, size: result.size } + }) + ) + + console.log(`\nUploaded ${uploadResults.length} piece(s) to SP1`) + + // Wait for all pieces to be parked on SP1 + console.log('\n--- Waiting for SP1 to park all pieces ---') + await Promise.all( + uploadResults.map(async ({ pieceCid }) => { + await SP.findPiece({ + endpoint: sp1Url, + pieceCid: pieceCid, + }) + console.log(` Parked: ${pieceCid.toString().slice(0, 30)}...`) + }) + ) + console.log('All pieces parked on SP1') + + // Get FWSS address for recordKeeper + const fwssAddress = synapse.getWarmStorageAddress() + console.log(`\nFWSS Address (recordKeeper): ${fwssAddress}`) + + // Initiate pull from SP2 using high-level API + console.log('\n--- Initiating Pull to SP2 ---') + console.log(`Target SP2 URL: ${sp2Url}`) + console.log(`Requesting SP2 to pull ${uploadResults.length} piece(s) from SP1...`) + console.log(`Client: ${account.address}`) + console.log(`Payee (SP2): ${sp2Info.serviceProvider}`) + console.log(`PieceCIDs: ${uploadResults.length} pieces`) + for (const { pieceCid } of uploadResults) { + console.log(` - ${pieceCid.toString().slice(0, 40)}...`) + } + + // Build pieces array with source URLs for each piece + const piecesToPull = uploadResults.map(({ pieceCid }) => ({ + pieceCid: pieceCid, + sourceUrl: `${sp1Url}/piece/${pieceCid.toString()}`, + })) + + // Use high-level pullPieces with automatic signing + // dataSetId omitted = create new dataset + // recordKeeper is explicitly provided for devnet (custom chain ID not in chain registry) + const pullResult = await waitForPullStatus(viemClient, { + endpoint: sp2Url, + payee: sp2Info.serviceProvider, + recordKeeper: fwssAddress, + pieces: piecesToPull, + onStatus: (response) => { + console.log(` Pull status: ${response.status}`) + for (const piece of response.pieces) { + console.log(` ${piece.pieceCid.slice(0, 20)}...: ${piece.status}`) + } + }, + minTimeout: 2000, // Poll every 2 seconds + }) + + console.log(`\nPull completed with status: ${pullResult.status}`) + + if (pullResult.status === 'complete') { + console.log('\n--- Verifying SP2 has all pieces ---') + + let allMatched = true + for (const { filePath, pieceCid } of uploadResults) { + const sp2PieceUrl = `${sp2Url}/piece/${pieceCid.toString()}` + console.log(`\nDownloading ${pieceCid.toString().slice(0, 30)}... 
from SP2`) + + const downloadResponse = await fetch(sp2PieceUrl) + if (downloadResponse.ok) { + const downloadedData = await downloadResponse.arrayBuffer() + console.log(` Downloaded ${formatBytes(downloadedData.byteLength)}`) + + // Compare with original file + const originalData = await fsPromises.readFile(filePath) + const matches = Buffer.from(originalData).equals(Buffer.from(downloadedData)) + + if (matches) { + console.log(` MATCH: ${filePath}`) + } else { + console.error(` MISMATCH: ${filePath}`) + allMatched = false + } + } else { + console.error(` ERROR: Failed to download: ${downloadResponse.status}`) + const errorText = await downloadResponse.text() + console.error(` Response: ${errorText}`) + allMatched = false + } + } + + if (allMatched) { + console.log(`\nSUCCESS: All ${uploadResults.length} pieces verified on SP2!`) + } else { + console.error('\nERROR: Some pieces did not match!') + process.exit(1) + } + } else if (pullResult.status === 'failed') { + console.error('\nERROR: Pull failed!') + for (const piece of pullResult.pieces) { + console.error(` ${piece.pieceCid}: ${piece.status}`) + } + process.exit(1) + } + + console.log('\n=== SP-to-SP Pull Test Complete ===') + } catch (error) { + console.error('\nERROR:', error.message) + if (error.cause) { + console.error('Caused by:', error.cause.message) + } + console.error(error) + process.exit(1) + } +} + +// Run the test +main().catch(console.error) diff --git a/utils/package.json b/utils/package.json index 07bfbc68..bf48968a 100644 --- a/utils/package.json +++ b/utils/package.json @@ -7,7 +7,9 @@ "lint": "biome check --no-errors-on-unmatched --files-ignore-unknown=true ." }, "dependencies": { + "@filoz/synapse-core": "workspace:*", "@filoz/synapse-sdk": "workspace:*", - "ethers": "^6.16.0" + "ethers": "^6.16.0", + "viem": "catalog:" } }
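For reference, a minimal sketch of the object-form PDPServer construction exercised by the updated pdp-server tests above. The bare-specifier import paths are assumptions (the tests themselves import from '../pdp/index.ts' and '../utils/viem.ts'); everything else follows the test setup:

import { ethers } from 'ethers'
// Import paths below are assumed package subpaths; adjust to the actual export map.
import { PDPServer } from '@filoz/synapse-sdk/pdp'
import { signerToConnectorClient } from '@filoz/synapse-sdk/utils/viem'

async function makePdpServer(privateKey: string, rpcURL: string, endpoint: string): Promise<PDPServer> {
  const provider = new ethers.JsonRpcProvider(rpcURL)
  // signerToConnectorClient adapts an ethers signer to the viem-style client the
  // new constructor expects; NonceManager-wrapped signers are unwrapped internally.
  const client = await signerToConnectorClient(new ethers.Wallet(privateKey), provider)
  return new PDPServer({ client, endpoint })
}

Note that piece arguments are now structured objects rather than CID strings, e.g. pdpServer.addPieces(1, 0n, [{ pieceCid: Piece.parse(cid) }]) with Piece imported from '@filoz/synapse-core/piece', and createDataSet metadata is an object ({}) instead of an array.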
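And the core call sequence of the new SP-to-SP pull flow, condensed from utils/example-pull-e2e.js above (client construction, provider discovery, and verification elided; the '@filoz/synapse-core/warm-storage' subpath for waitForPullStatus is an assumption, since the script imports src/warm-storage/pull.ts directly):

import fsPromises from 'node:fs/promises'
import * as SP from '@filoz/synapse-core/sp'
// Subpath assumed; the example script imports '../packages/synapse-core/src/warm-storage/pull.ts'.
import { waitForPullStatus } from '@filoz/synapse-core/warm-storage'
import type { Address, Client } from 'viem'

async function pullFileToSecondProvider(
  viemClient: Client, // wallet client extended with publicActions, as built in the script
  opts: { filePath: string; sp1Url: string; sp2Url: string; payee: Address; recordKeeper: Address }
) {
  const { filePath, sp1Url, sp2Url, payee, recordKeeper } = opts

  // 1. Park the piece on SP1 via the low-level upload API (no AddPieces)
  const fileHandle = await fsPromises.open(filePath, 'r')
  const { size } = await fileHandle.stat()
  const { pieceCid } = await SP.uploadPieceStreaming({
    endpoint: sp1Url,
    data: fileHandle.readableWebStream(),
    size,
  })
  await fileHandle.close()

  // 2. Block until SP1 reports the piece as parked
  await SP.findPiece({ endpoint: sp1Url, pieceCid })

  // 3. Ask SP2 to pull the piece from SP1 and poll until the pull resolves
  return waitForPullStatus(viemClient, {
    endpoint: sp2Url,
    payee,
    recordKeeper,
    pieces: [{ pieceCid, sourceUrl: `${sp1Url}/piece/${pieceCid.toString()}` }],
  })
}

The resolved status is 'complete' or 'failed', with per-piece statuses on result.pieces, which is what the verification section of the script checks.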