diff --git a/README.md b/README.md index 5804195c5..d9134b9bd 100644 --- a/README.md +++ b/README.md @@ -12,9 +12,12 @@ See the [implementation document](docs/implementation.md) for the detailed descr 1. Must pass `npm run test`. 1. Must pass `npm run lint`. -1. Must and only prefix the name of a "data structure interface" (interface that is without methods and act purely as data holders) with an `I`. -1. Must and only export a class as a default export if the class name matches the file name. -1. Must sort imports. +1. Prefix an interface that requires implementation with `I`. e.g. `ITransactionProcessor`. +1. Suffix a data-holder interface (without definition of methods) with `Model`. e.g. `TransactionModel`. +1. Use default export if class/interface name matches the file name. +1. Sort imports. ## Docker -The Sidetree components are also available via docker containers. Please see the [docker document](docs/docker.md) to find out details on building and running. +> NOTE: 2019-08-13: docker-compose out-of-date, needs to be updated. + +The Sidetree components are available via docker containers. Please see the [docker document](docs/docker.md) to find out details on building and running. 
diff --git a/lib/bitcoin/BitcoinProcessor.ts b/lib/bitcoin/BitcoinProcessor.ts index e12128415..d55fe23b1 100644 --- a/lib/bitcoin/BitcoinProcessor.ts +++ b/lib/bitcoin/BitcoinProcessor.ts @@ -1,10 +1,10 @@ import * as httpStatus from 'http-status'; import MongoDbTransactionStore from '../common/MongoDbTransactionStore'; import nodeFetch, { FetchError, Response, RequestInit } from 'node-fetch'; -import ErrorCode from '../common/ErrorCode'; -import ITransaction from '../common/ITransaction'; +import ErrorCode from '../common/SharedErrorCode'; import ReadableStream from '../common/ReadableStream'; import RequestError from './RequestError'; +import TransactionModel from '../common/models/TransactionModel'; import TransactionNumber from './TransactionNumber'; import { Address, Networks, PrivateKey, Script, Transaction } from 'bitcore-lib'; import { IBitcoinConfig } from './IBitcoinConfig'; @@ -124,7 +124,7 @@ export default class BitcoinProcessor { * Initializes the Bitcoin processor */ public async initialize () { - console.debug('Initializing TransactionStore'); + console.debug('Initializing ITransactionStore'); await this.transactionStore.initialize(); const address = this.privateKey.toAddress(); console.debug(`Checking if bitcoin contains a wallet for ${address}`); @@ -194,7 +194,7 @@ export default class BitcoinProcessor { */ public async transactions (since?: number, hash?: string): Promise<{ moreTransactions: boolean, - transactions: ITransaction[] + transactions: TransactionModel[] }> { if ((since && !hash) || (!since && hash)) { @@ -215,7 +215,7 @@ export default class BitcoinProcessor { transactionTime: transaction.transactionTime, transactionTimeHash: transaction.transactionTimeHash, anchorFileHash: transaction.anchorFileHash - } as ITransaction; + } as TransactionModel; }); return { @@ -229,7 +229,7 @@ export default class BitcoinProcessor { * @param transactions List of transactions to check * @returns The first valid transaction, or undefined if none are 
valid */ - public async firstValidTransaction (transactions: ITransaction[]): Promise { + public async firstValidTransaction (transactions: TransactionModel[]): Promise { for (let index = 0; index < transactions.length; index++) { const transaction = transactions[index]; const height = transaction.transactionTime; @@ -516,7 +516,7 @@ export default class BitcoinProcessor { const data = Buffer.from(hexDataMatches[1], 'hex').toString(); if (data.startsWith(this.sidetreePrefix)) { // we have found a sidetree transaction - const sidetreeTransaction: ITransaction = { + const sidetreeTransaction: TransactionModel = { transactionNumber: TransactionNumber.construct(block, transactionIndex), transactionTime: block, transactionTimeHash: blockHash, diff --git a/lib/bitcoin/RequestError.ts b/lib/bitcoin/RequestError.ts index f459f558e..62b6e3179 100644 --- a/lib/bitcoin/RequestError.ts +++ b/lib/bitcoin/RequestError.ts @@ -1,4 +1,3 @@ -import ErrorCode from '../common/ErrorCode'; import Response, { ResponseStatus } from '../common/Response'; /** @@ -19,7 +18,7 @@ export default class RequestError extends Error { return this.code !== undefined; } - constructor (public readonly responseCode: ResponseStatus, public readonly code?: ErrorCode) { + constructor (public readonly responseCode: ResponseStatus, public readonly code?: string) { super(code ? JSON.stringify({ code }) : undefined); // NOTE: Extending 'Error' breaks prototype chain since TypeScript 2.1. diff --git a/lib/common/ErrorCode.ts b/lib/common/ErrorCode.ts deleted file mode 100644 index b842938ba..000000000 --- a/lib/common/ErrorCode.ts +++ /dev/null @@ -1,47 +0,0 @@ -/** - * Error codes. 
- */ -enum ErrorCode { - AnchorFileBatchFileHashMissing = 'anchor_file_batch_file_hash_missing', - AnchorFileBatchFileHashNotString = 'anchor_file_batch_file_hash_not_string', - AnchorFileBatchFileHashUnsupported = 'anchor_file_batch_file_hash_unsupported', - AnchorFileDidUniqueSuffixEntryInvalid = 'anchor_file_did_unique_suffix_entry_invalid', - AnchorFileDidUniqueSuffixEntryNotString = 'anchor_file_did_unique_suffix_entry_not_string', - AnchorFileDidUniqueSuffixesHasDuplicates = 'anchor_file_did_unique_suffixes_has_duplicates', - AnchorFileDidUniqueSuffixesMissing = 'anchor_file_did_unique_suffixes_missing', - AnchorFileDidUniqueSuffixesNotArray = 'anchor_file_did_unique_suffixes_not_array', - AnchorFileExceededMaxOperationCount = 'anchor_file_exceeded_max_operation_count', - AnchorFileHasUnknownProperty = 'anchor_file_has_unknown_property', - AnchorFileMerkleRootMissing = 'anchor_file_merkle_root_missing', - AnchorFileMerkleRootNotString = 'anchor_file_merkle_root_not_string', - AnchorFileMerkleRootUnsupported = 'anchor_file_merkle_root_unsupported', - AnchorFileNotJson = 'anchor_file_not_json', - BatchWriterAlreadyHasOperationForDid = 'batch_writer_already_has_operation_for_did', - InvalidTransactionNumberOrTimeHash = 'invalid_transaction_number_or_time_hash', - OperationCreateInvalidDidDocument = 'operation_create_invalid_did_document', - OperationExceedsMaximumSize = 'operation_exceeds_maximum_size', - OperationHeaderMissingKid = 'operation_header_missing_kid', - OperationHeaderMissingOrIncorrectAlg = 'operation_header_missing_or_incorrect_alg', - OperationHeaderMissingOrIncorrectOperation = 'operation_header_missing_or_incorrect_operation', - OperationMissingOrIncorrectPayload = 'operation_missing_or_incorrect_payload', - OperationMissingOrIncorrectSignature = 'operation_missing_or_incorrect_signature', - OperationUpdatePayloadDidUniqueSuffixInvalid = 'operation_update_payload_did_unique_suffix_invalid', - OperationUpdatePayloadPreviousOperationHashInvalid = 
'operation_update_payload_previous_operation_hash_invalid', - OperationUpdatePayloadMissingOrInvalidDidUniqueSuffixType = 'operation_update_payload_missing_or_invalid_did_unique_suffix_type', - OperationUpdatePayloadMissingOrInvalidPreviousOperationHashType = 'operation_update_payload_missing_or_invalid_previous_operation_hash_type', - OperationUpdatePayloadMissingOrUnknownProperty = 'operation_update_payload_missing_or_unknown_property', - OperationUpdatePatchesNotArray = 'operation_update_patches_not_array', - OperationUpdatePatchMissingOrUnknownAction = 'operation_update_patch_missing_or_unknown_action', - OperationUpdatePatchMissingOrUnknownProperty = 'operation_update_patch_missing_or_unknown_property', - OperationUpdatePatchPublicKeyHexMissingOrIncorrect = 'operation_update_patch_public_key_hex_missing_or_incorrect', - OperationUpdatePatchPublicKeyIdNotString = 'operation_update_patch_public_key_id_not_string', - OperationUpdatePatchPublicKeyMissingOrUnknownProperty = 'operation_update_patch_public_key_missing_or_unknown_property', - OperationUpdatePatchPublicKeysNotArray = 'operation_update_patch_public_keys_not_array', - OperationUpdatePatchPublicKeyTypeMissingOrUnknown = 'operation_update_patch_public_key_type_missing_or_unknown', - OperationUpdatePatchServiceEndpointNotDid = 'operation_update_patch_service_endpoint_not_did', - OperationUpdatePatchServiceEndpointsNotArray = 'operation_update_patch_service_endpoints_not_array', - OperationUpdatePatchServiceTypeMissingOrUnknown = 'operation_update_patch_service_type_missing_or_unknown', - QueueingMultipleOperationsPerDidNotAllowed = 'queueing_multiple_operations_per_did_not_allowed' -} - -export default ErrorCode; diff --git a/lib/common/MongoDbTransactionStore.ts b/lib/common/MongoDbTransactionStore.ts index 315ebca15..93f9a4e43 100644 --- a/lib/common/MongoDbTransactionStore.ts +++ b/lib/common/MongoDbTransactionStore.ts @@ -1,11 +1,11 @@ -import ITransaction from './ITransaction'; -import TransactionStore 
from '../core/interfaces/TransactionStore'; +import ITransactionStore from '../core/interfaces/ITransactionStore'; +import TransactionModel from './models/TransactionModel'; import { Collection, Db, Long, MongoClient } from 'mongodb'; /** - * Implementation of TransactionStore that stores the transaction data in a MongoDB database. + * Implementation of ITransactionStore that stores the transaction data in a MongoDB database. */ -export default class MongoDbTransactionStore implements TransactionStore { +export default class MongoDbTransactionStore implements ITransactionStore { /** Default database name used if not specified in constructor. */ public static readonly defaultDatabaseName: string = 'sidetree'; /** Collection name for transactions. */ @@ -44,7 +44,7 @@ export default class MongoDbTransactionStore implements TransactionStore { return transactionCount; } - public async getTransaction (transactionNumber: number): Promise { + public async getTransaction (transactionNumber: number): Promise { const transactions = await this.transactionCollection!.find({ transactionNumber: Long.fromNumber(transactionNumber) }).toArray(); if (transactions.length === 0) { return undefined; @@ -54,7 +54,7 @@ export default class MongoDbTransactionStore implements TransactionStore { return transaction; } - public async getTransactionsLaterThan (transactionNumber: number | undefined, max: number): Promise { + public async getTransactionsLaterThan (transactionNumber: number | undefined, max: number): Promise { let transactions = []; try { @@ -82,7 +82,7 @@ export default class MongoDbTransactionStore implements TransactionStore { this.transactionCollection = await MongoDbTransactionStore.createTransactionCollectionIfNotExist(this.db!); } - async addTransaction (transaction: ITransaction): Promise { + async addTransaction (transaction: TransactionModel): Promise { try { const transactionInMongoDb = { anchorFileHash: transaction.anchorFileHash, @@ -100,7 +100,7 @@ export default 
class MongoDbTransactionStore implements TransactionStore { } } - async getLastTransaction (): Promise { + async getLastTransaction (): Promise { const lastTransactions = await this.transactionCollection!.find().limit(1).sort({ transactionNumber: -1 }).toArray(); if (lastTransactions.length === 0) { return undefined; @@ -110,8 +110,8 @@ export default class MongoDbTransactionStore implements TransactionStore { return lastProcessedTransaction; } - async getExponentiallySpacedTransactions (): Promise { - const exponentiallySpacedTransactions: ITransaction[] = []; + async getExponentiallySpacedTransactions (): Promise { + const exponentiallySpacedTransactions: TransactionModel[] = []; const allTransactions = await this.transactionCollection!.find().sort({ transactionNumber: 1 }).toArray(); let index = allTransactions.length - 1; @@ -138,7 +138,7 @@ export default class MongoDbTransactionStore implements TransactionStore { * Gets the list of processed transactions. * Mainly used for test purposes. */ - public async getTransactions (): Promise { + public async getTransactions (): Promise { const transactions = await this.transactionCollection!.find().sort({ transactionNumber: 1 }).toArray(); return transactions; } @@ -147,7 +147,7 @@ export default class MongoDbTransactionStore implements TransactionStore { * Creates the `transaction` collection with indexes if it does not exists. * @returns The existing collection if exists, else the newly created collection. */ - private static async createTransactionCollectionIfNotExist (db: Db): Promise> { + private static async createTransactionCollectionIfNotExist (db: Db): Promise> { const collections = await db.collections(); const collectionNames = collections.map(collection => collection.collectionName); diff --git a/lib/common/Response.ts b/lib/common/Response.ts index 11cfeb505..df945b08f 100644 --- a/lib/common/Response.ts +++ b/lib/common/Response.ts @@ -1,7 +1,7 @@ /** * Defines a Sidetree response object. 
*/ -interface IResponse { +interface ResponseModel { status: ResponseStatus; body?: any; } @@ -38,4 +38,4 @@ export default class Response { } } -export { IResponse, Response, ResponseStatus }; +export { Response, ResponseModel, ResponseStatus }; diff --git a/lib/common/SharedErrorCode.ts b/lib/common/SharedErrorCode.ts new file mode 100644 index 000000000..4e24e0cc5 --- /dev/null +++ b/lib/common/SharedErrorCode.ts @@ -0,0 +1,6 @@ +/** + * Common error codes used across services. + */ +export default { + InvalidTransactionNumberOrTimeHash: 'invalid_transaction_number_or_time_hash' +}; diff --git a/lib/common/IFetchResult.ts b/lib/common/models/FetchResult.ts similarity index 67% rename from lib/common/IFetchResult.ts rename to lib/common/models/FetchResult.ts index 11650883f..a89753ffe 100644 --- a/lib/common/IFetchResult.ts +++ b/lib/common/models/FetchResult.ts @@ -1,9 +1,9 @@ -import { FetchResultCode } from './FetchResultCode'; +import { FetchResultCode } from '../FetchResultCode'; /** * Data structure representing the result of a content fetch from the Content Addressable Storage. */ -export default interface IFetchResult { +export default interface FetchResult { /** Return code for the fetch. */ code: FetchResultCode; content?: Buffer; diff --git a/lib/common/ITransaction.ts b/lib/common/models/TransactionModel.ts similarity index 78% rename from lib/common/ITransaction.ts rename to lib/common/models/TransactionModel.ts index 0bfd8235f..184444999 100644 --- a/lib/common/ITransaction.ts +++ b/lib/common/models/TransactionModel.ts @@ -1,7 +1,7 @@ /** * Defines a Sidetree transaction. 
*/ -export default interface ITransaction { +export default interface TransactionModel { transactionNumber: number; transactionTime: number; transactionTimeHash: string; diff --git a/lib/core/BatchScheduler.ts b/lib/core/BatchScheduler.ts new file mode 100644 index 000000000..e63636f96 --- /dev/null +++ b/lib/core/BatchScheduler.ts @@ -0,0 +1,56 @@ +import IBatchWriter from './interfaces/IBatchWriter'; +import IBlockchain from './interfaces/IBlockchain'; +import timeSpan = require('time-span'); + +/** + * Class that performs periodic writing of batches of Sidetree operations to CAS and blockchain. + */ +export default class BatchScheduler { + /** + * Flag indicating if this Batch Writer is currently processing a batch of operations. + */ + private processing: boolean = false; + + public constructor ( + private getBatchWriter: (blockchainTime: number) => IBatchWriter, + private blockchain: IBlockchain, + private batchingIntervalInSeconds: number) { + } + + /** + * The function that starts periodically anchoring operation batches to blockchain. + */ + public startPeriodicBatchWriting () { + setInterval(async () => this.writeOperationBatch(), this.batchingIntervalInSeconds * 1000); + } + + /** + * Processes the operations in the queue. + */ + public async writeOperationBatch () { + const endTimer = timeSpan(); // For calculating time taken to write operations. + + // Wait until the next interval if the Batch Writer is still processing a batch. + if (this.processing) { + return; + } + + try { + console.info('Start operation batch writing...'); + this.processing = true; + + // Get the correct version of the `BatchWriter`. 
+ const currentTime = this.blockchain.approximateTime.time; + const batchWriter = this.getBatchWriter(currentTime); + + await batchWriter.write(); + } catch (error) { + console.error('Unexpected and unhandled error during batch writing, investigate and fix:'); + console.error(error); + } finally { + this.processing = false; + + console.info(`End batch writing. Duration: ${endTimer.rounded()} ms.`); + } + } +} diff --git a/lib/core/BatchWriter.ts b/lib/core/BatchWriter.ts deleted file mode 100644 index c1d5d7a0f..000000000 --- a/lib/core/BatchWriter.ts +++ /dev/null @@ -1,153 +0,0 @@ -import BatchFile from './BatchFile'; -import Did from './Did'; -import Encoder from './Encoder'; -import MerkleTree from './util/MerkleTree'; -import Multihash from './Multihash'; -import OperationQueue from './interfaces/OperationQueue'; -import ProtocolParameters, { IProtocolParameters } from './ProtocolParameters'; -import timeSpan = require('time-span'); -import { Blockchain } from './Blockchain'; -import { Cas } from './Cas'; -import { Operation, OperationType } from './Operation'; - -/** - * Class that performs periodic writing of batches of Sidetree operations to CAS and blockchain. - */ -export default class BatchWriter { - /** - * Flag indicating if this Batch Writer is currently processing a batch of operations. - */ - private processing: boolean = false; - - public constructor ( - private blockchain: Blockchain, - private cas: Cas, - private batchingIntervalInSeconds: number, - private operationQueue: OperationQueue) { - } - - /** - * Adds the given operation to a queue to be batched and anchored on blockchain. - */ - public async add (operation: Operation) { - await this.operationQueue.enqueue(operation.didUniqueSuffix, operation.operationBuffer); - } - - /** - * The function that starts periodically anchoring operation batches to blockchain. 
- */ - public startPeriodicBatchWriting () { - setInterval(async () => this.writeOperationBatch(), this.batchingIntervalInSeconds * 1000); - } - - /** - * Checks to see if there is already an operation queued for the given DID unique suffix. - */ - public async hasOperationQueuedFor (didUniqueSuffix: string): Promise { - return this.operationQueue.contains(didUniqueSuffix); - } - - /** - * Processes the operations in the queue. - */ - public async writeOperationBatch () { - const endTimer = timeSpan(); // For calcuating time taken to write operations. - - // Wait until the next interval if the Batch Writer is still processing a batch. - if (this.processing) { - return; - } - - let batchWritingSucceeded = true; - let batch: Operation[] = []; - try { - console.info('Start operation batch writing...'); - this.processing = true; - - // Get the protocol version according to current blockchain time to decide on the batch size limit to enforce. - const protocolParameters = this.getCurrentProtocolParameters(); - - // Get the batch of operations to be anchored on the blockchain. - const currentTime = this.blockchain.approximateTime; - const operationBuffers = await this.operationQueue.peek(protocolParameters.maxOperationsPerBatch); - batch = operationBuffers.map((buffer) => Operation.createUnanchoredOperation(buffer, currentTime.time)); - console.info('Batch size = ' + batch.length); - - // Do nothing if there is nothing to batch together. - if (batch.length === 0) { - return; - } - - // Create the batch file buffer from the operation batch. - const batchFileBuffer = BatchFile.fromOperationBuffers(operationBuffers); - - // Write the 'batch file' to content addressable store. - const batchFileHash = await this.cas.write(batchFileBuffer); - console.info(`Wrote batch file ${batchFileHash} to content addressable store.`); - - // Compute the Merkle root hash. 
- const merkleRoot = MerkleTree.create(operationBuffers).rootHash; - const merkleRootAsMultihash = Multihash.encode(merkleRoot, 18); - const encodedMerkleRoot = Encoder.encode(merkleRootAsMultihash); - - // Construct the DID unique suffixes of each operation to be included in the anchor file. - const didUniqueSuffixes = this.getDidUniqueSuffixes(batch); - - // Construct the 'anchor file'. - const anchorFile = { - batchFileHash: batchFileHash, - merkleRoot: encodedMerkleRoot, - didUniqueSuffixes - }; - - // Make the 'anchor file' available in content addressable store. - const anchorFileJsonBuffer = Buffer.from(JSON.stringify(anchorFile)); - const anchorFileAddress = await this.cas.write(anchorFileJsonBuffer); - console.info(`Wrote anchor file ${anchorFileAddress} to content addressable store.`); - - // Anchor the 'anchor file hash' on blockchain. - await this.blockchain.write(anchorFileAddress); - } catch (error) { - batchWritingSucceeded = false; - console.error('Unexpected and unhandled error during batch writing, investigate and fix:'); - console.error(error); - } finally { - this.processing = false; - - // Remove written operations from queue if batch writing is successful. - if (batchWritingSucceeded) { - await this.operationQueue.dequeue(batch.length); - } - - console.info(`End batch writing. Duration: ${endTimer.rounded()} ms.`); - } - } - - /** - * Returns the DID unique suffix of each operation given in the same order. - */ - private getDidUniqueSuffixes (operations: Operation[]): string[] { - const didUniquesuffixes = new Array(operations.length); - - // Get the protocol version according to current blockchain time to decide on hashing algorithm to use for DID unique suffix computation. 
- const protocolParameters = this.getCurrentProtocolParameters(); - - for (let i = 0; i < operations.length; i++) { - const operation = operations[i]; - - if (operation.type === OperationType.Create) { - didUniquesuffixes[i] = Did.getUniqueSuffixFromEncodeDidDocument(operation.encodedPayload, protocolParameters.hashAlgorithmInMultihashCode); - } else { - didUniquesuffixes[i] = operation.didUniqueSuffix; - } - } - - return didUniquesuffixes; - } - - private getCurrentProtocolParameters (): IProtocolParameters { - const currentTime = this.blockchain.approximateTime; - const protocolParameters = ProtocolParameters.get(currentTime.time); - return protocolParameters; - } -} diff --git a/lib/core/Blockchain.ts b/lib/core/Blockchain.ts index effffda23..348d3d33d 100644 --- a/lib/core/Blockchain.ts +++ b/lib/core/Blockchain.ts @@ -1,55 +1,21 @@ import * as HttpStatus from 'http-status'; -import ErrorCode from '../common/ErrorCode'; -import IBlockchainTime from './interfaces/IBlockchainTime'; -import ITransaction from '../common/ITransaction'; +import BlockchainTimeModel from './models/BlockchainTimeModel'; +import ErrorCode from '../common/SharedErrorCode'; +import IBlockchain from './interfaces/IBlockchain'; import nodeFetch from 'node-fetch'; import ReadableStream from '../common/ReadableStream'; +import TransactionModel from '../common/models/TransactionModel'; import { SidetreeError } from './Error'; -/** - * Interface to access the underlying blockchain. - * This interface is mainly useful for creating a mock Blockchain for testing purposes. - */ -export interface Blockchain { - /** - * Writes the anchor file hash as a transaction to blockchain. - */ - write (anchorFileHash: string): Promise; - - /** - * Gets Sidetree transactions in chronological order. - * The function call may not return all known transactions, moreTransaction indicates if there are more transactions to be fetched. 
- * When sinceTransactionNumber is not given, Sidetree transactions starting from inception will be returned. - * When sinceTransactionNumber is given, only Sidetree transaction after the given transaction will be returned. - * @param sinceTransactionNumber A valid Sidetree transaction number. - * @param transactionTimeHash The hash associated with the anchored time of the transaction number given. - * Required if and only if sinceTransactionNumber is provided. - * @throws SidetreeError with ErrorCode.InvalidTransactionNumberOrTimeHash if a potential block reorganization is detected. - */ - read (sinceTransactionNumber?: number, transactionTimeHash?: string): Promise<{ moreTransactions: boolean, transactions: ITransaction[] }>; - - /** - * Given a list of Sidetree transaction in any order, iterate through the list and return the first transaction that is valid. - * @param transactions List of potentially valid transactions. - */ - getFirstValidTransaction (transactions: ITransaction[]): Promise; - - /** - * Gets the approximate latest time synchronously without requiring to make network call. - * Useful for cases where high performance is desired and hgih accuracy is not required. - */ - approximateTime: IBlockchainTime; -} - /** * Class that communicates with the underlying blockchain using REST API defined by the protocol document. */ -export class BlockchainClient implements Blockchain { +export default class Blockchain implements IBlockchain { /** Interval for refreshing the cached blockchain time. */ static readonly cachedBlockchainTimeRefreshInSeconds = 60; /** Used for caching the blockchain time to avoid excessive time fetching over network. */ - private cachedBlockchainTime: IBlockchainTime; + private cachedBlockchainTime: BlockchainTimeModel; private fetch = nodeFetch; @@ -75,7 +41,7 @@ export class BlockchainClient implements Blockchain { * The function that starts periodically anchoring operation batches to blockchain. 
*/ public startPeriodicCachedBlockchainTimeRefresh () { - setInterval(async () => this.getLatestTime(), BlockchainClient.cachedBlockchainTimeRefreshInSeconds * 1000); + setInterval(async () => this.getLatestTime(), Blockchain.cachedBlockchainTimeRefreshInSeconds * 1000); } public async write (anchorFileHash: string): Promise { @@ -97,7 +63,7 @@ export class BlockchainClient implements Blockchain { } } - public async read (sinceTransactionNumber?: number, transactionTimeHash?: string): Promise<{ moreTransactions: boolean, transactions: ITransaction[]}> { + public async read (sinceTransactionNumber?: number, transactionTimeHash?: string): Promise<{ moreTransactions: boolean, transactions: TransactionModel[]}> { if ((sinceTransactionNumber !== undefined && transactionTimeHash === undefined) || (sinceTransactionNumber === undefined && transactionTimeHash !== undefined)) { throw new Error('Transaction number and time hash must both be given or not given at the same time.'); @@ -131,7 +97,7 @@ export class BlockchainClient implements Blockchain { return responseBody; } - public async getFirstValidTransaction (transactions: ITransaction[]): Promise { + public async getFirstValidTransaction (transactions: TransactionModel[]): Promise { const bodyString = JSON.stringify({ transactions }); const requestParameters = { method: 'post', @@ -155,14 +121,14 @@ export class BlockchainClient implements Blockchain { return transaction; } - public get approximateTime (): IBlockchainTime { + public get approximateTime (): BlockchainTimeModel { return this.cachedBlockchainTime; } /** * Gets the latest blockchain time and updates the cached time. 
*/ - private async getLatestTime (): Promise { + private async getLatestTime (): Promise { try { console.info(`Refreshing cached blockchain time...`); const response = await this.fetch(this.timeUri); diff --git a/lib/core/Cas.ts b/lib/core/Cas.ts index d7db1a4ba..b898687f8 100644 --- a/lib/core/Cas.ts +++ b/lib/core/Cas.ts @@ -1,33 +1,14 @@ import * as HttpStatus from 'http-status'; -import IFetchResult from '../common/IFetchResult'; +import FetchResult from '../common/models/FetchResult'; +import ICas from './interfaces/ICas'; import nodeFetch from 'node-fetch'; import ReadableStream from '../common/ReadableStream'; import { FetchResultCode } from '../common/FetchResultCode'; -/** - * Interface for accessing the underlying CAS (Content Addressable Store). - * This interface is mainly useful for creating a mock CAS for testing purposes. - */ -export interface Cas { - /** - * Writes the given content to CAS. - * @returns The SHA256 hash in base64url encoding which represents the address of the content. - */ - write (content: Buffer): Promise; - - /** - * Reads the content of the given address in CAS. - * @param maxSizeInBytes The maximum allowed size limit of the content. - * @returns The fetch result containg the content buffer if found. - * The result `code` is set to `FetchResultCode.MaxSizeExceeded` if the content exceeds the specified max size. - */ - read (address: string, maxSizeInBytes: number): Promise; -} - /** * Class that communicates with the underlying CAS using REST API defined by the protocol document. */ -export class CasClient implements Cas { +export default class Cas implements ICas { private fetch = nodeFetch; @@ -57,7 +38,7 @@ export class CasClient implements Cas { return hash; } - public async read (address: string, maxSizeInBytes: number): Promise { + public async read (address: string, maxSizeInBytes: number): Promise { try { // Fetch the resource. 
const queryUri = `${this.uri}/${address}?max-size=${maxSizeInBytes}`; diff --git a/lib/core/Core.ts b/lib/core/Core.ts index 6bbcf4aca..d2dee7292 100644 --- a/lib/core/Core.ts +++ b/lib/core/Core.ts @@ -1,16 +1,15 @@ -import BatchWriter from './BatchWriter'; +import BatchScheduler from './BatchScheduler'; +import Blockchain from './Blockchain'; +import Cas from './Cas'; import DownloadManager from './DownloadManager'; -import IConfig from './interfaces/IConfig'; -import MongoDbOperationQueue from './MongoDbOperationQueue'; +import Config from './models/Config'; import MongoDbOperationStore from './MongoDbOperationStore'; import MongoDbTransactionStore from '../common/MongoDbTransactionStore'; import MongoDbUnresolvableTransactionStore from './MongoDbUnresolvableTransactionStore'; import Observer from './Observer'; -import OperationProcessor from './OperationProcessor'; -import ProtocolParameters, { IProtocolParameters } from './ProtocolParameters'; -import RequestHandler from './RequestHandler'; -import { BlockchainClient } from './Blockchain'; -import { CasClient } from './Cas'; +import Resolver from './Resolver'; +import VersionManager, { IProtocolVersion } from './VersionManager'; +import { ResponseModel } from '../common/Response'; /** * The core class that is instantiated when running a Sidetree node. @@ -19,41 +18,40 @@ export default class Core { private transactionStore: MongoDbTransactionStore; private unresolvableTransactionStore: MongoDbUnresolvableTransactionStore; private operationStore: MongoDbOperationStore; - private operationQueue: MongoDbOperationQueue; - private blockchain: BlockchainClient; + private versionManager: VersionManager; + private blockchain: Blockchain; + private cas: Cas; + private downloadManager: DownloadManager; private observer: Observer; - private batchWriter: BatchWriter; - - /** - * Operation and resolve request handler. 
- */ - public requestHandler: RequestHandler; + private batchScheduler: BatchScheduler; + private resolver: Resolver; /** * Core constructor. */ - public constructor (config: IConfig, versionsOfProtocolParameters: IProtocolParameters[]) { - ProtocolParameters.initialize(versionsOfProtocolParameters); - - // Component dependency initialization & injection. - this.blockchain = new BlockchainClient(config.blockchainServiceUri); - const cas = new CasClient(config.contentAddressableStoreServiceUri); - const downloadManager = new DownloadManager(config.maxConcurrentDownloads, cas); - this.operationQueue = new MongoDbOperationQueue(config.mongoDbConnectionString); - this.batchWriter = new BatchWriter(this.blockchain, cas, config.batchingIntervalInSeconds, this.operationQueue); + public constructor (config: Config, protocolVersions: IProtocolVersion[]) { + // Component dependency construction & injection. + this.versionManager = new VersionManager(config, protocolVersions); // `VersionManager` is first constructed component. 
this.operationStore = new MongoDbOperationStore(config.mongoDbConnectionString); - const operationProcessor = new OperationProcessor(config.didMethodName, this.operationStore); - this.requestHandler = new RequestHandler(operationProcessor, this.blockchain, this.batchWriter, config.didMethodName); + this.blockchain = new Blockchain(config.blockchainServiceUri); + this.cas = new Cas(config.contentAddressableStoreServiceUri); + this.downloadManager = new DownloadManager(config.maxConcurrentDownloads, this.cas); + this.resolver = new Resolver((blockchainTime) => this.versionManager.getOperationProcessor(blockchainTime), this.operationStore); + this.batchScheduler = new BatchScheduler( + (blockchainTime) => this.versionManager.getBatchWriter(blockchainTime), this.blockchain, config.batchingIntervalInSeconds); this.transactionStore = new MongoDbTransactionStore(config.mongoDbConnectionString); this.unresolvableTransactionStore = new MongoDbUnresolvableTransactionStore(config.mongoDbConnectionString); - this.observer = new Observer(this.blockchain, - downloadManager, - operationProcessor, - this.transactionStore, - this.unresolvableTransactionStore, - config.observingIntervalInSeconds); + this.observer = new Observer( + (blockchainTime) => this.versionManager.getTransactionProcessor(blockchainTime), + this.blockchain, + config.maxConcurrentDownloads, + this.operationStore, + this.transactionStore, + this.unresolvableTransactionStore, + config.observingIntervalInSeconds + ); - downloadManager.start(); + this.downloadManager.start(); } /** @@ -61,14 +59,43 @@ export default class Core { * The method starts the Observer and Batch Writer. 
*/ public async initialize () { - await this.operationQueue.initialize(); await this.transactionStore.initialize(); await this.unresolvableTransactionStore.initialize(); await this.operationStore.initialize(); await this.blockchain.initialize(); + await this.versionManager.initialize( + this.blockchain, + this.cas, + this.downloadManager, + this.operationStore, + this.resolver + ); // `VersionManager` is last initialized component. await this.observer.startPeriodicProcessing(); - this.batchWriter.startPeriodicBatchWriting(); + this.batchScheduler.startPeriodicBatchWriting(); this.blockchain.startPeriodicCachedBlockchainTimeRefresh(); } + + /** + * Handles an operation request. + */ + public async handleOperationRequest (request: Buffer): Promise { + const currentTime = this.blockchain.approximateTime; + const requestHandler = this.versionManager.getRequestHandler(currentTime.time); + const response = requestHandler.handleOperationRequest(request); + return response; + } + + /** + * Handles resolve operation. + * @param didOrDidDocument Can either be: + * 1. Fully qualified DID. e.g. 'did:sidetree:abc' or + * 2. An encoded DID Document prefixed by the DID method name. e.g. 'did:sidetree:'. + */ + public async handleResolveRequest (didOrDidDocument: string): Promise { + const currentTime = this.blockchain.approximateTime; + const requestHandler = this.versionManager.getRequestHandler(currentTime.time); + const response = requestHandler.handleResolveRequest(didOrDidDocument); + return response; + } } diff --git a/lib/core/DownloadManager.ts b/lib/core/DownloadManager.ts index aef07ea6a..c66ef2021 100644 --- a/lib/core/DownloadManager.ts +++ b/lib/core/DownloadManager.ts @@ -1,6 +1,6 @@ import * as crypto from 'crypto'; -import IFetchResult from '../common/IFetchResult'; -import { Cas } from './Cas'; +import FetchResult from '../common/models/FetchResult'; +import ICas from './interfaces/ICas'; /** * Interface containing information regarding each queued CAS download. 
@@ -35,7 +35,7 @@ interface DownloadInfo { /** * Holds the fetch result once the download is completed. */ - fetchResult?: IFetchResult; + fetchResult?: FetchResult; } /** @@ -44,7 +44,7 @@ interface DownloadInfo { export default class DownloadManager { private pendingDownloads: DownloadInfo[] = []; private activeDownloads: Map = new Map(); - private completedDownloads: Map = new Map(); + private completedDownloads: Map = new Map(); /** * Constructs the download manager. @@ -52,7 +52,7 @@ export default class DownloadManager { */ public constructor ( public maxConcurrentDownloads: number, - private cas: Cas) { + private cas: ICas) { // If maximum concurrent CAS download count is NaN, set it to a default value. if (isNaN(maxConcurrentDownloads)) { @@ -117,7 +117,7 @@ export default class DownloadManager { * Downloads the content of the given content hash. * @param contentHash Hash of the content to be downloaded. */ - public async download (contentHash: string, maxSizeInBytes: number): Promise { + public async download (contentHash: string, maxSizeInBytes: number): Promise { const handle = crypto.randomBytes(32); const fetchPromise = new Promise(resolve => { const downloadInfo = { handle, contentHash, maxSizeInBytes, resolve, completed: false, content: undefined }; diff --git a/lib/core/Error.ts b/lib/core/Error.ts index a927def2d..c2d6b751d 100644 --- a/lib/core/Error.ts +++ b/lib/core/Error.ts @@ -1,11 +1,9 @@ -import ErrorCode from '../common/ErrorCode'; - /** * Standardized error class for throwing generic errors internal to this project. * NOTE: Not to be confused with RequestError which is used as a response to external requests. */ export class SidetreeError extends Error { - constructor (public code: ErrorCode, message?: string) { + constructor (public code: string, message?: string) { super(message ? message : code); // NOTE: Extending 'Error' breaks prototype chain since TypeScript 2.1. 
diff --git a/lib/core/MongoDbOperationStore.ts b/lib/core/MongoDbOperationStore.ts index 0f3d34db1..8798dcede 100644 --- a/lib/core/MongoDbOperationStore.ts +++ b/lib/core/MongoDbOperationStore.ts @@ -1,6 +1,7 @@ -import OperationStore from './interfaces/OperationStore'; +import AnchoredOperationModel from './models/AnchoredOperationModel'; +import IOperationStore from './interfaces/IOperationStore'; +import NamedAnchoredOperationModel from './models/NamedAnchoredOperationModel'; import { Binary, Collection, Long, MongoClient } from 'mongodb'; -import { Operation } from './Operation'; /** * Sidetree operation stored in MongoDb. @@ -15,14 +16,13 @@ interface IMongoOperation { opIndex: number; transactionNumber: Long; transactionTime: number; - batchFileHash: string; } /** * Implementation of OperationStore that stores the operation data in * a MongoDB database. */ -export default class MongoDbOperationStore implements OperationStore { +export default class MongoDbOperationStore implements IOperationStore { private collection: Collection | undefined; /** @@ -35,7 +35,11 @@ export default class MongoDbOperationStore implements OperationStore { */ private readonly operationCollectionName: string; - constructor (private serverUrl: string, databaseName?: string, operationCollectionName?: string) { + constructor ( + private serverUrl: string, + databaseName?: string, + operationCollectionName?: string + ) { this.databaseName = databaseName ? databaseName : 'sidetree'; this.operationCollectionName = operationCollectionName ? 
operationCollectionName : 'operations'; } @@ -63,7 +67,7 @@ export default class MongoDbOperationStore implements OperationStore { /** * Implement OperationStore.put */ - public async put (operations: Array): Promise { + public async put (operations: NamedAnchoredOperationModel[]): Promise { let batch = this.collection!.initializeUnorderedBulkOp(); for (const operation of operations) { @@ -86,9 +90,9 @@ export default class MongoDbOperationStore implements OperationStore { * didUniqueSuffix ordered by (transactionNumber, operationIndex) * ascending. */ - public async get (didUniqueSuffix: string): Promise> { + public async get (didUniqueSuffix: string): Promise { const mongoOperations = await this.collection!.find({ didUniqueSuffix }).sort({ transactionNumber: 1, operationIndex: 1 }).toArray(); - return mongoOperations.map(MongoDbOperationStore.convertToOperation); + return mongoOperations.map((operation) => { return MongoDbOperationStore.convertToAnchoredOperationModel(operation); }); } /** @@ -108,14 +112,13 @@ export default class MongoDbOperationStore implements OperationStore { * that can be stored on MongoDb. The IMongoOperation object has sufficient * information to reconstruct the original operation. */ - private static convertToMongoOperation (operation: Operation): IMongoOperation { + private static convertToMongoOperation (operation: NamedAnchoredOperationModel): IMongoOperation { return { didUniqueSuffix: operation.didUniqueSuffix, operationBufferBsonBinary: new Binary(operation.operationBuffer), - opIndex: operation.operationIndex!, - transactionNumber: Long.fromNumber(operation.transactionNumber!), - transactionTime: operation.transactionTime!, - batchFileHash: operation.batchFileHash! 
+ opIndex: operation.operationIndex, + transactionNumber: Long.fromNumber(operation.transactionNumber), + transactionTime: operation.transactionTime }; } @@ -126,17 +129,12 @@ export default class MongoDbOperationStore implements OperationStore { * Note: mongodb.find() returns an 'any' object that automatically converts longs to numbers - * hence the type 'any' for mongoOperation. */ - private static convertToOperation (mongoOperation: any): Operation { - return Operation.createAnchoredOperation( - mongoOperation.operationBufferBsonBinary.buffer, - { - transactionNumber: mongoOperation.transactionNumber, - transactionTime: mongoOperation.transactionTime, - transactionTimeHash: 'unavailable', - anchorFileHash: 'unavailable', - batchFileHash: mongoOperation.batchFileHash - }, - mongoOperation.opIndex - ); + private static convertToAnchoredOperationModel (mongoOperation: any): AnchoredOperationModel { + return { + operationBuffer: mongoOperation.operationBufferBsonBinary.buffer, + operationIndex: mongoOperation.opIndex, + transactionNumber: mongoOperation.transactionNumber, + transactionTime: mongoOperation.transactionTime + }; } } diff --git a/lib/core/MongoDbUnresolvableTransactionStore.ts b/lib/core/MongoDbUnresolvableTransactionStore.ts index 8de3829c6..09a3910a6 100644 --- a/lib/core/MongoDbUnresolvableTransactionStore.ts +++ b/lib/core/MongoDbUnresolvableTransactionStore.ts @@ -1,17 +1,17 @@ -import ITransaction from '../common/ITransaction'; -import UnresolvableTransactionStore from './interfaces/UnresolvableTransactionStore'; +import IUnresolvableTransactionStore from './interfaces/IUnresolvableTransactionStore'; +import TransactionModel from '../common/models/TransactionModel'; import { Collection, Db, Long, MongoClient } from 'mongodb'; -interface IUnresolvableTransaction extends ITransaction { +interface IUnresolvableTransaction extends TransactionModel { firstFetchTime: number; retryAttempts: number; nextRetryTime: number; } /** - * Implementation of 
TransactionStore that stores the transaction data in a MongoDB database. + * Implementation of `IUnresolvableTransactionStore` that stores the transaction data in a MongoDB database. */ -export default class MongoDbUnresolvableTransactionStore implements UnresolvableTransactionStore { +export default class MongoDbUnresolvableTransactionStore implements IUnresolvableTransactionStore { /** Default database name used if not specified in constructor. */ public static readonly defaultDatabaseName: string = 'sidetree'; /** Collection name for unresolvable transactions. */ @@ -56,7 +56,7 @@ export default class MongoDbUnresolvableTransactionStore implements Unresolvable this.unresolvableTransactionCollection = await MongoDbUnresolvableTransactionStore.createUnresolvableTransactionCollectionIfNotExist(this.db!); } - async recordUnresolvableTransactionFetchAttempt (transaction: ITransaction): Promise { + async recordUnresolvableTransactionFetchAttempt (transaction: TransactionModel): Promise { // Try to get the unresolvable transaction from store. const transactionTime = transaction.transactionTime; const transactionNumber = transaction.transactionNumber; @@ -94,13 +94,13 @@ export default class MongoDbUnresolvableTransactionStore implements Unresolvable } } - async removeUnresolvableTransaction (transaction: ITransaction): Promise { + async removeUnresolvableTransaction (transaction: TransactionModel): Promise { const transactionTime = transaction.transactionTime; const transactionNumber = transaction.transactionNumber; await this.unresolvableTransactionCollection!.deleteOne({ transactionTime, transactionNumber: Long.fromNumber(transactionNumber) }); } - async getUnresolvableTransactionsDueForRetry (maximumReturnCount?: number): Promise { + async getUnresolvableTransactionsDueForRetry (maximumReturnCount?: number): Promise { // Override the return count if it is specified. 
let returnCount = this.maximumUnresolvableTransactionReturnCount; if (maximumReturnCount !== undefined) { diff --git a/lib/core/Observer.ts b/lib/core/Observer.ts index 97fefad6d..265dd1773 100644 --- a/lib/core/Observer.ts +++ b/lib/core/Observer.ts @@ -1,35 +1,14 @@ -import AnchorFile, { IAnchorFile } from './AnchorFile'; -import BatchFile from './BatchFile'; -import DownloadManager from './DownloadManager'; -import ErrorCode from '../common/ErrorCode'; -import IResolvedTransaction from './interfaces/IResolvedTransaction'; -import ITransaction from '../common/ITransaction'; -import OperationProcessor from './OperationProcessor'; -import ProtocolParameters from './ProtocolParameters'; +import IBlockchain from './interfaces/IBlockchain'; +import IOperationStore from './interfaces/IOperationStore'; +import ITransactionProcessor from './interfaces/ITransactionProcessor'; +import ITransactionStore from './interfaces/ITransactionStore'; +import IUnresolvableTransactionStore from './interfaces/IUnresolvableTransactionStore'; +import SharedErrorCode from '../common/SharedErrorCode'; import timeSpan = require('time-span'); -import TransactionStore from './interfaces/TransactionStore'; -import UnresolvableTransactionStore from './interfaces/UnresolvableTransactionStore'; -import { Blockchain } from './Blockchain'; -import { FetchResultCode } from '../common/FetchResultCode'; -import { Operation } from './Operation'; +import TransactionModel from '../common/models/TransactionModel'; +import TransactionUnderProcessingModel, { TransactionProcessingStatus } from './models/TransactionUnderProcessingModel'; import { SidetreeError } from './Error'; -/** - * The state of a transaction that is being processed. - */ -enum TransactionProcessingStatus { - Pending = 'pending', - Processsed = 'processed' -} - -/** - * Data structure for holding a transaction that is being processed and its state. 
- */ -interface ITransactionUnderProcessing { - transaction: ITransaction; - processingStatus: TransactionProcessingStatus; -} - /** * Class that performs periodic processing of batches of Sidetree operations anchored to the blockchain. */ @@ -44,19 +23,20 @@ export default class Observer { /** * The list of transactions that are being downloaded or processed. */ - private transactionsUnderProcessing: { transaction: ITransaction; processingStatus: TransactionProcessingStatus }[] = []; + private transactionsUnderProcessing: TransactionUnderProcessingModel[] = []; /** * This is the transaction that is used as a timestamp to fetch newer transaction. */ - private lastKnownTransaction: ITransaction | undefined; + private lastKnownTransaction: TransactionModel | undefined; public constructor ( - private blockchain: Blockchain, - private downloadManager: DownloadManager, - private operationProcessor: OperationProcessor, - private transactionStore: TransactionStore, - private unresolvableTransactionStore: UnresolvableTransactionStore, + private getTransactionProcessor: (blockchainTime: number) => ITransactionProcessor, + private blockchain: IBlockchain, + private maxConcurrentDownloads: number, + private operationStore: IOperationStore, + private transactionStore: ITransactionStore, + private unresolvableTransactionStore: IUnresolvableTransactionStore, private observingIntervalInSeconds: number) { } @@ -87,7 +67,7 @@ export default class Observer { /** * Processes new transactions if any, then reprocess a set of unresolvable transactions if any, - * then scehdules the next round of processing using the following rules unless `stopPeriodicProcessing()` is invoked. + * then schedules the next round of processing unless `stopPeriodicProcessing()` is invoked. 
*/ public async processTransactions () { let blockReorganizationDetected = false; @@ -110,7 +90,7 @@ export default class Observer { console.info(`Fetched ${readResult.transactions.length} Sidetree transactions from blockchain service in ${endTimer.rounded()} ms.`); } catch (error) { // If block reorganization (temporary fork) has happened. - if (error instanceof SidetreeError && error.code === ErrorCode.InvalidTransactionNumberOrTimeHash) { + if (error instanceof SidetreeError && error.code === SharedErrorCode.InvalidTransactionNumberOrTimeHash) { console.info(`Block reorganization detected.`); blockReorganizationDetected = true; moreTransactions = true; @@ -130,7 +110,7 @@ export default class Observer { }; this.transactionsUnderProcessing.push(awaitingTransaction); // Intentionally not awaiting on downloading and processing each operation batch. - void this.downloadThenProcessBatchAsync(transaction, awaitingTransaction); + void this.processTransaction(transaction, awaitingTransaction); } // If block reorg is detected, we must wait until no more operation processing is pending, @@ -147,7 +127,7 @@ export default class Observer { // We hold off from fetching more transactions if the list of transactions under processing gets too long. // We will wait for count of transaction being processed to fall to the maximum allowed concurrent downloads // before attempting further transaction fetches. - await this.waitUntilCountOfTransactionsUnderProcessingIsLessOrEqualTo(this.downloadManager.maxConcurrentDownloads); + await this.waitUntilCountOfTransactionsUnderProcessingIsLessOrEqualTo(this.maxConcurrentDownloads); } // Update the last known transaction. @@ -181,6 +161,7 @@ export default class Observer { // Wait a little before checking again. 
await new Promise(resolve => setTimeout(resolve, 1000)); } + return; } @@ -202,7 +183,7 @@ export default class Observer { }; unresolvableTransactionStatus.push(awaitingTransaction); // Intentionally not awaiting on downloading and processing each operation batch. - void this.downloadThenProcessBatchAsync(transaction, awaitingTransaction); + void this.processTransaction(transaction, awaitingTransaction); } // Wait until all unresolvable transactions are processed, @@ -237,146 +218,33 @@ export default class Observer { // Trim the transaction list. this.transactionsUnderProcessing.splice(0, i); } + /** - * Processes the given transaction. - * If the given transaction is unresolvable (anchor/batch file not found), save the transaction for retry. - * If no error encountered (unresolvable transaction is NOT an error), advance the 'last processed transaction' marker. + * Processes the given transaction by passing the transaction to the right version of the transaction processor based on the transaction time. + * The transaction processing generically involves first downloading DID operation data from CAS (Content Addressable Storage), + * then storing the operations indexed/grouped by DIDs in the persistent operation DB. 
*/ - private async downloadThenProcessBatchAsync (transaction: ITransaction, transactionUnderProcessing: ITransactionUnderProcessing) { - let retryNeeded = false; + private async processTransaction (transaction: TransactionModel, transactionUnderProcessing: TransactionUnderProcessingModel) { + let transactionProcessedSuccessfully; try { - // Get the protocol parameters - const protocolParameters = ProtocolParameters.get(transaction.transactionTime); - - console.info(`Downloading anchor file '${transaction.anchorFileHash}', max size limit ${protocolParameters.maxAnchorFileSizeInBytes} bytes...`); - const anchorFileFetchResult = await this.downloadManager.download(transaction.anchorFileHash, protocolParameters.maxAnchorFileSizeInBytes); - - // No thing to process if the file hash is invalid. No retry needed. - if (anchorFileFetchResult.code === FetchResultCode.InvalidHash) { - console.info(`Anchor file '${transaction.anchorFileHash}' is not a valid hash.`); - return; - } - - // No thing to process if the file size exceeds protocol specified size limit, no retry needed either. - if (anchorFileFetchResult.code === FetchResultCode.MaxSizeExceeded) { - console.info(`Anchor file '${transaction.anchorFileHash}' exceeded max size limit ${protocolParameters.maxAnchorFileSizeInBytes} bytes.`); - return; - } - - // Content for hash exists but is not a file. No retry needed. - if (anchorFileFetchResult.code === FetchResultCode.NotAFile) { - console.info(`Anchor file hash '${transaction.anchorFileHash}' points to a content that is not a file.`); - return; - } - - // If Content Addressable Storage is not reachable, mark the transaction for retry later. - if (anchorFileFetchResult.code === FetchResultCode.CasNotReachable) { - console.info(`CAS not reachable for anchor file '${transaction.anchorFileHash}', will try again later.`); - retryNeeded = true; - return; - } - - // If file cannot be found, mark it for retry later. 
- if (anchorFileFetchResult.code === FetchResultCode.NotFound) { - console.info(`Anchor file '${transaction.anchorFileHash}' not found, will try again later.`); - retryNeeded = true; - return; - } - - console.info(`Anchor file '${transaction.anchorFileHash}' of size ${anchorFileFetchResult.content!.length} bytes downloaded.`); - let anchorFile: IAnchorFile; - try { - const maxOperationsPerBatch = protocolParameters.maxOperationsPerBatch; - const hashAlgorithmInMultihashCode = protocolParameters.hashAlgorithmInMultihashCode; - anchorFile = AnchorFile.parseAndValidate(anchorFileFetchResult.content!, maxOperationsPerBatch, hashAlgorithmInMultihashCode); - } catch (error) { - // Give meaningful/specific error code and message when possible. - if (error instanceof SidetreeError) { - console.info(`Invalid anchor file: ${error}`); - console.info(`Anchor file '${transaction.anchorFileHash}' failed parsing/validation, transaction '${transaction.transactionNumber}' ignored...`); - } else { - console.error(`Unexpected error processing anchor file, MUST investigate and fix: ${error}`); - retryNeeded = true; - } - - return; - } - - console.info(`Downloading batch file '${anchorFile.batchFileHash}', max size limit ${protocolParameters.maxBatchFileSizeInBytes}...`); - const batchFileFetchResult = await this.downloadManager.download(anchorFile.batchFileHash, protocolParameters.maxBatchFileSizeInBytes); - - // Nothing to process if the file hash is invalid. No retry needed. - if (batchFileFetchResult.code === FetchResultCode.InvalidHash) { - console.info(`Batch file '${anchorFile.batchFileHash}' is not a valid hash.`); - return; - } - - // Nothing to process if the file size exceeds protocol specified size limit, no retry needed either. 
- if (batchFileFetchResult.code === FetchResultCode.MaxSizeExceeded) { - console.info(`Batch file '${anchorFile.batchFileHash}' exceeded max size limit ${protocolParameters.maxBatchFileSizeInBytes}...`); - return; - } - - // Content for hash exists but is not a file. No retry needed. - if (batchFileFetchResult.code === FetchResultCode.NotAFile) { - console.info(`Batch file hash '${anchorFile.batchFileHash}' points to a content that is not a file.`); - return; - } - - // If Content Addressable Storage is not reachable, mark the transaction for retry later. - if (batchFileFetchResult.code === FetchResultCode.CasNotReachable) { - console.info(`CAS not reachable for batch file '${anchorFile.batchFileHash}', will try again later.`); - retryNeeded = true; - return; - } - - // If file cannot be found, mark it for retry later. - if (batchFileFetchResult.code === FetchResultCode.NotFound) { - console.info(`Batch file '${anchorFile.batchFileHash}' not found, will try again later.`); - retryNeeded = true; - return; - } - - console.info(`Batch file '${anchorFile.batchFileHash}' of size ${batchFileFetchResult.content!.length} downloaded.`); - - // Construct a resolved transaction from the original transaction object now that batch file is fetched. 
- const resolvedTransaction: IResolvedTransaction = { - transactionNumber: transaction.transactionNumber, - transactionTime: transaction.transactionTime, - transactionTimeHash: transaction.transactionTimeHash, - anchorFileHash: transaction.anchorFileHash, - batchFileHash: anchorFile.batchFileHash - }; - - let operations: Operation[]; - try { - operations = await BatchFile.parseAndValidate(batchFileFetchResult.content!, anchorFile, resolvedTransaction); - } catch (error) { - console.info(error); - console.info(`Batch file '${anchorFile.batchFileHash}' failed parsing/validation, transaction '${transaction.transactionNumber}' ignored.`); - return; - } - - // If the code reaches here, it means that the batch of operations is valid, process the operations. - const endTimer = timeSpan(); - await this.operationProcessor.process(operations); - console.info(`Processed batch '${anchorFile.batchFileHash}' of ${operations.length} operations. Time taken: ${endTimer.rounded()} ms.`); + const transactionProcessor: ITransactionProcessor = this.getTransactionProcessor(transaction.transactionTime); + transactionProcessedSuccessfully = await transactionProcessor.processTransaction(transaction); } catch (error) { console.error(`Unhandled error encoutnered processing transaction '${transaction.transactionNumber}'.`); console.error(error); - retryNeeded = true; + transactionProcessedSuccessfully = false; } finally { // Purposely setting processing status first before rest of the code to prevent any possibility of deadlocking the Observer. 
console.info(`Finished processing transaction '${transaction.transactionNumber}'.`); transactionUnderProcessing.processingStatus = TransactionProcessingStatus.Processsed; - if (retryNeeded) { - console.info(`Recording failed processing attempt for transaction '${transaction.transactionNumber}'...`); - await this.unresolvableTransactionStore.recordUnresolvableTransactionFetchAttempt(transaction); - } else { + if (transactionProcessedSuccessfully) { console.info(`Removing transaction '${transaction.transactionNumber}' from unresolvable transactions if exists...`); await this.unresolvableTransactionStore.removeUnresolvableTransaction(transaction); + } else { + console.info(`Recording failed processing attempt for transaction '${transaction.transactionNumber}'...`); + await this.unresolvableTransactionStore.recordUnresolvableTransactionFetchAttempt(transaction); } } } @@ -397,7 +265,7 @@ export default class Observer { // Revert all processed operations that came after the best known valid recent transaction. console.info('Reverting operations...'); - await this.operationProcessor.rollback(bestKnownValidRecentTransactionNumber); + await this.operationStore.delete(bestKnownValidRecentTransactionNumber); // NOTE: MUST do this step LAST to handle incomplete operation rollback due to unexpected scenarios, such as power outage etc. await this.transactionStore.removeTransactionsLaterThan(bestKnownValidRecentTransactionNumber); diff --git a/lib/core/OperationProcessor.ts b/lib/core/OperationProcessor.ts deleted file mode 100644 index 5331a5220..000000000 --- a/lib/core/OperationProcessor.ts +++ /dev/null @@ -1,172 +0,0 @@ -import Document, { IDocument } from './Document'; -import OperationStore from './interfaces/OperationStore'; -import ProtocolParameters from './ProtocolParameters'; -import { Operation, OperationType } from './Operation'; - -/** - * Implementation of OperationProcessor. Uses a OperationStore - * that might, e.g., use a backend database for persistence. 
- * All 'processing' is deferred to resolve time, with process() - * simply storing the operation in the store. - */ -export default class OperationProcessor { - - public constructor (private didMethodName: string, private operationStore: OperationStore) { } - - /** - * Process a batch of operations. Simply store the operations in the - * store. - */ - public async process (operations: Array): Promise { - return this.operationStore.put(operations); - } - - /** - * Remove all previously processed operations with transactionNumber - * greater than the provided transaction number. Relies on - * OperationStore.delete that implements this functionality. - */ - public async rollback (transactionNumber?: number): Promise { - return this.operationStore.delete(transactionNumber); - } - - /** - * Resolve the given DID unique suffix to its DID Doducment. - * @param didUniqueSuffix The unique suffix of the DID to resolve. e.g. if 'did:sidetree:abc123' is the DID, the unique suffix would be 'abc123' - * @returns DID Document. Undefined if the unique suffix of the DID is deleted or not found. - * - * Iterate over all operations in blockchain-time order extending the - * the operation chain while checking validity. - */ - public async resolve (didUniqueSuffix: string): Promise { - console.info(`Resolving DID unique suffix '${didUniqueSuffix}'...`); - - // NOTE: We create an object referencing the DID document to be constructed so that both: - // 1. `didDocument` can be `undefined` initially; and - // 2. `didDocument` can be modified directly in-place in subsequent document patching. - let didDocumentReference: { didDocument: IDocument | undefined } = { didDocument: undefined }; - let previousOperation: Operation | undefined; - - const didOps = await this.operationStore.get(didUniqueSuffix); - - // Apply each operation in chronological order to build a complete DID Document. 
- for (const operation of didOps) { - let isOperationValid: boolean; - isOperationValid = await this.apply(operation, previousOperation, didDocumentReference); - - if (isOperationValid) { - previousOperation = operation; - - // If this is a delete operation, this will be the last valid operation for this DID. - if (operation.type === OperationType.Delete) { - break; - } - } else { - const batchFileHash = operation.batchFileHash; - const operationIndex = operation.operationIndex; - console.info(`Ignored invalid operation for unique suffix '${didUniqueSuffix}' in batch file '${batchFileHash}' operation index ${operationIndex}.`); - } - } - - return didDocumentReference.didDocument; - } - - /** - * Applies an operation on top of the given DID document in place. - * In the case of an invalid operation, the given DID document will be unchanged. - * In the case of a (valid) delete operation, the given DID document will be set to `undefined`. - * - * NOTE: An object referencing the DID document is used so that - * `didDocumentReference.didDocument` can be `undefined` initially and be set to an object created. - * An alternative approach is to include the DID Document as a return value, but that would give the - * misconception that the given DID Document is unchanged. - * - * @param operation The operation to apply against the given DID Document (if any). - * @param previousOperation The previously operation applied if any. Used for operation validation. - * @param didDocumentReference The object containing DID document to apply the given operation against. - * @returns a boolean that indicates if the operation is valid and applied. - */ - private async apply (operation: Operation, previousOperation: Operation | undefined, didDocumentReference: { didDocument: IDocument | undefined }): - Promise { - // NOTE: only used for read interally. 
- const didDocument = didDocumentReference.didDocument; - - if (operation.type === OperationType.Create) { - - // If either of these is defined, then we have seen a previous create operation. - if (previousOperation || didDocumentReference.didDocument) { - return false; - } - - const originalDidDocument = this.getOriginalDocument(operation)!; - - const signingKey = Document.getPublicKey(originalDidDocument, operation.signingKeyId); - - if (!signingKey) { - return false; - } - - if (!(await operation.verifySignature(signingKey))) { - return false; - } - - didDocumentReference.didDocument = originalDidDocument; - return true; - } else if (operation.type === OperationType.Delete) { - // Delete can be applied only on valid did with a current document - if (!didDocument) { - return false; - } - - // The current did document should contain the public key mentioned in the operation ... - const publicKey = Document.getPublicKey(didDocument, operation.signingKeyId); - if (!publicKey) { - return false; - } - - // ... and the signature should verify - if (!(await operation.verifySignature(publicKey))) { - return false; - } - - // If the delete is valid - didDocumentReference.didDocument = undefined; - return true; - } else { - // Update operation - - // Every operation other than a create has a previous operation and a valid - // current DID document. - if (!previousOperation || !didDocument) { - return false; - } - - // Any non-create needs a previous operation hash that should match the hash of the latest valid operation (previousOperation) - if (operation.previousOperationHash !== previousOperation.getOperationHash()) { - return false; - } - - // The current did document should contain the public key mentioned in the operation ... - const publicKey = Document.getPublicKey(didDocument, operation.signingKeyId); - if (!publicKey) { - return false; - } - - // ... 
and the signature should verify - if (!(await operation.verifySignature(publicKey))) { - return false; - } - - Operation.applyPatchesToDidDocument(didDocument, operation.patches!); - return true; - } - } - - /** - * Gets the original DID document from a create operation. - */ - private getOriginalDocument (createOperation: Operation): IDocument | undefined { - const protocolVersion = ProtocolParameters.get(createOperation.transactionTime!); - return Document.from(createOperation.encodedPayload, this.didMethodName, protocolVersion.hashAlgorithmInMultihashCode); - } -} diff --git a/lib/core/ProtocolParameters.ts b/lib/core/ProtocolParameters.ts deleted file mode 100644 index c43a22038..000000000 --- a/lib/core/ProtocolParameters.ts +++ /dev/null @@ -1,64 +0,0 @@ -/** - * Defines the list of protocol parameters. - */ -export interface IProtocolParameters { - /** The inclusive starting logical blockchain time that this protocol applies to. */ - startingBlockchainTime: number; - /** Hash algorithm in Multihash code in DEC (not in HEX). */ - hashAlgorithmInMultihashCode: number; - /** Maximum allowed size of anchor file stored in Content Addressable Storage. */ - maxAnchorFileSizeInBytes: number; - /** Maximum allowed size of batch file stored in Content Addressable Storage. */ - maxBatchFileSizeInBytes: number; - /** Maximum operations per batch. */ - maxOperationsPerBatch: number; - /** Maximum size of an operation in bytes. */ - maxOperationByteSize: number; -} - -// Reverse sorted protocol versions. ie. latest version first. -let protocolParametersVersionsSorted: IProtocolParameters[]; - -// Cached list of supported hash algorithms. -let supportedHashAlgorithms: number[]; - -/** - * Contains operations related to protocol parameters. - */ -export default class ProtocolParameters { - - /** - * Initializes the protocol parameters versions. - * Must be invoked first before other methods in this class. 
- */ - public static initialize (protocolParametersVersions: IProtocolParameters[]) { - // Reverse sort. - protocolParametersVersionsSorted = protocolParametersVersions.sort((a, b) => b.startingBlockchainTime - a.startingBlockchainTime); - - // Compute and cache supported hash algorithms. - supportedHashAlgorithms = protocolParametersVersionsSorted.map(version => version.hashAlgorithmInMultihashCode); - supportedHashAlgorithms = Array.from(new Set(supportedHashAlgorithms)); // This line removes duplicates. - } - - /** - * Gets the corresponding protocol parameters as a Protocol object given the blockchain time. - */ - public static get (blockchainTime: number): IProtocolParameters { - // Iterate through each version to find the right version. - for (const protocolParameters of protocolParametersVersionsSorted) { - if (blockchainTime >= protocolParameters.startingBlockchainTime) { - return protocolParameters; - } - } - - throw new Error(`Unabled to find protocol parameters for the given blockchain time ${blockchainTime}`); - } - - /** - * Gets the list of hash algorithms used by this Sidetree network. - */ - public static getSupportedHashAlgorithms (): number[] { - return supportedHashAlgorithms; - } - -} diff --git a/lib/core/Resolver.ts b/lib/core/Resolver.ts new file mode 100644 index 000000000..51832c092 --- /dev/null +++ b/lib/core/Resolver.ts @@ -0,0 +1,52 @@ +import IOperationProcessor from './interfaces/IOperationProcessor'; +import IOperationStore from './interfaces/IOperationStore'; + +/** + * Implementation of OperationProcessor. Uses a OperationStore + * that might, e.g., use a backend database for persistence. + * All 'processing' is deferred to resolve time, with process() + * simply storing the operation in the store. + * + * NOTE: Resolver needs to be versioned because it depends on `VersionManager` being constructed to fetch the versioned operation processor. 
+ */ +export default class Resolver { + + public constructor (private getOperationProcessor: (blockchainTime: number) => IOperationProcessor, private operationStore: IOperationStore) { } + + /** + * Resolve the given DID unique suffix to its DID Document. + * @param didUniqueSuffix The unique suffix of the DID to resolve. e.g. if 'did:sidetree:abc123' is the DID, the unique suffix would be 'abc123' + * @returns DID Document. Undefined if the unique suffix of the DID is deleted or not found. + * + * Iterate over all operations in blockchain-time order extending the + * operation chain while checking validity. + */ + public async resolve (didUniqueSuffix: string): Promise { + console.info(`Resolving DID unique suffix '${didUniqueSuffix}'...`); + + // NOTE: We create an object referencing the DID document to be constructed so that both: + // 1. `didDocument` can be `undefined` initially; and + // 2. `didDocument` can be modified directly in-place in subsequent document patching. + let didDocumentReference: { didDocument: object | undefined } = { didDocument: undefined }; + let previousOperationHash: string | undefined; + + const operations = await this.operationStore.get(didUniqueSuffix); + + // Patch each operation in chronological order to build a complete DID Document.
+ for (const operation of operations) { + const operationProcessor = this.getOperationProcessor(operation.transactionTime); + const patchResult = await operationProcessor.patch(operation, previousOperationHash, didDocumentReference); + + if (patchResult.validOperation) { + previousOperationHash = patchResult.operationHash; + } else { + const index = operation.operationIndex; + const time = operation.transactionTime; + const number = operation.transactionNumber; + console.info(`Ignored invalid operation for DID '${didUniqueSuffix}' in transaction '${number}' at time '${time}' at operation index ${index}.`); + } + } + + return didDocumentReference.didDocument; + } +} diff --git a/lib/core/VersionManager.ts b/lib/core/VersionManager.ts new file mode 100644 index 000000000..b8ea716b5 --- /dev/null +++ b/lib/core/VersionManager.ts @@ -0,0 +1,175 @@ +import DownloadManager from './DownloadManager'; +import IBatchWriter from './interfaces/IBatchWriter'; +import IBlockchain from './interfaces/IBlockchain'; +import ICas from './interfaces/ICas'; +import Config from './models/Config'; +import IOperationProcessor from './interfaces/IOperationProcessor'; +import IOperationStore from './interfaces/IOperationStore'; +import IRequestHandler from './interfaces/IRequestHandler'; +import ITransactionProcessor from './interfaces/ITransactionProcessor'; +import ProtocolVersionMetadata from './models/ProtocolVersionMetadata'; +import Resolver from './Resolver'; + +/** + * Defines a protocol version and its starting blockchain time. + */ +export interface IProtocolVersion { + /** The inclusive starting logical blockchain time that this protocol applies to. */ + startingBlockchainTime: number; + version: string; +} + +/** + * The class that handles the loading of different versions of protocol codebase. + */ +export default class VersionManager { + // Reverse sorted protocol versions. ie. latest version first. 
+ private protocolVersionsReverseSorted: IProtocolVersion[]; + + private batchWriters: Map; + private operationProcessors: Map; + private requestHandlers: Map; + private transactionProcessors: Map; + private protocolVersionMetadatas: Map; + + public constructor ( + private config: Config, + protocolVersions: IProtocolVersion[] + ) { + + // Reverse sort protocol versions. + this.protocolVersionsReverseSorted = protocolVersions.sort((a, b) => b.startingBlockchainTime - a.startingBlockchainTime); + + this.batchWriters = new Map(); + this.operationProcessors = new Map(); + this.requestHandlers = new Map(); + this.transactionProcessors = new Map(); + this.protocolVersionMetadatas = new Map(); + } + + /** + * Loads all the versions of the protocol codebase. + */ + public async initialize ( + blockchain: IBlockchain, + cas: ICas, + downloadManager: DownloadManager, + operationStore: IOperationStore, + resolver: Resolver + ) { + + // Load all the metadata on all protocol versions first because instantiation of other components will need it. + for (const protocolVersion of this.protocolVersionsReverseSorted) { + const version = protocolVersion.version; + const protocolVersionMetadata = (await import(`./versions/${version}/ProtocolVersionMetadata`)).default; + this.protocolVersionMetadatas.set(version, protocolVersionMetadata); + } + + // Get and cache supported hash algorithms. + let allSupportedHashAlgorithms = Array.from(this.protocolVersionMetadatas.values(), value => value.hashAlgorithmInMultihashCode); + allSupportedHashAlgorithms = Array.from(new Set(allSupportedHashAlgorithms)); // This line removes duplicates. + + // Instantiate rest of the protocol components. 
+ // NOTE: In principal each version of the interface implemtnations can have different constructors, + // but we currently keep the constructor signature the same as much as possible for simple instance construction, + // but it is not inhernetly "bad" if we have to have conditional constructions for each if we have to. + for (const protocolVersion of this.protocolVersionsReverseSorted) { + const version = protocolVersion.version; + + /* tslint:disable-next-line */ + const MongoDbOperationQueue = (await import(`./versions/${version}/MongoDbOperationQueue`)).default; + const operationQueue = new MongoDbOperationQueue(this.config.mongoDbConnectionString); + await operationQueue.initialize(); + + /* tslint:disable-next-line */ + const TransactionProcessor = (await import(`./versions/${version}/TransactionProcessor`)).default; + const transactionProcessor = new TransactionProcessor(downloadManager, operationStore); + this.transactionProcessors.set(version, transactionProcessor); + + /* tslint:disable-next-line */ + const BatchWriter = (await import(`./versions/${version}/BatchWriter`)).default; + const batchWriter = new BatchWriter(operationQueue, blockchain, cas); + this.batchWriters.set(version, batchWriter); + + /* tslint:disable-next-line */ + const OperationProcessor = (await import(`./versions/${version}/OperationProcessor`)).default; + const operationProcessor = new OperationProcessor(this.config.didMethodName); + this.operationProcessors.set(version, operationProcessor); + + /* tslint:disable-next-line */ + const RequestHandler = (await import(`./versions/${version}/RequestHandler`)).default; + const requestHandler = new RequestHandler(resolver, operationQueue, this.config.didMethodName, allSupportedHashAlgorithms); + this.requestHandlers.set(version, requestHandler); + } + } + + /** + * Gets the corresponding version of the `IBatchWriter` based on the given blockchain time. 
+ */ + public getBatchWriter (blockchainTime: number): IBatchWriter { + const version = this.getVersionString(blockchainTime); + const batchWriter = this.batchWriters.get(version); + + if (batchWriter === undefined) { + throw new Error(`Unabled to find batch writer for the given blockchain time ${blockchainTime}, investigate and fix.`); + } + + return batchWriter; + } + + /** + * Gets the corresponding version of the `IOperationProcessor` based on the given blockchain time. + */ + public getOperationProcessor (blockchainTime: number): IOperationProcessor { + const version = this.getVersionString(blockchainTime); + const operationProcessor = this.operationProcessors.get(version); + + if (operationProcessor === undefined) { + throw new Error(`Unabled to find operation processor for the given blockchain time ${blockchainTime}, investigate and fix.`); + } + + return operationProcessor; + } + + /** + * Gets the corresponding version of the `IRequestHandler` based on the given blockchain time. + */ + public getRequestHandler (blockchainTime: number): IRequestHandler { + const version = this.getVersionString(blockchainTime); + const requestHandler = this.requestHandlers.get(version); + + if (requestHandler === undefined) { + throw new Error(`Unabled to find request handler for the given blockchain time ${blockchainTime}, investigate and fix.`); + } + + return requestHandler; + } + + /** + * Gets the corresponding version of the `TransactionProcessor` based on the given blockchain time. 
+ */ + public getTransactionProcessor (blockchainTime: number): ITransactionProcessor { + const version = this.getVersionString(blockchainTime); + const transactionProcessor = this.transactionProcessors.get(version); + + if (transactionProcessor === undefined) { + throw new Error(`Unabled to find transaction processor for the given blockchain time ${blockchainTime}, investigate and fix.`); + } + + return transactionProcessor; + } + + /** + * Gets the corresponding protocol version string given the blockchain time. + */ + private getVersionString (blockchainTime: number): string { + // Iterate through each version to find the right version. + for (const protocolVersion of this.protocolVersionsReverseSorted) { + if (blockchainTime >= protocolVersion.startingBlockchainTime) { + return protocolVersion.version; + } + } + + throw new Error(`Unabled to find protocol parameters for the given blockchain time ${blockchainTime}, investigate and fix.`); + } +} diff --git a/lib/core/interfaces/IBatchWriter.ts b/lib/core/interfaces/IBatchWriter.ts new file mode 100644 index 000000000..e7e90aae7 --- /dev/null +++ b/lib/core/interfaces/IBatchWriter.ts @@ -0,0 +1,9 @@ +/** + * Interface that defines a class that can write batches of operations to content addressable storage and blockchain. + */ +export default interface IBatchWriter { + /** + * Writes one or more batches of batches of operations to content addressable storage and blockchain. + */ + write (): Promise; +} diff --git a/lib/core/interfaces/IBlockchain.ts b/lib/core/interfaces/IBlockchain.ts new file mode 100644 index 000000000..ae5ffa935 --- /dev/null +++ b/lib/core/interfaces/IBlockchain.ts @@ -0,0 +1,37 @@ +import BlockchainTimeModel from '../models/BlockchainTimeModel'; +import TransactionModel from '../../common/models/TransactionModel'; + +/** + * Interface to access the underlying blockchain. + * This interface is mainly useful for creating a mock Blockchain for testing purposes. 
+ */ +export default interface IBlockchain { + /** + * Writes the anchor file hash as a transaction to blockchain. + */ + write (anchorFileHash: string): Promise; + /** + * Gets Sidetree transactions in chronological order. + * The function call may not return all known transactions, moreTransaction indicates if there are more transactions to be fetched. + * When sinceTransactionNumber is not given, Sidetree transactions starting from inception will be returned. + * When sinceTransactionNumber is given, only Sidetree transaction after the given transaction will be returned. + * @param sinceTransactionNumber A valid Sidetree transaction number. + * @param transactionTimeHash The hash associated with the anchored time of the transaction number given. + * Required if and only if sinceTransactionNumber is provided. + * @throws SidetreeError with ErrorCode.InvalidTransactionNumberOrTimeHash if a potential block reorganization is detected. + */ + read (sinceTransactionNumber?: number, transactionTimeHash?: string): Promise<{ + moreTransactions: boolean; + transactions: TransactionModel[]; + }>; + /** + * Given a list of Sidetree transaction in any order, iterate through the list and return the first transaction that is valid. + * @param transactions List of potentially valid transactions. + */ + getFirstValidTransaction (transactions: TransactionModel[]): Promise; + /** + * Gets the approximate latest time synchronously without requiring to make network call. + * Useful for cases where high performance is desired and hgih accuracy is not required. + */ + approximateTime: BlockchainTimeModel; +} diff --git a/lib/core/interfaces/ICas.ts b/lib/core/interfaces/ICas.ts new file mode 100644 index 000000000..cffcd241b --- /dev/null +++ b/lib/core/interfaces/ICas.ts @@ -0,0 +1,19 @@ +import FetchResult from '../../common/models/FetchResult'; +/** + * Interface for accessing the underlying CAS (Content Addressable Store). 
+ * This interface is mainly useful for creating a mock CAS for testing purposes. + */ +export default interface ICas { + /** + * Writes the given content to CAS. + * @returns The SHA256 hash in base64url encoding which represents the address of the content. + */ + write (content: Buffer): Promise; + /** + * Reads the content of the given address in CAS. + * @param maxSizeInBytes The maximum allowed size limit of the content. + * @returns The fetch result containing the content buffer if found. + * The result `code` is set to `FetchResultCode.MaxSizeExceeded` if the content exceeds the specified max size. + */ + read (address: string, maxSizeInBytes: number): Promise; +} diff --git a/lib/core/interfaces/IOperationProcessor.ts b/lib/core/interfaces/IOperationProcessor.ts new file mode 100644 index 000000000..28d95117c --- /dev/null +++ b/lib/core/interfaces/IOperationProcessor.ts @@ -0,0 +1,37 @@ +import AnchoredOperationModel from '../models/AnchoredOperationModel'; + +/** + * Interface that defines a class that can process operations. + */ +export default interface IOperationProcessor { + + /** + * Applies an operation on top of the given DID document in place. + * In the case of an invalid operation, the given DID document will be unchanged. + * In the case of a (valid) delete operation, the given DID document will be set to `undefined`. + * + * MUST NOT throw error. + * + * NOTE: An object referencing the DID document is used so that + * `didDocumentReference.didDocument` can be `undefined` initially and be set to an object created. + * An alternative approach is to include the DID Document as a return value, but that would give the + * misconception that the given DID Document is unchanged. + * + * @param operation The operation to apply against the given DID Document (if any). + * @param didDocumentReference The object containing DID document to apply the given operation against. + * @returns a boolean that indicates if the operation is valid and applied.
+ */ + patch ( + operation: AnchoredOperationModel, + previousOperationHash: string | undefined, + didDocumentReference: { didDocument: object | undefined } + ): Promise; +} + +/** + * The result of applying an operation update patch. + */ +export interface PatchResult { + validOperation: boolean; + operationHash: string | undefined; +} diff --git a/lib/core/interfaces/IOperationStore.ts b/lib/core/interfaces/IOperationStore.ts new file mode 100644 index 000000000..7a207061c --- /dev/null +++ b/lib/core/interfaces/IOperationStore.ts @@ -0,0 +1,28 @@ +import AnchoredOperationModel from '../models/AnchoredOperationModel'; +import NamedAnchoredOperationModel from '../models/NamedAnchoredOperationModel'; + +/** + * An abstraction of a complete store for operations exposing methods to + * put and get operations. + */ +export default interface IOperationStore { + + /** + * Stores a batch of operations + * @param operations The list of operations to be stored, where the key of the map is the DID unique suffix. + */ + put (operations: NamedAnchoredOperationModel[]): Promise; + + /** + * Gets an array of all operations with a given + * didUniqueSuffix ordered by (transactionNumber, operationIndex) + * ascending. + */ + get (didUniqueSuffix: string): Promise; + + /** + * Deletes all operations with transaction number greater than the + * provided parameter. + */ + delete (transactionNumber?: number): Promise; +} diff --git a/lib/core/interfaces/IRequestHandler.ts b/lib/core/interfaces/IRequestHandler.ts new file mode 100644 index 000000000..b4af937a1 --- /dev/null +++ b/lib/core/interfaces/IRequestHandler.ts @@ -0,0 +1,19 @@ +import { ResponseModel } from '../../common/Response'; + +/** + * Interface that defines a class that handle requests. + */ +export default interface IRequestHandler { + /** + * Handles an operation request. + */ + handleOperationRequest (request: Buffer): Promise; + + /** + * Handles resolve operation. + * @param didOrDidDocument Can either be: + * 1. 
Fully qualified DID. e.g. 'did:sidetree:abc' or + * 2. An encoded DID Document prefixed by the DID method name. e.g. 'did:sidetree:'. + */ + handleResolveRequest (didOrDidDocument: string): Promise; +} diff --git a/lib/core/interfaces/IResolvedTransaction.ts b/lib/core/interfaces/IResolvedTransaction.ts deleted file mode 100644 index 55f4f905d..000000000 --- a/lib/core/interfaces/IResolvedTransaction.ts +++ /dev/null @@ -1,9 +0,0 @@ -import ITransaction from '../../common/ITransaction'; - -/** - * Defines a resolved Sidetree transaction. - * A resolved transaction means the batch file is located in CAS. - */ -export default interface IResolvedTransaction extends ITransaction { - batchFileHash: string; -} diff --git a/lib/core/interfaces/ITransactionProcessor.ts b/lib/core/interfaces/ITransactionProcessor.ts new file mode 100644 index 000000000..05b01e97f --- /dev/null +++ b/lib/core/interfaces/ITransactionProcessor.ts @@ -0,0 +1,14 @@ +import TransactionModel from '../../common/models/TransactionModel'; + +/** + * Interface that defines a class that can process transactions fetched from blockchain. + */ +export default interface ITransactionProcessor { + /** + * Processes the given transactions. + * This includes fetching the files referenced by the given transaction, validation, categorization of operations by DID, and storing the operations in DB. + * @param transaction Transaction to process. + * @returns true if the transaction is processed successfully (no retry required), false otherwise (retry required). 
+ */ + processTransaction (transaction: TransactionModel): Promise; +} diff --git a/lib/core/interfaces/TransactionStore.ts b/lib/core/interfaces/ITransactionStore.ts similarity index 73% rename from lib/core/interfaces/TransactionStore.ts rename to lib/core/interfaces/ITransactionStore.ts index 1d9db782e..d1a0fa3f9 100644 --- a/lib/core/interfaces/TransactionStore.ts +++ b/lib/core/interfaces/ITransactionStore.ts @@ -1,38 +1,38 @@ -import ITransaction from '../../common/ITransaction'; +import TransactionModel from '../../common/models/TransactionModel'; /** * An abstraction for the persistence of Sidetree transactions. * Used to avoid re-fetching and reprocessing of transactions when the Sidetree node crashes or restarts. */ -export default interface TransactionStore { +export default interface ITransactionStore { /** * Idempotent method that adds the given transaction to the list of transactions. */ - addTransaction (transaction: ITransaction): Promise; + addTransaction (transaction: TransactionModel): Promise; /** * Gets the most recent transaction. Returns undefined if there is no transaction. */ - getLastTransaction (): Promise; + getLastTransaction (): Promise; /** * Gets a list of exponentially-spaced transactions in reverse chronological sorted order * where the first element in the returned list is the chronologically last transaction in the store. */ - getExponentiallySpacedTransactions (): Promise; + getExponentiallySpacedTransactions (): Promise; /** * Returns the specified transaction. * @param transactionNumber Transaction number of the transaction to be returned. 
*/ - getTransaction (transactionNumber: number): Promise; + getTransaction (transactionNumber: number): Promise; /** * Returns at most @param max transactions with transactionNumber greater than @param transactionNumber * If @param transactionNumber is undefined, returns transactions from index 0 in the store */ - getTransactionsLaterThan (transactionNumber: number | undefined, max: number): Promise; + getTransactionsLaterThan (transactionNumber: number | undefined, max: number): Promise; /** * Remove all transactions with transaction number greater than the provided parameter. diff --git a/lib/core/interfaces/UnresolvableTransactionStore.ts b/lib/core/interfaces/IUnresolvableTransactionStore.ts similarity index 75% rename from lib/core/interfaces/UnresolvableTransactionStore.ts rename to lib/core/interfaces/IUnresolvableTransactionStore.ts index ef7ad614f..cdfd91106 100644 --- a/lib/core/interfaces/UnresolvableTransactionStore.ts +++ b/lib/core/interfaces/IUnresolvableTransactionStore.ts @@ -1,20 +1,20 @@ -import ITransaction from '../../common/ITransaction'; +import TransactionModel from '../../common/models/TransactionModel'; /** * An abstraction for the persistence of Sidetree transactions. * Used to avoid re-fetching and reprocessing of transactions when the Sidetree node crashes or restarts. */ -export default interface UnresolvableTransactionStore { +export default interface IUnresolvableTransactionStore { /** * Records the retry attempts of the given resolvable transaction. */ - recordUnresolvableTransactionFetchAttempt (transaction: ITransaction): Promise; + recordUnresolvableTransactionFetchAttempt (transaction: TransactionModel): Promise; /** * Remove the given transaction from the list of unresolvable transactions. * No-op if the transaction does not exist in the list of unresolvable transactions. 
*/ - removeUnresolvableTransaction (transaction: ITransaction): Promise; + removeUnresolvableTransaction (transaction: TransactionModel): Promise; /** * Gets a list of unresolvable transactions due for retry processing. @@ -22,7 +22,7 @@ export default interface UnresolvableTransactionStore { * The maximum count of unresolvable transactions to return retry. * If not given, the implementation determines the number of unresolvable transactions to return. */ - getUnresolvableTransactionsDueForRetry (maxReturnCount?: number): Promise; + getUnresolvableTransactionsDueForRetry (maxReturnCount?: number): Promise; /** * Remove all unresolvable transactions with transaction number greater than the provided parameter. diff --git a/lib/core/interfaces/OperationStore.ts b/lib/core/interfaces/OperationStore.ts deleted file mode 100644 index 2fb13b047..000000000 --- a/lib/core/interfaces/OperationStore.ts +++ /dev/null @@ -1,26 +0,0 @@ -import { Operation } from '../Operation'; - -/** - * An abstraction of a complete store for operations exposing methods to - * put and get operations. - */ -export default interface OperationStore { - - /** - * Store a batch of operations - */ - put (operations: Array): Promise; - - /** - * Get an iterator that returns all operations with a given - * didUniqueSuffix ordered by (transactionNumber, operationIndex) - * ascending. - */ - get (didUniqueSuffix: string): Promise>; - - /** - * Delete all operations with transaction number greater than the - * provided parameter. - */ - delete (transactionNumber?: number): Promise; -} diff --git a/lib/core/models/AnchoredOperationModel.ts b/lib/core/models/AnchoredOperationModel.ts new file mode 100644 index 000000000..74ff059ef --- /dev/null +++ b/lib/core/models/AnchoredOperationModel.ts @@ -0,0 +1,13 @@ +/** + * The minimal contractual properties of an operation across protocol versions. 
+ */ +export default interface AnchoredOperationModel { + /** The logical blockchain time that this operation was anchored on the blockchain */ + transactionTime: number; + /** The transaction number of the transaction this operation was batched within. */ + transactionNumber: number; + /** The index this operation was assigned to in the batch. */ + operationIndex: number; + /** The original request buffer sent by the requester. */ + operationBuffer: Buffer; +} diff --git a/lib/core/interfaces/IBlockchainTime.ts b/lib/core/models/BlockchainTimeModel.ts similarity index 82% rename from lib/core/interfaces/IBlockchainTime.ts rename to lib/core/models/BlockchainTimeModel.ts index 3195f2d5d..bb7b38b29 100644 --- a/lib/core/interfaces/IBlockchainTime.ts +++ b/lib/core/models/BlockchainTimeModel.ts @@ -1,7 +1,7 @@ /** * Represents an instance in time in a blockchain. */ -export default interface IBlockchainTime { +export default interface BlockchainTimeModel { /** A number that represents the time in the blockchain. */ time: number; /** The globally unique hash that is associated with the time. */ diff --git a/lib/core/interfaces/IConfig.ts b/lib/core/models/Config.ts similarity index 90% rename from lib/core/interfaces/IConfig.ts rename to lib/core/models/Config.ts index 856e8808c..b9724c21c 100644 --- a/lib/core/interfaces/IConfig.ts +++ b/lib/core/models/Config.ts @@ -1,7 +1,7 @@ /** * Defines all the configuration parameters needed to initialize Sidetree Core.
*/ -export default interface IConfig { +export default interface Config { batchingIntervalInSeconds: number; blockchainServiceUri: string; contentAddressableStoreServiceUri: string; diff --git a/lib/core/models/NamedAnchoredOperationModel.ts b/lib/core/models/NamedAnchoredOperationModel.ts new file mode 100644 index 000000000..3ea00800e --- /dev/null +++ b/lib/core/models/NamedAnchoredOperationModel.ts @@ -0,0 +1,9 @@ +import AnchoredOperationModel from './AnchoredOperationModel'; + +/** + * The minimal contractual properties of an operation across protocol versions, plus the DID unique suffix that this operation belongs to. + */ +export default interface NamedAnchoredOperationModel extends AnchoredOperationModel { + /** The DID unique suffix. */ + didUniqueSuffix: string; +} diff --git a/lib/core/models/ProtocolVersionMetadata.ts b/lib/core/models/ProtocolVersionMetadata.ts new file mode 100644 index 000000000..0f663696d --- /dev/null +++ b/lib/core/models/ProtocolVersionMetadata.ts @@ -0,0 +1,7 @@ +/** + * Holds metadata for a particular protocol version needed by the managing/orchastrating classes across all versions of the protocol implementations. + */ +export default interface ProtocolVersionMetadata { + /** Hash algorithm in Multihash code in DEC (not in HEX). */ + hashAlgorithmInMultihashCode: number; +} diff --git a/lib/core/models/TransactionUnderProcessingModel.ts b/lib/core/models/TransactionUnderProcessingModel.ts new file mode 100644 index 000000000..304a97091 --- /dev/null +++ b/lib/core/models/TransactionUnderProcessingModel.ts @@ -0,0 +1,17 @@ +import TransactionModel from '../../common/models/TransactionModel'; + +/** + * The state of a transaction that is being processed. + */ +export enum TransactionProcessingStatus { + Pending = 'pending', + Processsed = 'processed' +} + +/** + * Data structure for holding a transaction that is being processed and its state. 
+ */ +export default interface TransactionUnderProcessingModel { + transaction: TransactionModel; + processingStatus: TransactionProcessingStatus; +} diff --git a/lib/core/AnchorFile.ts b/lib/core/versions/latest/AnchorFile.ts similarity index 80% rename from lib/core/AnchorFile.ts rename to lib/core/versions/latest/AnchorFile.ts index 79a6bdaea..eee512be9 100644 --- a/lib/core/AnchorFile.ts +++ b/lib/core/versions/latest/AnchorFile.ts @@ -1,17 +1,9 @@ +import AnchorFileModel from './models/AnchorFileModel'; import Encoder from './Encoder'; -import ErrorCode from '../common/ErrorCode'; +import ErrorCode from './ErrorCode'; import Multihash from './Multihash'; import ProtocolParameters from './ProtocolParameters'; -import { SidetreeError } from './Error'; - -/** - * Defines Anchor File structure. - */ -export interface IAnchorFile { - batchFileHash: string; - merkleRoot: string; - didUniqueSuffixes: string[]; -} +import { SidetreeError } from '../../Error'; /** * Class containing Anchor File related operations. @@ -21,7 +13,8 @@ export default class AnchorFile { * Parses and validates the given anchor file buffer. * @throws `SidetreeError` if failed parsing or validation. 
*/ - public static parseAndValidate (anchorFileBuffer: Buffer, maxOperationsPerBatch: number, hashAlgorithmInMultihashCode: number): IAnchorFile { + public static parseAndValidate (anchorFileBuffer: Buffer, maxOperationsPerBatch: number): AnchorFileModel { + let anchorFile; try { anchorFile = JSON.parse(anchorFileBuffer.toString()); @@ -52,7 +45,7 @@ export default class AnchorFile { } const didUniqueSuffixBuffer = Encoder.decodeAsBuffer(anchorFile.batchFileHash); - if (!Multihash.isValidHash(didUniqueSuffixBuffer, hashAlgorithmInMultihashCode)) { + if (!Multihash.isValidHash(didUniqueSuffixBuffer, ProtocolParameters.hashAlgorithmInMultihashCode)) { throw new SidetreeError(ErrorCode.AnchorFileBatchFileHashUnsupported, `Batch file hash '${anchorFile.batchFileHash}' is unsupported.`); } @@ -62,7 +55,7 @@ export default class AnchorFile { } const merkleRootBuffer = Encoder.decodeAsBuffer(anchorFile.merkleRoot); - if (!Multihash.isValidHash(merkleRootBuffer, hashAlgorithmInMultihashCode)) { + if (!Multihash.isValidHash(merkleRootBuffer, ProtocolParameters.hashAlgorithmInMultihashCode)) { throw new SidetreeError(ErrorCode.AnchorFileMerkleRootUnsupported, `Merkle root '${anchorFile.merkleRoot}' is unsupported.`); } @@ -80,15 +73,14 @@ export default class AnchorFile { } // Verify each entry in DID unique suffixes. 
- const supportedHashAlgorithms = ProtocolParameters.getSupportedHashAlgorithms(); for (let uniqueSuffix of anchorFile.didUniqueSuffixes) { if (typeof uniqueSuffix !== 'string') { throw new SidetreeError(ErrorCode.AnchorFileDidUniqueSuffixEntryNotString); } - const uniqueSuffixBuffer = Encoder.decodeAsBuffer(uniqueSuffix); - if (!Multihash.isSupportedHash(uniqueSuffixBuffer, supportedHashAlgorithms)) { - throw new SidetreeError(ErrorCode.AnchorFileDidUniqueSuffixEntryInvalid, `Unique suffix '${uniqueSuffix}' is invalid.`); + const maxEncodedHashStringLength = ProtocolParameters.maxEncodedHashStringLength; + if (uniqueSuffix.length > maxEncodedHashStringLength) { + throw new SidetreeError(ErrorCode.AnchorFileDidUniqueSuffixTooLong, `Unique suffix '${uniqueSuffix}' exceeds length of ${maxEncodedHashStringLength}.`); } } diff --git a/lib/core/versions/latest/AnchoredOperation.ts b/lib/core/versions/latest/AnchoredOperation.ts new file mode 100644 index 000000000..2164dac0f --- /dev/null +++ b/lib/core/versions/latest/AnchoredOperation.ts @@ -0,0 +1,35 @@ +import AnchoredOperationModel from '../../models/AnchoredOperationModel'; +import NamedAnchoredOperationModel from '../../models/NamedAnchoredOperationModel'; +import Operation from './Operation'; + +/** + * A class that represents an anchored Sidetree operation. + */ +export default class AnchoredOperation extends Operation implements NamedAnchoredOperationModel { + /** The index this operation was assigned to in the batch. */ + public readonly operationIndex: number; + /** The transaction number of the transaction this operation was batched within. */ + public readonly transactionNumber: number; + /** The logical blockchain time that this opeartion was anchored on the blockchain */ + public readonly transactionTime: number; + + /** + * Constructs an anchored peration if the operation buffer passes schema validation, throws error otherwise. 
+ */ + private constructor (anchoredOperationModel: AnchoredOperationModel) { + super(anchoredOperationModel.operationBuffer); + + // Properties of an operation in a resolved transaction. + this.operationIndex = anchoredOperationModel.operationIndex; + this.transactionNumber = anchoredOperationModel.transactionNumber; + this.transactionTime = anchoredOperationModel.transactionTime; + } + + /** + * Validates and creates an anchored operation that has been anchored on the blockchain. + * @throws Error if given operation buffer fails any validation. + */ + public static createAnchoredOperation (anchoredOperationModel: AnchoredOperationModel): AnchoredOperation { + return new AnchoredOperation(anchoredOperationModel); + } +} diff --git a/lib/core/BatchFile.ts b/lib/core/versions/latest/BatchFile.ts similarity index 60% rename from lib/core/BatchFile.ts rename to lib/core/versions/latest/BatchFile.ts index 1e940dcf5..7aea55954 100644 --- a/lib/core/BatchFile.ts +++ b/lib/core/versions/latest/BatchFile.ts @@ -1,17 +1,12 @@ +import AnchoredOperation from './AnchoredOperation'; +import AnchoredOperationModel from '../../models/AnchoredOperationModel'; +import AnchorFileModel from './models/AnchorFileModel'; +import BatchFileModel from './models/BatchFileModel'; import Encoder from './Encoder'; -import IResolvedTransaction from './interfaces/IResolvedTransaction'; import JsonAsync from './util/JsonAsync'; +import NamedAnchoredOperationModel from '../../models/NamedAnchoredOperationModel'; import ProtocolParameters from './ProtocolParameters'; import timeSpan = require('time-span'); -import { IAnchorFile } from './AnchorFile'; -import { Operation } from './Operation'; - -/** - * Defines Batch File structure. - */ -export interface IBatchFile { - operations: string[]; -} /** * Defines the schema of a Batch File and its related operations. @@ -22,7 +17,13 @@ export default class BatchFile { * Parses and validates the given batch file buffer and all the operations within it. 
* @throws Error if failed parsing or validation. */ - public static async parseAndValidate (batchFileBuffer: Buffer, anchorFile: IAnchorFile, resolvedTransaction: IResolvedTransaction): Promise { + public static async parseAndValidate ( + batchFileBuffer: Buffer, + anchorFile: AnchorFileModel, + transactionNumber: number, + transactionTime: number + ): Promise { + let endTimer = timeSpan(); const batchFileObject = await JsonAsync.parse(batchFileBuffer); console.info(`Parsed batch file ${anchorFile.batchFileHash} in ${endTimer.rounded()} ms.`); @@ -47,51 +48,52 @@ export default class BatchFile { } }); - const batchFile = batchFileObject as IBatchFile; + const batchFile = batchFileObject as BatchFileModel; const batchSize = batchFile.operations.length; // Verify the number of operations does not exceed the maximum allowed limit. - const protocol = ProtocolParameters.get(resolvedTransaction.transactionTime); - if (batchSize > protocol.maxOperationsPerBatch) { - throw Error(`Batch size of ${batchSize} operations exceeds the allowed limit of ${protocol.maxOperationsPerBatch}.`); + if (batchSize > ProtocolParameters.maxOperationsPerBatch) { + throw new Error(`Batch size of ${batchSize} operations exceeds the allowed limit of ${ProtocolParameters.maxOperationsPerBatch}.`); } // Verify that the batch size count matches that of the anchor file. 
const operationCountInAnchorFile = anchorFile.didUniqueSuffixes.length; if (batchSize !== operationCountInAnchorFile) { - throw Error(`Batch size of ${batchSize} in batch file '${anchorFile.batchFileHash}' does not size of ${operationCountInAnchorFile} in anchor file.`); + throw new Error(`Batch size of ${batchSize} in batch file '${anchorFile.batchFileHash}' does not match the size of ${operationCountInAnchorFile} in anchor file.`); } endTimer = timeSpan(); - const operations: Operation[] = new Array(batchSize); + const namedAnchoredOperationModels: NamedAnchoredOperationModel[] = []; + for (let operationIndex = 0; operationIndex < batchSize; operationIndex++) { const encodedOperation = batchFile.operations[operationIndex]; const operationBuffer = Encoder.decodeAsBuffer(encodedOperation); // Verify size of each operation does not exceed the maximum allowed limit. - if (operationBuffer.length > protocol.maxOperationByteSize) { - throw Error(`Operation size of ${operationBuffer.length} bytes exceeds the allowed limit of ${protocol.maxOperationByteSize} bytes.`); + if (operationBuffer.length > ProtocolParameters.maxOperationByteSize) { + throw new Error(`Operation size of ${operationBuffer.length} bytes exceeds the allowed limit of ${ProtocolParameters.maxOperationByteSize} bytes.`); } - let operation; - try { - operation = Operation.createAnchoredOperation(operationBuffer, resolvedTransaction, operationIndex); - } catch (error) { - console.info(`Unable to create an Operation object with '${operationBuffer}': ${error}`); - throw error; - } + const anchoredOperationModel: AnchoredOperationModel = { + operationBuffer, + operationIndex, + transactionNumber, + transactionTime + }; + + const operation = AnchoredOperation.createAnchoredOperation(anchoredOperationModel); const didUniqueSuffixesInAnchorFile = anchorFile.didUniqueSuffixes[operationIndex]; if (operation.didUniqueSuffix !== didUniqueSuffixesInAnchorFile) { - console.info(`Operation ${operationIndex}'s DID unique suffix 
'${operation.didUniqueSuffix}' ` + + throw new Error(`Operation ${operationIndex}'s DID unique suffix '${operation.didUniqueSuffix}' ` + `is not the same as '${didUniqueSuffixesInAnchorFile}' seen in anchor file.`); } - operations[operationIndex] = operation; + namedAnchoredOperationModels.push(operation); } - console.info(`Decoded ${operations.length} operations in batch ${resolvedTransaction.batchFileHash}. Time taken: ${endTimer.rounded()} ms.`); + console.info(`Decoded ${batchSize} operations in batch ${anchorFile.batchFileHash}. Time taken: ${endTimer.rounded()} ms.`); - return operations; + return namedAnchoredOperationModels; } /** diff --git a/lib/core/versions/latest/BatchWriter.ts b/lib/core/versions/latest/BatchWriter.ts new file mode 100644 index 000000000..3f57e62ce --- /dev/null +++ b/lib/core/versions/latest/BatchWriter.ts @@ -0,0 +1,69 @@ +import BatchFile from './BatchFile'; +import Encoder from './Encoder'; +import IBatchWriter from '../../interfaces/IBatchWriter'; +import IBlockchain from '../../interfaces/IBlockchain'; +import ICas from '../../interfaces/ICas'; +import IOperationQueue from './interfaces/IOperationQueue'; +import MerkleTree from './util/MerkleTree'; +import Multihash from './Multihash'; +import Operation from './Operation'; +import ProtocolParameters from './ProtocolParameters'; + +/** + * Implementation of the `IBatchWriter`. + */ +export default class BatchWriter implements IBatchWriter { + public constructor ( + private operationQueue: IOperationQueue, + private blockchain: IBlockchain, + private cas: ICas) { } + + public async write () { + // Get the batch of operations to be anchored on the blockchain. + const operationBuffers = await this.operationQueue.peek(ProtocolParameters.maxOperationsPerBatch); + + console.info('Batch size = ' + operationBuffers.length); + + // Do nothing if there is nothing to batch together. 
+ if (operationBuffers.length === 0) { + return; + } + + const batch = operationBuffers.map( + (buffer) => Operation.create(buffer) + ); + + // Create the batch file buffer from the operation batch. + const batchFileBuffer = BatchFile.fromOperationBuffers(operationBuffers); + + // Write the 'batch file' to content addressable store. + const batchFileHash = await this.cas.write(batchFileBuffer); + console.info(`Wrote batch file ${batchFileHash} to content addressable store.`); + + // Compute the Merkle root hash. + const merkleRoot = MerkleTree.create(operationBuffers).rootHash; + const merkleRootAsMultihash = Multihash.encode(merkleRoot, 18); + const encodedMerkleRoot = Encoder.encode(merkleRootAsMultihash); + + // Construct the DID unique suffixes of each operation to be included in the anchor file. + const didUniqueSuffixes = batch.map(operation => operation.didUniqueSuffix); + + // Construct the 'anchor file'. + const anchorFile = { + batchFileHash: batchFileHash, + merkleRoot: encodedMerkleRoot, + didUniqueSuffixes + }; + + // Make the 'anchor file' available in content addressable store. + const anchorFileJsonBuffer = Buffer.from(JSON.stringify(anchorFile)); + const anchorFileAddress = await this.cas.write(anchorFileJsonBuffer); + console.info(`Wrote anchor file ${anchorFileAddress} to content addressable store.`); + + // Anchor the 'anchor file hash' on blockchain. + await this.blockchain.write(anchorFileAddress); + + // Remove written operations from queue if batch writing is successful. 
+ await this.operationQueue.dequeue(batch.length); + } +} diff --git a/lib/core/Did.ts b/lib/core/versions/latest/Did.ts similarity index 100% rename from lib/core/Did.ts rename to lib/core/versions/latest/Did.ts diff --git a/lib/core/Document.ts b/lib/core/versions/latest/Document.ts similarity index 91% rename from lib/core/Document.ts rename to lib/core/versions/latest/Document.ts index a3b77f644..0a654ee4e 100644 --- a/lib/core/Document.ts +++ b/lib/core/versions/latest/Document.ts @@ -1,29 +1,8 @@ import Did from './Did'; +import DocumentModel from './models/DocumentModel'; import Encoder from './Encoder'; import { DidPublicKey } from '@decentralized-identity/did-common-typescript'; -/** - * Defines DID Document data structure used by Sidetree for basic type safety checks. - */ -export interface IDocument { - '@context': string; - id: string; - publicKey: { - id: string, - type: string, - publicKeyJwk?: object - publicKeyHex?: object - }[]; - service: { - type: string, - serviceEndpoint: { - '@context': string; - '@type': string; - instance: string[] - } - }[]; -} - /** * Class containing reusable DID Document related operations specific to Sidetree. * NOTE: The class is intentionally named to disambiguate from the `DidDocument` class in '@decentralized-identity/did-common-typescript'. @@ -33,7 +12,7 @@ export default class Document { * Creates a DID Document with a valid Sidetree DID from an encoded original DID Document. * @returns DID Document if encoded original DID Document is valid; `undefined` otherwise. 
*/ - public static from (encodedOriginalDidDocument: string, didMethodName: string, hashAlgorithmAsMultihashCode: number): IDocument | undefined { + public static from (encodedOriginalDidDocument: string, didMethodName: string, hashAlgorithmAsMultihashCode: number): DocumentModel | undefined { // Compute the hash of the DID Document in the create payload as the DID const did = Did.from(encodedOriginalDidDocument, didMethodName, hashAlgorithmAsMultihashCode); @@ -206,7 +185,7 @@ export default class Document { * Returns undefined if not found. * @param keyId The ID of the public-key. */ - public static getPublicKey (didDocument: IDocument, keyId: string): DidPublicKey | undefined { + public static getPublicKey (didDocument: DocumentModel, keyId: string): DidPublicKey | undefined { for (let i = 0; i < didDocument.publicKey.length; i++) { const publicKey = didDocument.publicKey[i]; diff --git a/lib/core/Encoder.ts b/lib/core/versions/latest/Encoder.ts similarity index 100% rename from lib/core/Encoder.ts rename to lib/core/versions/latest/Encoder.ts diff --git a/lib/core/versions/latest/ErrorCode.ts b/lib/core/versions/latest/ErrorCode.ts new file mode 100644 index 000000000..a91dda64b --- /dev/null +++ b/lib/core/versions/latest/ErrorCode.ts @@ -0,0 +1,42 @@ +/** + * Error codes used ONLY by this version of the protocol. 
+ */ +export default { + AnchorFileBatchFileHashMissing: 'anchor_file_batch_file_hash_missing', + AnchorFileBatchFileHashNotString: 'anchor_file_batch_file_hash_not_string', + AnchorFileBatchFileHashUnsupported: 'anchor_file_batch_file_hash_unsupported', + AnchorFileDidUniqueSuffixEntryNotString: 'anchor_file_did_unique_suffix_entry_not_string', + AnchorFileDidUniqueSuffixesHasDuplicates: 'anchor_file_did_unique_suffixes_has_duplicates', + AnchorFileDidUniqueSuffixesMissing: 'anchor_file_did_unique_suffixes_missing', + AnchorFileDidUniqueSuffixesNotArray: 'anchor_file_did_unique_suffixes_not_array', + AnchorFileDidUniqueSuffixTooLong: 'anchor_file_did_unique_suffix_too_long', + AnchorFileExceededMaxOperationCount: 'anchor_file_exceeded_max_operation_count', + AnchorFileHasUnknownProperty: 'anchor_file_has_unknown_property', + AnchorFileMerkleRootMissing: 'anchor_file_merkle_root_missing', + AnchorFileMerkleRootNotString: 'anchor_file_merkle_root_not_string', + AnchorFileMerkleRootUnsupported: 'anchor_file_merkle_root_unsupported', + AnchorFileNotJson: 'anchor_file_not_json', + BatchWriterAlreadyHasOperationForDid: 'batch_writer_already_has_operation_for_did', + OperationCreateInvalidDidDocument: 'operation_create_invalid_did_document', + OperationExceedsMaximumSize: 'operation_exceeds_maximum_size', + OperationHeaderMissingKid: 'operation_header_missing_kid', + OperationHeaderMissingOrIncorrectAlg: 'operation_header_missing_or_incorrect_alg', + OperationHeaderMissingOrIncorrectOperation: 'operation_header_missing_or_incorrect_operation', + OperationMissingOrIncorrectPayload: 'operation_missing_or_incorrect_payload', + OperationMissingOrIncorrectSignature: 'operation_missing_or_incorrect_signature', + OperationUpdatePayloadMissingOrInvalidDidUniqueSuffixType: 'operation_update_payload_missing_or_invalid_did_unique_suffix_type', + OperationUpdatePayloadMissingOrInvalidPreviousOperationHashType: 
'operation_update_payload_missing_or_invalid_previous_operation_hash_type', + OperationUpdatePayloadMissingOrUnknownProperty: 'operation_update_payload_missing_or_unknown_property', + OperationUpdatePatchesNotArray: 'operation_update_patches_not_array', + OperationUpdatePatchMissingOrUnknownAction: 'operation_update_patch_missing_or_unknown_action', + OperationUpdatePatchMissingOrUnknownProperty: 'operation_update_patch_missing_or_unknown_property', + OperationUpdatePatchPublicKeyHexMissingOrIncorrect: 'operation_update_patch_public_key_hex_missing_or_incorrect', + OperationUpdatePatchPublicKeyIdNotString: 'operation_update_patch_public_key_id_not_string', + OperationUpdatePatchPublicKeyMissingOrUnknownProperty: 'operation_update_patch_public_key_missing_or_unknown_property', + OperationUpdatePatchPublicKeysNotArray: 'operation_update_patch_public_keys_not_array', + OperationUpdatePatchPublicKeyTypeMissingOrUnknown: 'operation_update_patch_public_key_type_missing_or_unknown', + OperationUpdatePatchServiceEndpointNotDid: 'operation_update_patch_service_endpoint_not_did', + OperationUpdatePatchServiceEndpointsNotArray: 'operation_update_patch_service_endpoints_not_array', + OperationUpdatePatchServiceTypeMissingOrUnknown: 'operation_update_patch_service_type_missing_or_unknown', + QueueingMultipleOperationsPerDidNotAllowed: 'queueing_multiple_operations_per_did_not_allowed' +}; diff --git a/lib/core/MongoDbOperationQueue.ts b/lib/core/versions/latest/MongoDbOperationQueue.ts similarity index 95% rename from lib/core/MongoDbOperationQueue.ts rename to lib/core/versions/latest/MongoDbOperationQueue.ts index 755baf01c..9b1c1fd58 100644 --- a/lib/core/MongoDbOperationQueue.ts +++ b/lib/core/versions/latest/MongoDbOperationQueue.ts @@ -1,7 +1,7 @@ -import ErrorCode from '../common/ErrorCode'; -import OperationQueue from './interfaces/OperationQueue'; +import ErrorCode from './ErrorCode'; +import IOperationQueue from './interfaces/IOperationQueue'; import { Binary, 
Collection, MongoClient, Db } from 'mongodb'; -import { SidetreeError } from './Error'; +import { SidetreeError } from '../../Error'; /** * Sidetree operation stored in MongoDb. @@ -18,7 +18,7 @@ interface IMongoQueuedOperation { /** * Operation queue used by the Batch Writer implemented using MongoDB. */ -export default class MongoDbOperationQueue implements OperationQueue { +export default class MongoDbOperationQueue implements IOperationQueue { /** Collection name for queued operations. */ public static readonly collectionName: string = 'queued-operations'; diff --git a/lib/core/Multihash.ts b/lib/core/versions/latest/Multihash.ts similarity index 94% rename from lib/core/Multihash.ts rename to lib/core/versions/latest/Multihash.ts index ce38b26a4..0b8ed7359 100644 --- a/lib/core/Multihash.ts +++ b/lib/core/versions/latest/Multihash.ts @@ -6,7 +6,7 @@ const multihashes = require('multihashes'); */ export default class Multihash { /** - * Hashes the content using the hashing algorithm specified by the latest protocol version. + * Hashes the content using the hashing algorithm specified. 
*/ public static hash (content: Buffer, hashAlgorithmInMultihashCode: number): Buffer { const hashAlgorithm = hashAlgorithmInMultihashCode; diff --git a/lib/core/Operation.ts b/lib/core/versions/latest/Operation.ts similarity index 72% rename from lib/core/Operation.ts rename to lib/core/versions/latest/Operation.ts index 37c351339..924da7edb 100644 --- a/lib/core/Operation.ts +++ b/lib/core/versions/latest/Operation.ts @@ -1,37 +1,26 @@ import Cryptography from './util/Cryptography'; import Did from './Did'; -import Document, { IDocument } from './Document'; +import Document from './Document'; +import DocumentModel from './models/DocumentModel'; import Encoder from './Encoder'; -import ErrorCode from '../common/ErrorCode'; -import IResolvedTransaction from './interfaces/IResolvedTransaction'; +import ErrorCode from './ErrorCode'; import Multihash from './Multihash'; +import OperationModel from './models/OperationModel'; import ProtocolParameters from './ProtocolParameters'; import { DidPublicKey } from '@decentralized-identity/did-common-typescript'; import { PrivateKey } from '@decentralized-identity/did-auth-jose'; -import { SidetreeError } from './Error'; +import { SidetreeError } from '../../Error'; /** * Sidetree operation types. */ -enum OperationType { +export enum OperationType { Create = 'create', Update = 'update', Delete = 'delete', Recover = 'recover' } -/** - * Defines operation request data structure for basic type safety checks. - */ -interface IOperation { - header: { - operation: string, - kid: string - }; - payload: string; - signature: string; -} - /** * A class that represents a Sidetree operation. * The primary purphose of this class is to provide an abstraction to the underlying JSON data structure. @@ -40,16 +29,7 @@ interface IOperation { * 1. No subclassing of specific operations. The intention here is to keep the hierarchy flat, as most properties are common. * 2. 
Factory method to hide constructor in case subclassing becomes useful in the future. Most often a good practice anyway. */ -class Operation { - /** The logical blockchain time that this opeartion was anchored on the blockchain */ - public readonly transactionTime?: number; - /** The transaction number of the transaction this operation was batched within. */ - public readonly transactionNumber?: number; - /** The index this operation was assigned to in the batch. */ - public readonly operationIndex?: number; - /** The hash of the batch file this operation belongs to */ - public readonly batchFileHash?: string; - +export default class Operation { /** The original request buffer sent by the requester. */ public readonly operationBuffer: Buffer; @@ -59,6 +39,8 @@ class Operation { */ public readonly didUniqueSuffix: string; + /** Hash of the operation based on the encoded payload string. */ + public readonly operationHash: string; /** The encoded operation payload. */ public readonly encodedPayload: string; /** The type of operation. */ @@ -71,44 +53,16 @@ class Operation { public readonly signature: string; /** DID document given in the operation, only applicable to create and recovery operations, undefined otherwise. */ - public readonly didDocument?: IDocument; + public readonly didDocument?: DocumentModel; /** Patches to the DID Document, only applicable to update operations, undefined otherwise. */ public readonly patches?: any[]; /** * Constructs an Operation if the operation buffer passes schema validation, throws error otherwise. - * @param resolvedTransaction The transaction operation this opeartion was batched within. - * If given, operationIndex must be given else error will be thrown. - * The transactoinTimeHash is ignored by the constructor. - * @param operationIndex The operation index this operation was assigned to in the batch. - * If given, resolvedTransaction must be given else error will be thrown. 
- * @param estimatedAnchorTime Estimated anchor time for this opeartion to be used for generating the theoretical DID unique suffix. - * This parameter and `resolvedTransaction` must be mutually exclusively specified. + * NOTE: Would love to mark this constructor private to prevent direct calls, but need it to be public for `AnchoredOperation` to inherit from. */ - private constructor ( - operationBuffer: Buffer, - resolvedTransaction?: IResolvedTransaction, - operationIndex?: number, - private estimatedAnchorTime?: number) { - // resolvedTransaction and estimatedAnchorTime must be mutually exclusively specified. - if ((resolvedTransaction === undefined && estimatedAnchorTime === undefined) || - (resolvedTransaction !== undefined && estimatedAnchorTime !== undefined)) { - throw new Error('Param resolvedTransaction and estimatedAnchorTime must be mutually exclusively specified.'); - } - - // resolvedTransaction and operationIndex must both be defined or undefined at the same time. - if (!((resolvedTransaction === undefined && operationIndex === undefined) || - (resolvedTransaction !== undefined && operationIndex !== undefined))) { - throw new Error('Param resolvedTransaction and operationIndex must both be defined or undefined.'); - } - - // Properties of an operation in a resolved transaction. - this.transactionTime = resolvedTransaction ? resolvedTransaction.transactionTime : undefined; - this.transactionNumber = resolvedTransaction ? resolvedTransaction.transactionNumber : undefined; - this.batchFileHash = resolvedTransaction ? resolvedTransaction.batchFileHash : undefined; - - this.operationIndex = operationIndex; + public constructor (operationBuffer: Buffer) { this.operationBuffer = operationBuffer; // Parse request buffer into a JS object. 
@@ -123,11 +77,12 @@ class Operation { this.signingKeyId = operation.header.kid; this.encodedPayload = operation.payload; this.signature = operation.signature; + this.operationHash = Operation.computeHash(this.encodedPayload); // Initialize operation specific properties. switch (this.type) { case OperationType.Create: - this.didUniqueSuffix = this.getOperationHash(); + this.didUniqueSuffix = this.operationHash; break; case OperationType.Update: this.didUniqueSuffix = decodedPayload.didUniqueSuffix; @@ -142,26 +97,12 @@ class Operation { } } - /** - * Creates an Operation that has been anchored on the blockchain. - * @param resolvedTransaction The transaction operation was batched within. If given, operationIndex must be given else error will be thrown. - * @param operationIndex The operation index this operation was assigned to in the batch. - * If given, resolvedTransaction must be given else error will be thrown. - * @throws Error if given operation buffer fails any validation. - */ - public static createAnchoredOperation ( - operationBuffer: Buffer, - resolvedTransaction: IResolvedTransaction, - operationIndex: number): Operation { - return new Operation(operationBuffer, resolvedTransaction, operationIndex); - } - /** * Creates an Operation that has not been anchored on the blockchain. * @throws Error if given operation buffer fails any validation. */ - public static createUnanchoredOperation (operationBuffer: Buffer, estimatedAnchorTime: number) { - return new Operation(operationBuffer, undefined, undefined, estimatedAnchorTime); + public static create (operationBuffer: Buffer) { + return new Operation(operationBuffer); } /** @@ -190,19 +131,12 @@ class Operation { } /** - * Gets a cryptographic hash of the operation payload. + * Computes the cryptographic multihash of the given string. */ - public getOperationHash (): string { - // Get the protocol version according to the transaction time to decide on the hashing algorithm used for the DID. 
- let protocol; - if (this.transactionTime === undefined) { - protocol = ProtocolParameters.get(this.estimatedAnchorTime!); - } else { - protocol = ProtocolParameters.get(this.transactionTime); - } - - const encodedOperationPayloadBuffer = Buffer.from(this.encodedPayload); - const multihash = Multihash.hash(encodedOperationPayloadBuffer, protocol.hashAlgorithmInMultihashCode); + private static computeHash (dataString: string): string { + const hashAlgorithmInMultihashCode = ProtocolParameters.hashAlgorithmInMultihashCode; + const encodedOperationPayloadBuffer = Buffer.from(dataString); + const multihash = Multihash.hash(encodedOperationPayloadBuffer, hashAlgorithmInMultihashCode); const encodedMultihash = Encoder.encode(multihash); return encodedMultihash; } @@ -211,7 +145,7 @@ class Operation { * Applies the given patches in order to the given DID Document. * NOTE: Assumes no schema validation is needed. */ - public static applyPatchesToDidDocument (didDocument: IDocument, patches: any[]) { + public static applyPatchesToDidDocument (didDocument: DocumentModel, patches: any[]) { // Loop through and apply all patches. for (let patch of patches) { Operation.applyPatchToDidDocument(didDocument, patch); @@ -221,7 +155,7 @@ class Operation { /** * Applies the given patch to the given DID Document. */ - private static applyPatchToDidDocument (didDocument: IDocument, patch: any) { + private static applyPatchToDidDocument (didDocument: DocumentModel, patch: any) { if (patch.action === 'add-public-keys') { const publicKeySet = new Set(didDocument.publicKey.map(key => key.id)); @@ -289,7 +223,7 @@ class Operation { /** * Gets the operation type given an operation object. 
*/ - private static getOperationType (operation: IOperation): OperationType { + private static getOperationType (operation: OperationModel): OperationType { switch (operation.header.operation) { case 'create': return OperationType.Create; @@ -381,17 +315,6 @@ class Operation { throw new SidetreeError(ErrorCode.OperationUpdatePayloadMissingOrInvalidPreviousOperationHashType); } - const supportedHashAlgorithms = ProtocolParameters.getSupportedHashAlgorithms(); - const uniqueSuffixBuffer = Encoder.decodeAsBuffer(payload.didUniqueSuffix); - if (!Multihash.isSupportedHash(uniqueSuffixBuffer, supportedHashAlgorithms)) { - throw new SidetreeError(ErrorCode.OperationUpdatePayloadDidUniqueSuffixInvalid, `'${payload.didUniqueSuffix}' is an unuspported multihash.`); - } - - const previousOperationHashBuffer = Encoder.decodeAsBuffer(payload.previousOperationHash); - if (!Multihash.isSupportedHash(previousOperationHashBuffer, supportedHashAlgorithms)) { - throw new SidetreeError(ErrorCode.OperationUpdatePayloadPreviousOperationHashInvalid, `'${payload.previousOperationHash}' is an unuspported multihash`); - } - // Validate schema of every patch to be applied. 
Operation.validateUpdatePatches(payload.patches); } @@ -500,5 +423,3 @@ class Operation { } } } - -export { IOperation, OperationType, Operation }; diff --git a/lib/core/versions/latest/OperationProcessor.ts b/lib/core/versions/latest/OperationProcessor.ts new file mode 100644 index 000000000..ceb2a98b9 --- /dev/null +++ b/lib/core/versions/latest/OperationProcessor.ts @@ -0,0 +1,107 @@ +import AnchoredOperation from './AnchoredOperation'; +import AnchoredOperationModel from '../../models/AnchoredOperationModel'; +import Document from './Document'; +import DocumentModel from './models/DocumentModel'; +import IOperationProcessor, { PatchResult } from '../../interfaces/IOperationProcessor'; +import ProtocolParameters from './ProtocolParameters'; +import { OperationType } from './Operation'; + +/** + * Implementation of OperationProcessor. Uses an OperationStore + * that might, e.g., use a backend database for persistence. + * All 'processing' is deferred to resolve time, with process() + * simply storing the operation in the store. + */ +export default class OperationProcessor implements IOperationProcessor { + + public constructor (private didMethodName: string) { } + + public async patch ( + anchoredOperationModel: AnchoredOperationModel, + previousOperationHash: string | undefined, + didDocumentReference: { didDocument: object | undefined } + ): Promise<PatchResult> { + let operationHash = undefined; + + try { + // NOTE: only used for read internally. + const didDocument = didDocumentReference.didDocument as (DocumentModel | undefined); + + const operation = AnchoredOperation.createAnchoredOperation(anchoredOperationModel); + operationHash = operation.operationHash; + + if (operation.type === OperationType.Create) { + + // If either of these is defined, then we have seen a previous create operation. 
+ if (previousOperationHash !== undefined || didDocumentReference.didDocument) { + return { validOperation: false, operationHash }; + } + + const originalDidDocument = Document.from(operation.encodedPayload, this.didMethodName, ProtocolParameters.hashAlgorithmInMultihashCode)!; + + const signingKey = Document.getPublicKey(originalDidDocument, operation.signingKeyId); + + if (!signingKey) { + return { validOperation: false, operationHash }; + } + + if (!(await operation.verifySignature(signingKey))) { + return { validOperation: false, operationHash }; + } + + didDocumentReference.didDocument = originalDidDocument; + return { validOperation: true, operationHash }; + } else if (operation.type === OperationType.Delete) { + // Delete can be applied only on valid did with a current document + if (!didDocument) { + return { validOperation: false, operationHash }; + } + + // The current did document should contain the public key mentioned in the operation ... + const publicKey = Document.getPublicKey(didDocument, operation.signingKeyId); + if (!publicKey) { + return { validOperation: false, operationHash }; + } + + // ... and the signature should verify + if (!(await operation.verifySignature(publicKey))) { + return { validOperation: false, operationHash }; + } + + // If the delete is valid + didDocumentReference.didDocument = undefined; + return { validOperation: true, operationHash }; + } else { + // Update operation + + // Every operation other than a create has a previous operation and a valid + // current DID document. 
+ if (previousOperationHash === undefined || didDocument === undefined) { + return { validOperation: false, operationHash }; + } + + // Any non-create needs a previous operation hash that should match the hash of the latest valid operation (previousOperation) + if (operation.previousOperationHash !== previousOperationHash) { + return { validOperation: false, operationHash }; + } + + // The current did document should contain the public key mentioned in the operation ... + const publicKey = Document.getPublicKey(didDocument, operation.signingKeyId); + if (!publicKey) { + return { validOperation: false, operationHash }; + } + + // ... and the signature should verify + if (!(await operation.verifySignature(publicKey))) { + return { validOperation: false, operationHash }; + } + + AnchoredOperation.applyPatchesToDidDocument(didDocument, operation.patches!); + return { validOperation: true, operationHash }; + } + } catch (error) { + console.log(`Invalid operation ${error}.`); + return { validOperation: false, operationHash }; + } + } +} diff --git a/lib/core/versions/latest/ProtocolParameters.ts b/lib/core/versions/latest/ProtocolParameters.ts new file mode 100644 index 000000000..73c0b9b4e --- /dev/null +++ b/lib/core/versions/latest/ProtocolParameters.ts @@ -0,0 +1,8 @@ +import ProtocolParameters from './models/ProtocolParameters'; + +/** + * Defines the list of protocol parameters, intended ONLY to be used within each version of the protocol implementation. + */ +let protocolParameters: ProtocolParameters = require('./protocol-parameters.json'); + +export default protocolParameters; diff --git a/lib/core/versions/latest/ProtocolVersionMetadata.ts b/lib/core/versions/latest/ProtocolVersionMetadata.ts new file mode 100644 index 000000000..714405d3f --- /dev/null +++ b/lib/core/versions/latest/ProtocolVersionMetadata.ts @@ -0,0 +1,8 @@ +import ProtocolParameters from './ProtocolParameters'; + +/** + * Implementation of the ProtocolVersionMetadata. 
+ */ +export default { + hashAlgorithmInMultihashCode: ProtocolParameters.hashAlgorithmInMultihashCode +}; diff --git a/lib/core/RequestHandler.ts b/lib/core/versions/latest/RequestHandler.ts similarity index 74% rename from lib/core/RequestHandler.ts rename to lib/core/versions/latest/RequestHandler.ts index c856d92f1..c7640a7ee 100644 --- a/lib/core/RequestHandler.ts +++ b/lib/core/versions/latest/RequestHandler.ts @@ -1,52 +1,48 @@ -import BatchWriter from './BatchWriter'; import Encoder from './Encoder'; import Did from './Did'; import Document from './Document'; -import ErrorCode from '../common/ErrorCode'; +import ErrorCode from './ErrorCode'; +import IOperationQueue from './interfaces/IOperationQueue'; +import IRequestHandler from '../../interfaces/IRequestHandler'; import Multihash from './Multihash'; -import OperationProcessor from './OperationProcessor'; +import Operation, { OperationType } from './Operation'; import ProtocolParameters from './ProtocolParameters'; -import { Blockchain } from './Blockchain'; -import { IResponse, ResponseStatus } from '../common/Response'; -import { Operation, OperationType } from './Operation'; -import { SidetreeError } from './Error'; +import Resolver from '../../Resolver'; +import { ResponseModel, ResponseStatus } from '../../../common/Response'; +import { SidetreeError } from '../../Error'; /** * Sidetree operation request handler. */ -export default class RequestHandler { +export default class RequestHandler implements IRequestHandler { public constructor ( - private operationProcessor: OperationProcessor, - private blockchain: Blockchain, - private batchWriter: BatchWriter, - private didMethodName: string) { - } + private resolver: Resolver, + private operationQueue: IOperationQueue, + private didMethodName: string, + private allSupportedHashAlgorithms: number[]) { } /** * Handles an operation request. 
*/ - public async handleOperationRequest (request: Buffer): Promise { + public async handleOperationRequest (request: Buffer): Promise { console.info(`Handling operation request of size ${request.length} bytes...`); - // Get the protocol version according to current blockchain time to validate the operation request. - const currentTime = this.blockchain.approximateTime; - const protocolParameters = ProtocolParameters.get(currentTime.time); // Perform common validation for any write request and parse it into an `Operation`. let operation: Operation; try { // Validate operation request size. - if (request.length > protocolParameters.maxOperationByteSize) { - const errorMessage = `Operation byte size of ${request.length} exceeded limit of ${protocolParameters.maxOperationByteSize}`; + if (request.length > ProtocolParameters.maxOperationByteSize) { + const errorMessage = `Operation byte size of ${request.length} exceeded limit of ${ProtocolParameters.maxOperationByteSize}`; console.info(errorMessage); throw new SidetreeError(ErrorCode.OperationExceedsMaximumSize, errorMessage); } // Parse request into an Operation. - operation = Operation.createUnanchoredOperation(request, currentTime.time); + operation = Operation.create(request); // Reject operation if there is already an operation for the same DID waiting to be batched and anchored. - if (await this.batchWriter.hasOperationQueuedFor(operation.didUniqueSuffix)) { + if (await this.operationQueue.contains(operation.didUniqueSuffix)) { throw new SidetreeError(ErrorCode.QueueingMultipleOperationsPerDidNotAllowed); } } catch (error) { @@ -71,10 +67,10 @@ export default class RequestHandler { console.info(`Operation type: '${operation.type}', DID unique suffix: '${operation.didUniqueSuffix}'`); // Passed common operation validation, hand off to specific operation handler. 
- let response: IResponse; + let response: ResponseModel; switch (operation.type) { case OperationType.Create: - const didDocument = Document.from(operation.encodedPayload, this.didMethodName, protocolParameters.hashAlgorithmInMultihashCode); + const didDocument = Document.from(operation.encodedPayload, this.didMethodName, ProtocolParameters.hashAlgorithmInMultihashCode); response = { status: ResponseStatus.Succeeded, @@ -100,7 +96,7 @@ export default class RequestHandler { // if the operation was processed successfully, queue the original request buffer for batching. if (response.status === ResponseStatus.Succeeded) { - await this.batchWriter.add(operation); + await this.operationQueue.enqueue(operation.didUniqueSuffix, operation.operationBuffer); } return response; @@ -128,7 +124,7 @@ export default class RequestHandler { * 1. Fully qualified DID. e.g. 'did:sidetree:abc' or * 2. An encoded DID Document prefixed by the DID method name. e.g. 'did:sidetree:'. */ - public async handleResolveRequest (didOrDidDocument: string): Promise { + public async handleResolveRequest (didOrDidDocument: string): Promise { console.log(`Handling resolution request for: ${didOrDidDocument}...`); if (!didOrDidDocument.startsWith(this.didMethodName)) { return { @@ -142,8 +138,7 @@ export default class RequestHandler { try { uniquePortion = didOrDidDocument.substring(this.didMethodName.length); - const supportedHashAlgorithms = ProtocolParameters.getSupportedHashAlgorithms(); - parameterIsDid = Multihash.isSupportedHash(Encoder.decodeAsBuffer(uniquePortion), supportedHashAlgorithms); + parameterIsDid = Multihash.isSupportedHash(Encoder.decodeAsBuffer(uniquePortion), this.allSupportedHashAlgorithms); } catch { return { status: ResponseStatus.BadRequest @@ -157,9 +152,9 @@ export default class RequestHandler { } } - private async handleResolveRequestWithDid (did: string): Promise { + private async handleResolveRequestWithDid (did: string): Promise { const didUniqueSuffix = 
did.substring(this.didMethodName.length); - const didDocument = await this.operationProcessor.resolve(didUniqueSuffix); + const didDocument = await this.resolver.resolve(didUniqueSuffix); if (!didDocument) { return { @@ -173,14 +168,12 @@ export default class RequestHandler { }; } - private async handleResolveRequestWithDidDocument (encodedDidDocument: string): Promise { - // Get the protocol version according to current blockchain time. - const currentTime = this.blockchain.approximateTime; - const protocolVersion = ProtocolParameters.get(currentTime.time); - const currentHashAlgorithm = protocolVersion.hashAlgorithmInMultihashCode; + private async handleResolveRequestWithDidDocument (encodedDidDocument: string): Promise { + // TODO: Issue #256 - Revisit resolution using Initial DID Document, currently assumes this versions protocol parameters. + const currentHashAlgorithm = ProtocolParameters.hashAlgorithmInMultihashCode; // Validate that the given encoded DID Document is a valid original document. - const isValidOriginalDocument = Document.isEncodedStringValidOriginalDocument(encodedDidDocument, protocolVersion.maxOperationByteSize); + const isValidOriginalDocument = Document.isEncodedStringValidOriginalDocument(encodedDidDocument, ProtocolParameters.maxOperationByteSize); if (!isValidOriginalDocument) { return { status: ResponseStatus.BadRequest }; } @@ -189,7 +182,7 @@ export default class RequestHandler { const didUniqueSuffix = Did.getUniqueSuffixFromEncodeDidDocument(encodedDidDocument, currentHashAlgorithm); // Attempt to resolve the DID. - const didDocument = await this.operationProcessor.resolve(didUniqueSuffix); + const didDocument = await this.resolver.resolve(didUniqueSuffix); // If DID Document found then return it. 
if (didDocument) { diff --git a/lib/core/versions/latest/TransactionProcessor.ts b/lib/core/versions/latest/TransactionProcessor.ts new file mode 100644 index 000000000..920390b0c --- /dev/null +++ b/lib/core/versions/latest/TransactionProcessor.ts @@ -0,0 +1,125 @@ +import AnchorFileModel from './models/AnchorFileModel'; +import AnchorFile from './AnchorFile'; +import BatchFile from './BatchFile'; +import DownloadManager from '../../DownloadManager'; +import IOperationStore from '../../interfaces/IOperationStore'; +import ITransactionProcessor from '../../interfaces/ITransactionProcessor'; +import NamedAnchoredOperationModel from '../../models/NamedAnchoredOperationModel'; +import ProtocolParameters from './ProtocolParameters'; +import timeSpan = require('time-span'); +import TransactionModel from '../../../common/models/TransactionModel'; +import { FetchResultCode } from '../../../common/FetchResultCode'; +import { SidetreeError } from '../../Error'; + +/** + * Implementation of the `ITransactionProcessor`. + */ +export default class TransactionProcessor implements ITransactionProcessor { + public constructor (private downloadManager: DownloadManager, private operationStore: IOperationStore) { } + + public async processTransaction (transaction: TransactionModel): Promise<boolean> { + console.info(`Downloading anchor file '${transaction.anchorFileHash}', max size limit ${ProtocolParameters.maxAnchorFileSizeInBytes} bytes...`); + const anchorFileFetchResult = await this.downloadManager.download(transaction.anchorFileHash, ProtocolParameters.maxAnchorFileSizeInBytes); + + // Nothing to process if the file hash is invalid. No retry needed. + if (anchorFileFetchResult.code === FetchResultCode.InvalidHash) { + console.info(`Anchor file '${transaction.anchorFileHash}' is not a valid hash.`); + return true; + } + + // Nothing to process if the file size exceeds protocol specified size limit, no retry needed either. 
+ if (anchorFileFetchResult.code === FetchResultCode.MaxSizeExceeded) { + console.info(`Anchor file '${transaction.anchorFileHash}' exceeded max size limit ${ProtocolParameters.maxAnchorFileSizeInBytes} bytes.`); + return true; + } + + // Content for hash exists but is not a file. No retry needed. + if (anchorFileFetchResult.code === FetchResultCode.NotAFile) { + console.info(`Anchor file hash '${transaction.anchorFileHash}' points to a content that is not a file.`); + return true; + } + + // If Content Addressable Storage is not reachable, mark the transaction for retry later. + if (anchorFileFetchResult.code === FetchResultCode.CasNotReachable) { + console.info(`CAS not reachable for anchor file '${transaction.anchorFileHash}', will try again later.`); + return false; + } + + // If file cannot be found, mark it for retry later. + if (anchorFileFetchResult.code === FetchResultCode.NotFound) { + console.info(`Anchor file '${transaction.anchorFileHash}' not found, will try again later.`); + return false; + } + + console.info(`Anchor file '${transaction.anchorFileHash}' of size ${anchorFileFetchResult.content!.length} bytes downloaded.`); + let anchorFile: AnchorFileModel; + try { + const maxOperationsPerBatch = ProtocolParameters.maxOperationsPerBatch; + anchorFile = AnchorFile.parseAndValidate( + anchorFileFetchResult.content!, + maxOperationsPerBatch + ); + } catch (error) { + // Give meaningful/specific error code and message when possible. 
+ if (error instanceof SidetreeError) { + console.info(`Invalid anchor file: ${error}`); + console.info(`Anchor file '${transaction.anchorFileHash}' failed parsing/validation, transaction '${transaction.transactionNumber}' ignored...`); + return true; + } else { + console.error(`Unexpected error processing anchor file, MUST investigate and fix: ${error}`); + return false; + } + } + + console.info(`Downloading batch file '${anchorFile.batchFileHash}', max size limit ${ProtocolParameters.maxBatchFileSizeInBytes}...`); + const batchFileFetchResult = await this.downloadManager.download(anchorFile.batchFileHash, ProtocolParameters.maxBatchFileSizeInBytes); + + // Nothing to process if the file hash is invalid. No retry needed. + if (batchFileFetchResult.code === FetchResultCode.InvalidHash) { + console.info(`Batch file '${anchorFile.batchFileHash}' is not a valid hash.`); + return true; + } + + // Nothing to process if the file size exceeds protocol specified size limit, no retry needed either. + if (batchFileFetchResult.code === FetchResultCode.MaxSizeExceeded) { + console.info(`Batch file '${anchorFile.batchFileHash}' exceeded max size limit ${ProtocolParameters.maxBatchFileSizeInBytes}...`); + return true; + } + + // Content for hash exists but is not a file. No retry needed. + if (batchFileFetchResult.code === FetchResultCode.NotAFile) { + console.info(`Batch file hash '${anchorFile.batchFileHash}' points to a content that is not a file.`); + return true; + } + + // If Content Addressable Storage is not reachable, mark the transaction for retry later. + if (batchFileFetchResult.code === FetchResultCode.CasNotReachable) { + console.info(`CAS not reachable for batch file '${anchorFile.batchFileHash}', will try again later.`); + return false; + } + + // If file cannot be found, mark it for retry later. 
+ if (batchFileFetchResult.code === FetchResultCode.NotFound) { + console.info(`Batch file '${anchorFile.batchFileHash}' not found, will try again later.`); + return false; + } + + console.info(`Batch file '${anchorFile.batchFileHash}' of size ${batchFileFetchResult.content!.length} downloaded.`); + + let operations: NamedAnchoredOperationModel[]; + try { + operations = await BatchFile.parseAndValidate(batchFileFetchResult.content!, anchorFile, transaction.transactionNumber, transaction.transactionTime); + } catch (error) { + console.info(error); + console.info(`Batch file '${anchorFile.batchFileHash}' failed parsing/validation, transaction '${transaction.transactionNumber}' ignored.`); + return true; + } + + // If the code reaches here, it means that the batch of operations is valid, process the operations. + const endTimer = timeSpan(); + await this.operationStore.put(operations); + console.info(`Processed batch '${anchorFile.batchFileHash}' of ${operations.length} operations. Time taken: ${endTimer.rounded()} ms.`); + + return true; + } +} diff --git a/lib/core/interfaces/OperationQueue.ts b/lib/core/versions/latest/interfaces/IOperationQueue.ts similarity index 95% rename from lib/core/interfaces/OperationQueue.ts rename to lib/core/versions/latest/interfaces/IOperationQueue.ts index e62df3485..0e04b88a3 100644 --- a/lib/core/interfaces/OperationQueue.ts +++ b/lib/core/versions/latest/interfaces/IOperationQueue.ts @@ -1,7 +1,7 @@ /** * An abstraction of a queue of operations used by the Batch Writer. */ -export default interface OperationQueue { +export default interface IOperationQueue { /** * Places an operation at the tail of the queue. diff --git a/lib/core/versions/latest/models/AnchorFileModel.ts b/lib/core/versions/latest/models/AnchorFileModel.ts new file mode 100644 index 000000000..80c9b5722 --- /dev/null +++ b/lib/core/versions/latest/models/AnchorFileModel.ts @@ -0,0 +1,8 @@ +/** + * Defines Anchor File structure. 
+ */ +export default interface AnchorFileModel { + batchFileHash: string; + merkleRoot: string; + didUniqueSuffixes: string[]; +} diff --git a/lib/core/versions/latest/models/BatchFileModel.ts b/lib/core/versions/latest/models/BatchFileModel.ts new file mode 100644 index 000000000..7fdcd5c56 --- /dev/null +++ b/lib/core/versions/latest/models/BatchFileModel.ts @@ -0,0 +1,6 @@ +/** + * Defines Batch File structure. + */ +export default interface BatchFileModel { + operations: string[]; +} diff --git a/lib/core/interfaces/IDidPublicKey.ts b/lib/core/versions/latest/models/DidPublicKeyModel.ts similarity index 81% rename from lib/core/interfaces/IDidPublicKey.ts rename to lib/core/versions/latest/models/DidPublicKeyModel.ts index 8ef79396b..011652e9c 100644 --- a/lib/core/interfaces/IDidPublicKey.ts +++ b/lib/core/versions/latest/models/DidPublicKeyModel.ts @@ -1,7 +1,7 @@ /** * Interface representing a public key inside the 'publicKey' array property of a DID Document. */ -export default interface IDidPublicKey { +export default interface DidPublicKeyModel { id: string; type: string; owner?: string; diff --git a/lib/core/versions/latest/models/DocumentModel.ts b/lib/core/versions/latest/models/DocumentModel.ts new file mode 100644 index 000000000..694492dcd --- /dev/null +++ b/lib/core/versions/latest/models/DocumentModel.ts @@ -0,0 +1,21 @@ +/** + * Defines DID Document data structure used by Sidetree for basic type safety checks. 
*/ +export default interface DocumentModel { + '@context': string; + id: string; + publicKey: { + id: string, + type: string, + publicKeyJwk?: object + publicKeyHex?: object + }[]; + service: { + type: string, + serviceEndpoint: { + '@context': string; + '@type': string; + instance: string[] + } + }[]; +} diff --git a/lib/core/versions/latest/models/OperationModel.ts b/lib/core/versions/latest/models/OperationModel.ts new file mode 100644 index 000000000..ba7d594f5 --- /dev/null +++ b/lib/core/versions/latest/models/OperationModel.ts @@ -0,0 +1,11 @@ +/** + * Defines operation request data structure for basic type safety checks. + */ +export default interface OperationModel { + header: { + operation: string, + kid: string + }; + payload: string; + signature: string; +} diff --git a/lib/core/versions/latest/models/ProtocolParameters.ts b/lib/core/versions/latest/models/ProtocolParameters.ts new file mode 100644 index 000000000..742052f3c --- /dev/null +++ b/lib/core/versions/latest/models/ProtocolParameters.ts @@ -0,0 +1,17 @@ +/** + * Defines the list of protocol parameters, intended ONLY to be used within each version of the protocol implementation. + */ +export default interface ProtocolParameters { + /** Hash algorithm in Multihash code in DEC (not in HEX). */ + hashAlgorithmInMultihashCode: number; + /** Maximum allowed size of anchor file stored in Content Addressable Storage. */ + maxAnchorFileSizeInBytes: number; + /** Maximum allowed size of batch file stored in Content Addressable Storage. */ + maxBatchFileSizeInBytes: number; + /** Maximum allowed length of any encoded hash string across all protocol versions until current point in time. */ + maxEncodedHashStringLength: number; + /** Maximum operations per batch. */ + maxOperationsPerBatch: number; + /** Maximum size of an operation in bytes. 
*/ + maxOperationByteSize: number; +} diff --git a/lib/core/versions/latest/protocol-parameters.json b/lib/core/versions/latest/protocol-parameters.json new file mode 100644 index 000000000..9d26d23f2 --- /dev/null +++ b/lib/core/versions/latest/protocol-parameters.json @@ -0,0 +1,8 @@ +{ + "hashAlgorithmInMultihashCode": 18, + "maxAnchorFileSizeInBytes": 1000000, + "maxBatchFileSizeInBytes": 20000000, + "maxEncodedHashStringLength": 100, + "maxOperationByteSize": 10000, + "maxOperationsPerBatch": 10000 +} \ No newline at end of file diff --git a/lib/core/util/Cryptography.ts b/lib/core/versions/latest/util/Cryptography.ts similarity index 92% rename from lib/core/util/Cryptography.ts rename to lib/core/versions/latest/util/Cryptography.ts index 464ceaba8..fe2fd2a18 100644 --- a/lib/core/util/Cryptography.ts +++ b/lib/core/versions/latest/util/Cryptography.ts @@ -1,5 +1,5 @@ import * as crypto from 'crypto'; -import IDidPublicKey from '../interfaces/IDidPublicKey'; +import DidPublicKeyModel from '../models/DidPublicKeyModel'; import Encoder from '../Encoder'; import { EcPrivateKey, PrivateKey, Secp256k1CryptoSuite } from '@decentralized-identity/did-auth-jose'; const secp256k1 = require('secp256k1'); @@ -17,10 +17,10 @@ export default class Cryptography { /** * Generates a random pair of SECP256K1 public-private key-pair in JWK format. - * NOTE: The public key returned is wrapped as a IDidPublicKey for convenient usage. + * NOTE: The public key returned is wrapped as a DidPublicKeyModel for convenient usage. * @returns Public key, followed by private key. 
*/ - public static async generateKeyPairJwk (keyId: string): Promise<[IDidPublicKey, PrivateKey]> { + public static async generateKeyPairJwk (keyId: string): Promise<[DidPublicKeyModel, PrivateKey]> { const privateKeyJwk = await EcPrivateKey.generatePrivateKey(keyId); const publicKeyJwk = privateKeyJwk.getPublicKey(); const didPublicKey = { @@ -36,7 +36,7 @@ export default class Cryptography { * Generates a random pair of SECP256K1 public-private key-pair in HEX format. * @returns Public key, followed by private key. */ - public static async generateKeyPairHex (keyId: string): Promise<[IDidPublicKey, string]> { + public static async generateKeyPairHex (keyId: string): Promise<[DidPublicKeyModel, string]> { let privateKeyBuffer; do { privateKeyBuffer = crypto.randomBytes(32); @@ -85,7 +85,7 @@ export default class Cryptography { * @param publicKey The public key to be used for verification. * @returns true if signature is successfully verified, false otherwise. */ - public static async verifySignature (content: string, encodedSignature: string, publicKey: IDidPublicKey): Promise { + public static async verifySignature (content: string, encodedSignature: string, publicKey: DidPublicKeyModel): Promise { try { if (publicKey.type !== 'Secp256k1VerificationKey2018') { return false; diff --git a/lib/core/util/JsonAsync.ts b/lib/core/versions/latest/util/JsonAsync.ts similarity index 100% rename from lib/core/util/JsonAsync.ts rename to lib/core/versions/latest/util/JsonAsync.ts diff --git a/lib/core/util/MerkleTree.ts b/lib/core/versions/latest/util/MerkleTree.ts similarity index 100% rename from lib/core/util/MerkleTree.ts rename to lib/core/versions/latest/util/MerkleTree.ts diff --git a/lib/core/util/SortedArray.ts b/lib/core/versions/latest/util/SortedArray.ts similarity index 100% rename from lib/core/util/SortedArray.ts rename to lib/core/versions/latest/util/SortedArray.ts diff --git a/lib/index.ts b/lib/index.ts index 60fac0191..0e25455d6 100644 --- a/lib/index.ts 
+++ b/lib/index.ts @@ -2,19 +2,17 @@ // Core service exports. import SidetreeCore from './core/Core'; -import ISidetreeConfig from './core/interfaces/IConfig'; -import { IProtocolParameters as ISidetreeProtocolParameters } from './core/ProtocolParameters'; +import SidetreeConfig from './core/models/Config'; import { - IResponse as ISidetreeResponse, + ResponseModel as SidetreeResponseModel, Response as SidetreeResponse } from './common/Response'; export { - ISidetreeConfig, - ISidetreeProtocolParameters, - ISidetreeResponse, + SidetreeConfig, SidetreeCore, - SidetreeResponse + SidetreeResponse, + SidetreeResponseModel }; // Blockchain service exports. diff --git a/lib/ipfs/IpfsStorage.ts b/lib/ipfs/IpfsStorage.ts index a322b2772..f3767ba38 100644 --- a/lib/ipfs/IpfsStorage.ts +++ b/lib/ipfs/IpfsStorage.ts @@ -1,5 +1,5 @@ import * as IPFS from 'ipfs'; -import IFetchResult from '../common/IFetchResult'; +import FetchResult from '../common/models/FetchResult'; import { FetchResultCode } from '../common/FetchResultCode'; /** @@ -41,7 +41,7 @@ export default class IpfsStorage { * The result `code` is set to `FetchResultCode.MaxSizeExceeded` if the content exceeds the specified max size. * The result `code` is set to `FetchResultCode.NotAFile` if the content being downloaded is not a file (e.g. a directory). */ - public async read (hash: string, maxSizeInBytes: number): Promise { + public async read (hash: string, maxSizeInBytes: number): Promise { // If we hit error attempting to fetch the content metadata, return not-found. let contentMetadata = undefined; try { @@ -82,11 +82,11 @@ export default class IpfsStorage { * Fetch the content from IPFS. * This method also allows easy mocking in tests. 
*/ - private async fetchContent (hash: string, maxSizeInBytes: number): Promise { + private async fetchContent (hash: string, maxSizeInBytes: number): Promise { // files.getReadableStream() fetches the content from network if not available in local repo and stores in cache which will be garbage collectable. const readableStream = await (this.node as any).getReadableStream(hash); - let fetchResult: IFetchResult = { code: FetchResultCode.Success }; + let fetchResult: FetchResult = { code: FetchResultCode.Success }; let bufferChunks: Buffer[] = []; let currentContentSize = 0; let resolveFunction: any; diff --git a/lib/ipfs/RequestHandler.ts b/lib/ipfs/RequestHandler.ts index 6ff1792d4..a85724fd2 100644 --- a/lib/ipfs/RequestHandler.ts +++ b/lib/ipfs/RequestHandler.ts @@ -1,7 +1,7 @@ import base64url from 'base64url'; import IpfsStorage from './IpfsStorage'; import { FetchResultCode } from '../common/FetchResultCode'; -import { IResponse, ResponseStatus } from '../common/Response'; +import { ResponseModel, ResponseStatus } from '../common/Response'; import { Timeout } from './Util/Timeout'; const multihashes = require('multihashes'); @@ -27,7 +27,7 @@ export default class RequestHandler { * Handles read request * @param base64urlEncodedMultihash Content Identifier Hash. 
*/ - public async handleFetchRequest (base64urlEncodedMultihash: string, maxSizeInBytes?: number): Promise { + public async handleFetchRequest (base64urlEncodedMultihash: string, maxSizeInBytes?: number): Promise { console.log(`Handling fetch request for '${base64urlEncodedMultihash}'...`); if (maxSizeInBytes === undefined) { @@ -92,10 +92,10 @@ export default class RequestHandler { * Handles sidetree content write request * @param content Sidetree content to write into CAS storage */ - public async handleWriteRequest (content: Buffer): Promise { + public async handleWriteRequest (content: Buffer): Promise { console.log(`Writing content of ${content.length} bytes...`); - let response: IResponse; + let response: ResponseModel; let base64urlEncodedMultihash; try { const base58EncodedMultihashString = await this.ipfsStorage.write(content); diff --git a/package.json b/package.json index ad895c1bd..14cb79434 100644 --- a/package.json +++ b/package.json @@ -53,7 +53,7 @@ }, "scripts": { "precommit": "npm run lint", - "build": "tsc && copyfiles \"src/**/*.json\" dist && copyfiles \"tests/**/*.js*\" dist", + "build": "tsc && copyfiles \"src/**/*.json\" dist && copyfiles \"lib/**/*.json\" dist && copyfiles \"tests/**/*.js*\" dist", "test": "npm run build && nyc jasmine --config=./tests/jasmine.json", "lint": "tslint --fix --project .", "start": "node dist/src/core.js", @@ -71,6 +71,7 @@ ], "exclude": [ "dist/lib/**/interfaces/**", + "dist/lib/**/models/**", "dist/lib/**/I[A-Z]**" ], "reporter": [ diff --git a/src/core-protocol-parameters.json b/src/core-protocol-parameters.json deleted file mode 100644 index 7b6e651f5..000000000 --- a/src/core-protocol-parameters.json +++ /dev/null @@ -1,11 +0,0 @@ -[ - { - "version": "1.0", - "startingBlockchainTime": 1500000, - "hashAlgorithmInMultihashCode": 18, - "maxAnchorFileSizeInBytes": 1000000, - "maxBatchFileSizeInBytes": 20000000, - "maxOperationByteSize": 2000, - "maxOperationsPerBatch": 10000 - } -] \ No newline at end of file 
diff --git a/src/core-protocol-versioning.json b/src/core-protocol-versioning.json new file mode 100644 index 000000000..997549fc0 --- /dev/null +++ b/src/core-protocol-versioning.json @@ -0,0 +1,6 @@ +[ + { + "startingBlockchainTime": 1500000, + "version": "latest" + } +] \ No newline at end of file diff --git a/src/core.ts b/src/core.ts index 7a29d331a..4bb6f7b5d 100644 --- a/src/core.ts +++ b/src/core.ts @@ -6,19 +6,19 @@ import * as getRawBody from 'raw-body'; import * as Koa from 'koa'; import * as Router from 'koa-router'; import Core from '../lib/core/Core'; -import IConfig from '../lib/core/interfaces/IConfig'; -import { IProtocolParameters } from '../lib/core/ProtocolParameters'; -import { IResponse, Response } from '../lib/common/Response'; +import Config from '../lib/core/models/Config'; +import { IProtocolVersion } from '../lib/core/VersionManager'; +import { Response, ResponseModel } from '../lib/common/Response'; /** Configuration used by this server. */ -interface IServerConfig extends IConfig { +interface IServerConfig extends Config { port: number; } const config: IServerConfig = require('./core-config.json'); -const versionsOfProtocolParameters: IProtocolParameters[] = require('./core-protocol-parameters.json'); +const protocolVersions: IProtocolVersion[] = require('./core-protocol-versioning.json'); -const sidetreeCore = new Core(config, versionsOfProtocolParameters); +const sidetreeCore = new Core(config, protocolVersions); const app = new Koa(); // Raw body parser. 
@@ -29,12 +29,12 @@ app.use(async (ctx, next) => { const router = new Router(); router.post('/', async (ctx, _next) => { - const response = await sidetreeCore.requestHandler.handleOperationRequest(ctx.body); + const response = await sidetreeCore.handleOperationRequest(ctx.body); setKoaResponse(response, ctx.response); }); router.get('/:didOrDidDocument', async (ctx, _next) => { - const response = await sidetreeCore.requestHandler.handleResolveRequest(ctx.params.didOrDidDocument); + const response = await sidetreeCore.handleResolveRequest(ctx.params.didOrDidDocument); setKoaResponse(response, ctx.response); }); @@ -60,7 +60,7 @@ sidetreeCore.initialize() /** * Sets the koa response according to the Sidetree response object given. */ -const setKoaResponse = (response: IResponse, koaResponse: Koa.Response) => { +const setKoaResponse = (response: ResponseModel, koaResponse: Koa.Response) => { koaResponse.status = Response.toHttpStatus(response.status); if (response.body) { diff --git a/src/ipfs.ts b/src/ipfs.ts index 28a663121..2bf8ac123 100644 --- a/src/ipfs.ts +++ b/src/ipfs.ts @@ -3,9 +3,9 @@ import * as Koa from 'koa'; import * as Router from 'koa-router'; import { - ISidetreeResponse, SidetreeIpfsService, - SidetreeResponse + SidetreeResponse, + SidetreeResponseModel } from '../lib/index'; const config: { @@ -76,7 +76,7 @@ process.on('uncaughtException', () => { * @param koaResponse Koa Response object to be filled * @param contentType Content type to be set for response, defaults to application/json */ -const setKoaResponse = (response: ISidetreeResponse, koaResponse: Koa.Response, contentType?: string) => { +const setKoaResponse = (response: SidetreeResponseModel, koaResponse: Koa.Response, contentType?: string) => { koaResponse.status = SidetreeResponse.toHttpStatus(response.status); if (contentType) { koaResponse.set('Content-Type', contentType); diff --git a/tests/bitcoin/BitcoinProcessor.spec.ts b/tests/bitcoin/BitcoinProcessor.spec.ts index 
bdb37e512..89f2465c8 100644 --- a/tests/bitcoin/BitcoinProcessor.spec.ts +++ b/tests/bitcoin/BitcoinProcessor.spec.ts @@ -1,7 +1,7 @@ import BitcoinProcessor, { IBlockInfo } from '../../lib/bitcoin/BitcoinProcessor'; -import ErrorCode from '../../lib/common/ErrorCode'; -import ITransaction from '../../lib/common/ITransaction'; +import ErrorCode from '../../lib/common/SharedErrorCode'; import ReadableStream from '../../lib/common/ReadableStream'; +import TransactionModel from '../../lib/common/models/TransactionModel'; import TransactionNumber from '../../lib/bitcoin/TransactionNumber'; import { IBitcoinConfig } from '../../lib/bitcoin/IBitcoinConfig'; import { PrivateKey, Transaction } from 'bitcore-lib'; @@ -75,8 +75,8 @@ describe('BitcoinProcessor', () => { }); } - function createTransactions (count?: number, height?: number): ITransaction[] { - const transactions: ITransaction[] = []; + function createTransactions (count?: number, height?: number): TransactionModel[] { + const transactions: TransactionModel[] = []; if (!count) { count = randomNumber(9) + 1; } @@ -351,7 +351,7 @@ describe('BitcoinProcessor', () => { describe('firstValidTransaction', () => { it('should return the first of the valid transactions', async (done) => { - const transactions: ITransaction[] = []; + const transactions: TransactionModel[] = []; let heights: number[] = []; const count = 10; for (let i = 0; i < count; i++) { @@ -735,7 +735,7 @@ describe('BitcoinProcessor', () => { 'getTransactionsCount').and.returnValue(Promise.resolve(transactions.length)); const exponentialTransactions = spyOn(bitcoinProcessor['transactionStore'], 'getExponentiallySpacedTransactions').and.returnValue(Promise.resolve(transactions)); - const firstValid = spyOn(bitcoinProcessor, 'firstValidTransaction').and.callFake((actualTransactions: ITransaction[]) => { + const firstValid = spyOn(bitcoinProcessor, 'firstValidTransaction').and.callFake((actualTransactions: TransactionModel[]) => { 
expect(actualTransactions).toEqual(transactions); return Promise.resolve(transactions[1]); }); @@ -757,7 +757,7 @@ describe('BitcoinProcessor', () => { const exponentialTransactions = spyOn(bitcoinProcessor['transactionStore'], 'getExponentiallySpacedTransactions').and.returnValue(Promise.resolve(transactions)); let validHasBeenCalledOnce = false; - const firstValid = spyOn(bitcoinProcessor, 'firstValidTransaction').and.callFake((actualTransactions: ITransaction[]) => { + const firstValid = spyOn(bitcoinProcessor, 'firstValidTransaction').and.callFake((actualTransactions: TransactionModel[]) => { expect(actualTransactions).toEqual(transactions); if (validHasBeenCalledOnce) { return Promise.resolve(transactions[0]); @@ -900,7 +900,7 @@ describe('BitcoinProcessor', () => { const rpcMock = mockRpcCall('getblock', [blockHash, 2], blockData); let seenTransactionNumbers: number[] = []; const addTransaction = spyOn(bitcoinProcessor['transactionStore'], - 'addTransaction').and.callFake((sidetreeTransaction: ITransaction) => { + 'addTransaction').and.callFake((sidetreeTransaction: TransactionModel) => { expect(sidetreeTransaction.transactionTime).toEqual(block); expect(sidetreeTransaction.transactionTimeHash).toEqual(blockData.hash); expect(shouldFindIDs.includes(sidetreeTransaction.anchorFileHash)).toBeTruthy(); @@ -933,7 +933,7 @@ describe('BitcoinProcessor', () => { const rpcMock = mockRpcCall('getblock', [blockHash, 2], blockData); let seenTransactionNumbers: number[] = []; const addTransaction = spyOn(bitcoinProcessor['transactionStore'], - 'addTransaction').and.callFake((sidetreeTransaction: ITransaction) => { + 'addTransaction').and.callFake((sidetreeTransaction: TransactionModel) => { expect(sidetreeTransaction.transactionTime).toEqual(block); expect(sidetreeTransaction.transactionTimeHash).toEqual(blockData.hash); expect(shouldFindIDs.includes(sidetreeTransaction.anchorFileHash)).toBeTruthy(); diff --git a/tests/core/AnchorFile.spec.ts 
b/tests/core/AnchorFile.spec.ts index 570a993af..764b779d1 100644 --- a/tests/core/AnchorFile.spec.ts +++ b/tests/core/AnchorFile.spec.ts @@ -1,13 +1,12 @@ -import AnchorFile from '../../lib/core/AnchorFile'; -import ErrorCode from '../../lib/common/ErrorCode'; +import AnchorFile from '../../lib/core/versions/latest/AnchorFile'; +import ErrorCode from '../../lib/core/versions/latest/ErrorCode'; import { SidetreeError } from '../../lib/core/Error'; describe('AnchorFile', async () => { describe('parseAndValidate()', async () => { - const hashAlgorithmInMultihashCode = 18; - it('should throw if buffer given is not valid JSON.', async () => { - const parseAndValidate = () => AnchorFile.parseAndValidate(Buffer.from('NotJsonString'), 1, hashAlgorithmInMultihashCode); + const parseAndValidate = + () => AnchorFile.parseAndValidate(Buffer.from('NotJsonString'), 1); expect(parseAndValidate).toThrow(new SidetreeError(ErrorCode.AnchorFileNotJson)); }); @@ -18,7 +17,8 @@ describe('AnchorFile', async () => { didUniqueSuffixes: ['EiA-GtHEOH9IcEEoBQ9p1KCMIjTmTO8x2qXJPb20ry6C0A', 'EiA4zvhtvzTdeLAg8_Pvdtk5xJreNuIpvSpCCbtiTVc8Ow'], merkleRoot: 'EiB4ypIXxG9aFhXv2YC8I2tQvLEBbQAsNzHmph17vMfVYA' }; - const parseAndValidate = () => AnchorFile.parseAndValidate(Buffer.from(JSON.stringify(anchoreFile)), 1, hashAlgorithmInMultihashCode); + const anchorFileBuffer = Buffer.from(JSON.stringify(anchoreFile)); + const parseAndValidate = () => AnchorFile.parseAndValidate(anchorFileBuffer, 1); expect(parseAndValidate).toThrow(new SidetreeError(ErrorCode.AnchorFileHasUnknownProperty)); }); @@ -28,7 +28,8 @@ describe('AnchorFile', async () => { didUniqueSuffixes: ['EiA-GtHEOH9IcEEoBQ9p1KCMIjTmTO8x2qXJPb20ry6C0A', 'EiA4zvhtvzTdeLAg8_Pvdtk5xJreNuIpvSpCCbtiTVc8Ow'], merkleRoot: 'EiB4ypIXxG9aFhXv2YC8I2tQvLEBbQAsNzHmph17vMfVYA' }; - const parseAndValidate = () => AnchorFile.parseAndValidate(Buffer.from(JSON.stringify(anchoreFile)), 1, hashAlgorithmInMultihashCode); + const anchorFileBuffer = 
Buffer.from(JSON.stringify(anchoreFile)); + const parseAndValidate = () => AnchorFile.parseAndValidate(anchorFileBuffer, 1); expect(parseAndValidate).toThrow(new SidetreeError(ErrorCode.AnchorFileBatchFileHashMissing)); }); @@ -38,7 +39,8 @@ describe('AnchorFile', async () => { // didUniqueSuffixes: ['EiA-GtHEOH9IcEEoBQ9p1KCMIjTmTO8x2qXJPb20ry6C0A'], // Intentionally kept to show what is missing. merkleRoot: 'EiB4ypIXxG9aFhXv2YC8I2tQvLEBbQAsNzHmph17vMfVYA' }; - const parseAndValidate = () => AnchorFile.parseAndValidate(Buffer.from(JSON.stringify(anchoreFile)), 1, hashAlgorithmInMultihashCode); + const anchorFileBuffer = Buffer.from(JSON.stringify(anchoreFile)); + const parseAndValidate = () => AnchorFile.parseAndValidate(anchorFileBuffer, 1); expect(parseAndValidate).toThrow(new SidetreeError(ErrorCode.AnchorFileDidUniqueSuffixesMissing)); }); @@ -48,7 +50,8 @@ describe('AnchorFile', async () => { didUniqueSuffixes: ['EiA-GtHEOH9IcEEoBQ9p1KCMIjTmTO8x2qXJPb20ry6C0A', 'EiA4zvhtvzTdeLAg8_Pvdtk5xJreNuIpvSpCCbtiTVc8Ow'] // merkleRoot: 'EiB4ypIXxG9aFhXv2YC8I2tQvLEBbQAsNzHmph17vMfVYA' // Intentionally kept to show what is missing. 
}; - const parseAndValidate = () => AnchorFile.parseAndValidate(Buffer.from(JSON.stringify(anchoreFile)), 1, hashAlgorithmInMultihashCode); + const anchorFileBuffer = Buffer.from(JSON.stringify(anchoreFile)); + const parseAndValidate = () => AnchorFile.parseAndValidate(anchorFileBuffer, 1); expect(parseAndValidate).toThrow(new SidetreeError(ErrorCode.AnchorFileMerkleRootMissing)); }); @@ -58,7 +61,8 @@ describe('AnchorFile', async () => { didUniqueSuffixes: ['EiA-GtHEOH9IcEEoBQ9p1KCMIjTmTO8x2qXJPb20ry6C0A', 'EiA4zvhtvzTdeLAg8_Pvdtk5xJreNuIpvSpCCbtiTVc8Ow'], merkleRoot: 'EiB4ypIXxG9aFhXv2YC8I2tQvLEBbQAsNzHmph17vMfVYA' }; - const parseAndValidate = () => AnchorFile.parseAndValidate(Buffer.from(JSON.stringify(anchoreFile)), 1, hashAlgorithmInMultihashCode); + const anchorFileBuffer = Buffer.from(JSON.stringify(anchoreFile)); + const parseAndValidate = () => AnchorFile.parseAndValidate(anchorFileBuffer, 1); expect(parseAndValidate).toThrow(new SidetreeError(ErrorCode.AnchorFileBatchFileHashNotString)); }); @@ -68,9 +72,9 @@ describe('AnchorFile', async () => { didUniqueSuffixes: ['EiA-GtHEOH9IcEEoBQ9p1KCMIjTmTO8x2qXJPb20ry6C0A', 'EiA4zvhtvzTdeLAg8_Pvdtk5xJreNuIpvSpCCbtiTVc8Ow'], merkleRoot: 'EiB4ypIXxG9aFhXv2YC8I2tQvLEBbQAsNzHmph17vMfVYA' }; - const hashAlgorithmInMultihashCode = 18; try { - AnchorFile.parseAndValidate(Buffer.from(JSON.stringify(anchoreFile)), 1, hashAlgorithmInMultihashCode); + const anchorFileBuffer = Buffer.from(JSON.stringify(anchoreFile)); + AnchorFile.parseAndValidate(anchorFileBuffer, 1); } catch (error) { expect(error.code).toEqual(ErrorCode.AnchorFileBatchFileHashUnsupported); } @@ -82,7 +86,8 @@ describe('AnchorFile', async () => { didUniqueSuffixes: ['EiA-GtHEOH9IcEEoBQ9p1KCMIjTmTO8x2qXJPb20ry6C0A', 'EiA4zvhtvzTdeLAg8_Pvdtk5xJreNuIpvSpCCbtiTVc8Ow'], merkleRoot: 12345 }; - const parseAndValidate = () => AnchorFile.parseAndValidate(Buffer.from(JSON.stringify(anchoreFile)), 1, hashAlgorithmInMultihashCode); + const anchorFileBuffer = 
Buffer.from(JSON.stringify(anchoreFile)); + const parseAndValidate = () => AnchorFile.parseAndValidate(anchorFileBuffer, 1); expect(parseAndValidate).toThrow(new SidetreeError(ErrorCode.AnchorFileMerkleRootNotString)); }); @@ -93,7 +98,8 @@ describe('AnchorFile', async () => { merkleRoot: 'InvalidHash' }; try { - AnchorFile.parseAndValidate(Buffer.from(JSON.stringify(anchoreFile)), 1, hashAlgorithmInMultihashCode); + const anchorFileBuffer = Buffer.from(JSON.stringify(anchoreFile)); + AnchorFile.parseAndValidate(anchorFileBuffer, 1); } catch (error) { expect(error.code).toEqual(ErrorCode.AnchorFileMerkleRootUnsupported); } @@ -105,7 +111,8 @@ describe('AnchorFile', async () => { didUniqueSuffixes: 'IncorrectType', merkleRoot: 'EiB4ypIXxG9aFhXv2YC8I2tQvLEBbQAsNzHmph17vMfVYA' }; - const parseAndValidate = () => AnchorFile.parseAndValidate(Buffer.from(JSON.stringify(anchoreFile)), 1, hashAlgorithmInMultihashCode); + const anchorFileBuffer = Buffer.from(JSON.stringify(anchoreFile)); + const parseAndValidate = () => AnchorFile.parseAndValidate(anchorFileBuffer, 1); expect(parseAndValidate).toThrow(new SidetreeError(ErrorCode.AnchorFileDidUniqueSuffixesNotArray)); }); @@ -115,7 +122,8 @@ describe('AnchorFile', async () => { didUniqueSuffixes: ['EiA-GtHEOH9IcEEoBQ9p1KCMIjTmTO8x2qXJPb20ry6C0A', 'EiA4zvhtvzTdeLAg8_Pvdtk5xJreNuIpvSpCCbtiTVc8Ow'], merkleRoot: 'EiB4ypIXxG9aFhXv2YC8I2tQvLEBbQAsNzHmph17vMfVYA' }; - const parseAndValidate = () => AnchorFile.parseAndValidate(Buffer.from(JSON.stringify(anchoreFile)), 1, hashAlgorithmInMultihashCode); + const anchorFileBuffer = Buffer.from(JSON.stringify(anchoreFile)); + const parseAndValidate = () => AnchorFile.parseAndValidate(anchorFileBuffer, 1); expect(parseAndValidate).toThrow(new SidetreeError(ErrorCode.AnchorFileExceededMaxOperationCount)); }); @@ -125,7 +133,8 @@ describe('AnchorFile', async () => { didUniqueSuffixes: ['EiA-GtHEOH9IcEEoBQ9p1KCMIjTmTO8x2qXJPb20ry6C0A', 'EiA-GtHEOH9IcEEoBQ9p1KCMIjTmTO8x2qXJPb20ry6C0A'], 
merkleRoot: 'EiB4ypIXxG9aFhXv2YC8I2tQvLEBbQAsNzHmph17vMfVYA' }; - const parseAndValidate = () => AnchorFile.parseAndValidate(Buffer.from(JSON.stringify(anchoreFile)), 2, hashAlgorithmInMultihashCode); + const anchorFileBuffer = Buffer.from(JSON.stringify(anchoreFile)); + const parseAndValidate = () => AnchorFile.parseAndValidate(anchorFileBuffer, 2); expect(parseAndValidate).toThrow(new SidetreeError(ErrorCode.AnchorFileDidUniqueSuffixesHasDuplicates)); }); @@ -135,20 +144,22 @@ describe('AnchorFile', async () => { didUniqueSuffixes: ['EiA-GtHEOH9IcEEoBQ9p1KCMIjTmTO8x2qXJPb20ry6C0A', 12345], merkleRoot: 'EiB4ypIXxG9aFhXv2YC8I2tQvLEBbQAsNzHmph17vMfVYA' }; - const parseAndValidate = () => AnchorFile.parseAndValidate(Buffer.from(JSON.stringify(anchoreFile)), 2, hashAlgorithmInMultihashCode); + const anchorFileBuffer = Buffer.from(JSON.stringify(anchoreFile)); + const parseAndValidate = () => AnchorFile.parseAndValidate(anchorFileBuffer, 2); expect(parseAndValidate).toThrow(new SidetreeError(ErrorCode.AnchorFileDidUniqueSuffixEntryNotString)); }); it('should throw if a DID unique suffix is invalid.', async () => { const anchoreFile = { batchFileHash: 'EiB4ypIXxG9aFhXv2YC8I2tQvLEBbQAsNzHmph17vMfVYA', - didUniqueSuffixes: ['InvalidUniqueSuffix'], + didUniqueSuffixes: ['SuperLongDidUniqueSuffixSuperLongDidUniqueSuffixSuperLongDidUniqueSuffix'], merkleRoot: 'EiB4ypIXxG9aFhXv2YC8I2tQvLEBbQAsNzHmph17vMfVYA' }; try { - AnchorFile.parseAndValidate(Buffer.from(JSON.stringify(anchoreFile)), 1, hashAlgorithmInMultihashCode); + const anchorFileBuffer = Buffer.from(JSON.stringify(anchoreFile)); + AnchorFile.parseAndValidate(anchorFileBuffer, 1); } catch (error) { - expect(error.code).toEqual(ErrorCode.AnchorFileDidUniqueSuffixEntryInvalid); + expect(error.code).toEqual(ErrorCode.AnchorFileDidUniqueSuffixTooLong); } }); }); diff --git a/tests/core/Cas.spec.ts b/tests/core/Cas.spec.ts index c775f0bbf..95fb79044 100644 --- a/tests/core/Cas.spec.ts +++ b/tests/core/Cas.spec.ts @@ -1,10 
+1,10 @@ +import Cas from '../../lib/core/Cas'; import ReadableStream from '../../lib/common/ReadableStream'; -import { CasClient } from '../../lib/core/Cas'; import { FetchResultCode } from '../../lib/common/FetchResultCode'; describe('Cas', async () => { it('should return file hash of the content written.', async () => { - const casClient = new CasClient('unused'); + const casClient = new Cas('unused'); const fetchSpy = spyOn(casClient as any, 'fetch').and.returnValue(Promise.resolve({ status: 200, body: 'unused' })); const readStreamSpy = spyOn(ReadableStream, 'readAll').and.returnValue(Promise.resolve('{"hash":"abc"}')); const hash = await casClient.write(Buffer.from('unused')); @@ -15,7 +15,7 @@ describe('Cas', async () => { }); it('should throw if content writing returned with an error.', async () => { - const casClient = new CasClient('unused'); + const casClient = new Cas('unused'); spyOn(casClient as any, 'fetch').and.returnValue(Promise.resolve({ status: 500, body: 'unused' })); spyOn(ReadableStream, 'readAll').and.returnValue(Promise.resolve('abc')); @@ -30,7 +30,7 @@ describe('Cas', async () => { }); it('should set fetch result as not-found when fetch result in an unexpected error.', async () => { - const casClient = new CasClient('unused'); + const casClient = new Cas('unused'); const fetchSpy = spyOn(casClient as any, 'fetch').and.returnValue(Promise.resolve({ status: 200, body: 'unused' })); const readStreamSpy = spyOn(ReadableStream, 'readAll').and.returnValue(Promise.resolve('abc')); const fetchResult = await casClient.read('anyAddress', 1); @@ -42,7 +42,7 @@ describe('Cas', async () => { }); it('should set fetch result as not-found when fetch result in an unexpected error.', async () => { - const casClient = new CasClient('unused'); + const casClient = new Cas('unused'); const fetchSpy = spyOn(casClient as any, 'fetch').and.returnValue(Promise.resolve({ status: 500, body: 'unused' })); const readStreamSpy = spyOn(ReadableStream, 
'readAll').and.returnValue(Promise.resolve(JSON.stringify({ code: 'unused' @@ -55,7 +55,7 @@ describe('Cas', async () => { }); it('should set fetch result correctly when fetch responds with a not-found.', async () => { - const casClient = new CasClient('unused'); + const casClient = new Cas('unused'); const fetchSpy = spyOn(casClient as any, 'fetch').and.returnValue(Promise.resolve({ status: 404 })); const fetchResult = await casClient.read('anyAddress', 1); @@ -65,7 +65,7 @@ describe('Cas', async () => { }); it('should set fetch result correctly when fetch responds with a bad-request.', async () => { - const casClient = new CasClient('unused'); + const casClient = new Cas('unused'); const fetchSpy = spyOn(casClient as any, 'fetch').and.returnValue(Promise.resolve({ status: 400 })); const readStreamSpy = spyOn(ReadableStream, 'readAll').and.returnValue(Promise.resolve(JSON.stringify({ code: FetchResultCode.InvalidHash diff --git a/tests/core/Did.spec.ts b/tests/core/Did.spec.ts index d9229ac39..e4065f5af 100644 --- a/tests/core/Did.spec.ts +++ b/tests/core/Did.spec.ts @@ -1,4 +1,4 @@ -import Did from '../../lib/core/Did'; +import Did from '../../lib/core/versions/latest/Did'; describe('DID', async () => { it('isDid() should return false if DID method is not in the DID.', async () => { diff --git a/tests/core/DownloadManager.spec.ts b/tests/core/DownloadManager.spec.ts index 0b9e14329..1c1097218 100644 --- a/tests/core/DownloadManager.spec.ts +++ b/tests/core/DownloadManager.spec.ts @@ -1,13 +1,13 @@ +import ICas from '../../lib/core/interfaces/ICas'; import DownloadManager from '../../lib/core/DownloadManager'; import MockCas from '../mocks/MockCas'; import timeSpan = require('time-span'); -import { Cas } from '../../lib/core/Cas'; describe('DownloadManager', async () => { const maxConcurrentDownloads = 3; const mockSecondsTakenForEachCasFetch = 2; - let cas: Cas; + let cas: ICas; let downloadManager: DownloadManager; const originalDefaultTestTimeout = 
jasmine.DEFAULT_TIMEOUT_INTERVAL; diff --git a/tests/core/MongoDbOperationQueue.spec.ts b/tests/core/MongoDbOperationQueue.spec.ts index 3836767d3..3a981074c 100644 --- a/tests/core/MongoDbOperationQueue.spec.ts +++ b/tests/core/MongoDbOperationQueue.spec.ts @@ -1,8 +1,8 @@ -import ErrorCode from '../../lib/common/ErrorCode'; -import IConfig from '../../lib/core/interfaces/IConfig'; +import Config from '../../lib/core/models/Config'; +import ErrorCode from '../../lib/core/versions/latest/ErrorCode'; +import IOperationQueue from '../../lib/core/versions/latest/interfaces/IOperationQueue'; import MongoDb from '../common/MongoDb'; -import MongoDbOperationQueue from '../../lib/core/MongoDbOperationQueue'; -import OperationQueue from '../../lib/core/interfaces/OperationQueue'; +import MongoDbOperationQueue from '../../lib/core/versions/latest/MongoDbOperationQueue'; import { SidetreeError } from '../../lib/core/Error'; /** @@ -18,7 +18,7 @@ async function createOperationQueue (transactionStoreUri: string, databaseName: * Generates the given count of operations and queues them in the given operation queue. * e.g. The DID unique suffix will start from '1', '2', '3'... and buffer will be generated from the DID unique suffix. 
*/ -async function generateAndQueueOperations (operationQueue: OperationQueue, count: number): Promise<{ didUniqueSuffix: string, operationBuffer: Buffer }[]> { +async function generateAndQueueOperations (operationQueue: IOperationQueue, count: number): Promise<{ didUniqueSuffix: string, operationBuffer: Buffer }[]> { const operations: { didUniqueSuffix: string, operationBuffer: Buffer }[] = []; for (let i = 1; i <= count; i++) { const didUniqueSuffix = i.toString(); @@ -32,7 +32,7 @@ async function generateAndQueueOperations (operationQueue: OperationQueue, count } describe('MongoDbOperationQueue', async () => { - const config: IConfig = require('../json/config-test.json'); + const config: Config = require('../json/config-test.json'); const databaseName = 'sidetree-test'; let mongoServiceAvailable = false; diff --git a/tests/core/MongoDbOperationStore.spec.ts b/tests/core/MongoDbOperationStore.spec.ts index 16654459a..1c6e74fd1 100644 --- a/tests/core/MongoDbOperationStore.spec.ts +++ b/tests/core/MongoDbOperationStore.spec.ts @@ -1,30 +1,29 @@ -import Cryptography from '../../lib/core/util/Cryptography'; +import AnchoredOperation from '../../lib/core/versions/latest/AnchoredOperation'; +import AnchoredOperationModel from '../../lib/core/models/AnchoredOperationModel'; +import Cryptography from '../../lib/core/versions/latest/util/Cryptography'; import MongoDb from '../common/MongoDb'; import MongoDbOperationStore from '../../lib/core/MongoDbOperationStore'; import OperationGenerator from '../generators/OperationGenerator'; -import OperationStore from '../../lib/core/interfaces/OperationStore'; -import ProtocolParameters from '../../lib/core/ProtocolParameters'; -import { Operation } from '../../lib/core/Operation'; +import IOperationStore from '../../lib/core/interfaces/IOperationStore'; import { DidPublicKey } from '@decentralized-identity/did-common-typescript'; /** * Construct an operation given the payload, transactionNumber, transactionTime, and 
operationIndex */ function constructAnchoredOperation ( - opBuf: Buffer, + operationBuffer: Buffer, transactionNumber: number, transactionTime: number, - operationIndex: number): Operation { + operationIndex: number): AnchoredOperation { - const resolvedTransaction = { + const anchoredOperationModel: AnchoredOperationModel = { transactionNumber, transactionTime, - transactionTimeHash: 'unused', - anchorFileHash: 'unused', - batchFileHash: 'unused' + operationIndex, + operationBuffer }; - return Operation.createAnchoredOperation(opBuf, resolvedTransaction, operationIndex); + return AnchoredOperation.createAnchoredOperation(anchoredOperationModel); } /** @@ -35,7 +34,7 @@ async function constructAnchoredCreateOperation ( privateKey: string, transactionNumber: number, transactionTime: number, - operationIndex: number): Promise { + operationIndex: number): Promise { const didDocumentTemplate = require('../json/didDocumentTemplate.json'); const operationBuffer = await OperationGenerator.generateCreateOperationBuffer(didDocumentTemplate, publicKey, privateKey); const operation = constructAnchoredOperation(operationBuffer, transactionNumber, transactionTime, operationIndex); @@ -52,7 +51,7 @@ async function constructAnchoredUpdateOperation ( transactionNumber: number, transactionTime: number, operationIndex: number -): Promise { +): Promise { const updatePayload = { didUniqueSuffix, @@ -78,7 +77,7 @@ async function constructAnchoredUpdateOperation ( const databaseName = 'sidetree-test'; const operationCollectionName = 'operations-test'; -async function createOperationStore (mongoDbConnectionString: string): Promise { +async function createOperationStore (mongoDbConnectionString: string): Promise { const operationStore = new MongoDbOperationStore(mongoDbConnectionString, databaseName, operationCollectionName); await operationStore.initialize(); return operationStore; @@ -87,12 +86,12 @@ async function createOperationStore (mongoDbConnectionString: string): Promise { +async 
function createOperationChain (createOperation: AnchoredOperation, chainLength: number, privateKey: string): Promise { const didUniqueSuffix = createOperation.didUniqueSuffix; - const chain = new Array(createOperation); + const chain = new Array(createOperation); for (let i = 1; i < chainLength ; i++) { const previousOperation = chain[i - 1]; - const previousVersion = previousOperation.getOperationHash(); + const previousVersion = previousOperation.operationHash; const operation = await constructAnchoredUpdateOperation(privateKey, didUniqueSuffix, previousVersion, i, i, 0); chain.push(operation); } @@ -100,34 +99,33 @@ async function createOperationChain (createOperation: Operation, chainLength: nu } // Check if two operations are equal -function checkEqual (operation1: Operation, operation2: Operation): void { +function checkEqual (operation1: AnchoredOperation, operation2: AnchoredOperation): void { expect(operation1.transactionNumber).toBeDefined(); expect(operation2.transactionNumber).toBeDefined(); - expect(operation1.transactionNumber!).toEqual(operation2.transactionNumber!); + expect(operation1.transactionNumber).toEqual(operation2.transactionNumber); expect(operation1.operationIndex).toBeDefined(); expect(operation2.operationIndex).toBeDefined(); - expect(operation1.operationIndex!).toEqual(operation2.operationIndex!); + expect(operation1.operationIndex).toEqual(operation2.operationIndex); expect(operation1.transactionTime).toBeDefined(); expect(operation2.transactionTime).toBeDefined(); - expect(operation1.transactionTime!).toEqual(operation2.transactionTime!); + expect(operation1.transactionTime).toEqual(operation2.transactionTime); expect(operation1.didUniqueSuffix).toEqual(operation2.didUniqueSuffix); - expect(operation1.getOperationHash()).toEqual(operation2.getOperationHash()); + expect(operation1.operationHash).toEqual(operation2.operationHash); } // Check if two operation arrays are equal -function checkEqualArray (putOperations: Operation[], 
gotOperations: Operation[]): void { +function checkEqualArray (putOperations: AnchoredOperation[], gotOperations: AnchoredOperationModel[]): void { expect(gotOperations.length).toEqual(putOperations.length); for (let i = 0 ; i < putOperations.length ; i++) { - checkEqual(gotOperations[i], putOperations[i]); + const gotOperation = AnchoredOperation.createAnchoredOperation(gotOperations[i]); + checkEqual(gotOperation, putOperations[i]); } } describe('MongoDbOperationStore', async () => { - const versionsOfProtocolParameters = require('../json/protocol-parameters-test.json'); - ProtocolParameters.initialize(versionsOfProtocolParameters); - let operationStore: OperationStore; + let operationStore: IOperationStore; let publicKey: DidPublicKey; let privateKey: string; const config = require('../json/config-test.json'); @@ -153,7 +151,7 @@ describe('MongoDbOperationStore', async () => { it('should get a put create operation', async () => { const operation = await constructAnchoredCreateOperation(publicKey, privateKey, 0, 0, 0); await operationStore.put([operation]); - const returnedOperations = Array.from(await operationStore.get(operation.didUniqueSuffix)); + const returnedOperations = await operationStore.get(operation.didUniqueSuffix); checkEqualArray([operation], returnedOperations); }); @@ -161,10 +159,10 @@ describe('MongoDbOperationStore', async () => { // Use a create operation to generate a DID const createOperation = await constructAnchoredCreateOperation(publicKey, privateKey, 0, 0, 0); const didUniqueSuffix = createOperation.didUniqueSuffix; - const createVersion = createOperation.getOperationHash(); + const createVersion = createOperation.operationHash; const updateOperation = await constructAnchoredUpdateOperation(privateKey, didUniqueSuffix, createVersion, 1, 1, 0); await operationStore.put([updateOperation]); - const returnedOperations = Array.from(await operationStore.get(didUniqueSuffix)); + const returnedOperations = await 
operationStore.get(didUniqueSuffix); checkEqualArray([updateOperation], returnedOperations); }); @@ -172,12 +170,12 @@ describe('MongoDbOperationStore', async () => { // Use a create operation to generate a DID const createOperation = await constructAnchoredCreateOperation(publicKey, privateKey, 0, 0, 0); const didUniqueSuffix = createOperation.didUniqueSuffix; - const createVersion = createOperation.getOperationHash(); + const createVersion = createOperation.operationHash; const updateOperation = await constructAnchoredUpdateOperation(privateKey, didUniqueSuffix, createVersion, 1, 1, 0); await operationStore.put([updateOperation]); - // duplicate operation + // Insert duplicate operation await operationStore.put([updateOperation]); - const returnedOperations = Array.from(await operationStore.get(didUniqueSuffix)); + const returnedOperations = await operationStore.get(didUniqueSuffix); checkEqualArray([updateOperation], returnedOperations); }); @@ -190,7 +188,7 @@ describe('MongoDbOperationStore', async () => { const operationChain = await createOperationChain(createOperation, chainSize, privateKey); await operationStore.put(operationChain); - const returnedOperations = Array.from(await operationStore.get(didUniqueSuffix)); + const returnedOperations = await operationStore.get(didUniqueSuffix); checkEqualArray(operationChain, returnedOperations); }); @@ -201,11 +199,12 @@ describe('MongoDbOperationStore', async () => { const chainSize = 10; const operationChain = await createOperationChain(createOperation, chainSize, privateKey); + // construct an operation chain with duplicated operations const batchWithDuplicates = operationChain.concat(operationChain); await operationStore.put(batchWithDuplicates); - const returnedOperations = Array.from(await operationStore.get(didUniqueSuffix)); + const returnedOperations = await operationStore.get(didUniqueSuffix); checkEqualArray(operationChain, returnedOperations); }); @@ -218,11 +217,11 @@ describe('MongoDbOperationStore', 
async () => { const operationChain = await createOperationChain(createOperation, chainSize, privateKey); await operationStore.put(operationChain); - const returnedOperations = Array.from(await operationStore.get(didUniqueSuffix)); + const returnedOperations = await operationStore.get(didUniqueSuffix); checkEqualArray(operationChain, returnedOperations); await operationStore.delete(); - const returnedOperationsAfterRollback = Array.from(await operationStore.get(didUniqueSuffix)); + const returnedOperationsAfterRollback = await operationStore.get(didUniqueSuffix); expect(returnedOperationsAfterRollback.length).toEqual(0); }); @@ -234,12 +233,12 @@ describe('MongoDbOperationStore', async () => { const chainSize = 10; const operationChain = await createOperationChain(createOperation, chainSize, privateKey); await operationStore.put(operationChain); - const returnedOperations = Array.from(await operationStore.get(didUniqueSuffix)); + const returnedOperations = await operationStore.get(didUniqueSuffix); checkEqualArray(operationChain, returnedOperations); const rollbackTime = chainSize / 2; await operationStore.delete(rollbackTime); - const returnedOperationsAfterRollback = Array.from(await operationStore.get(didUniqueSuffix)); + const returnedOperationsAfterRollback = await operationStore.get(didUniqueSuffix); // Returned operations should be equal to the first rollbackTime + 1 operations in the batch checkEqualArray(operationChain.slice(0, rollbackTime + 1), returnedOperationsAfterRollback); }); @@ -252,14 +251,14 @@ describe('MongoDbOperationStore', async () => { const chainSize = 10; const operationChain = await createOperationChain(createOperation, chainSize, privateKey); await operationStore.put(operationChain); - let returnedOperations = Array.from(await operationStore.get(didUniqueSuffix)); + let returnedOperations = await operationStore.get(didUniqueSuffix); checkEqualArray(operationChain, returnedOperations); // Create another instance of the operation store 
operationStore = await createOperationStore(config.mongoDbConnectionString); // Check if we have all the previously put operations - returnedOperations = Array.from(await operationStore.get(didUniqueSuffix)); + returnedOperations = await operationStore.get(didUniqueSuffix); checkEqualArray(operationChain, returnedOperations); }); @@ -276,7 +275,7 @@ describe('MongoDbOperationStore', async () => { await operationStore.put([operationChain[i]]); } - const returnedOperations = Array.from(await operationStore.get(didUniqueSuffix)); + const returnedOperations = await operationStore.get(didUniqueSuffix); checkEqualArray(operationChain, returnedOperations); }); }); diff --git a/tests/core/MongoDbTransactionStore.spec.ts b/tests/core/MongoDbTransactionStore.spec.ts index 66c01d43c..cf61e2187 100644 --- a/tests/core/MongoDbTransactionStore.spec.ts +++ b/tests/core/MongoDbTransactionStore.spec.ts @@ -1,8 +1,8 @@ -import IConfig from '../../lib/core/interfaces/IConfig'; -import ITransaction from '../../lib/common/ITransaction'; +import Config from '../../lib/core/models/Config'; +import ITransactionStore from '../../lib/core/interfaces/ITransactionStore'; import MongoDb from '../common/MongoDb'; import MongoDbTransactionStore from '../../lib/common/MongoDbTransactionStore'; -import TransactionStore from '../../lib/core/interfaces/TransactionStore'; +import TransactionModel from '../../lib/common/models/TransactionModel'; import { MongoClient } from 'mongodb'; /** @@ -20,10 +20,10 @@ async function createTransactionStore (transactionStoreUri: string, databaseName * @param transactionStore The transaction store to store the generated transactions. * @param count Number of transactions to generate and store. 
*/ -async function generateAndStoreTransactions (transactionStore: TransactionStore, count: number): Promise { - const transactions: ITransaction[] = []; +async function generateAndStoreTransactions (transactionStore: ITransactionStore, count: number): Promise { + const transactions: TransactionModel[] = []; for (let i = 1; i <= count; i++) { - const transaction: ITransaction = { + const transaction: TransactionModel = { anchorFileHash: i.toString(), transactionNumber: i, transactionTime: i, @@ -39,7 +39,7 @@ async function generateAndStoreTransactions (transactionStore: TransactionStore, } describe('MongoDbTransactionStore', async () => { - const config: IConfig = require('../json/config-test.json'); + const config: Config = require('../json/config-test.json'); const databaseName = 'sidetree-test'; let mongoServiceAvailable: boolean | undefined; diff --git a/tests/core/MongoDbUnresolvableTransactionStore.spec.ts b/tests/core/MongoDbUnresolvableTransactionStore.spec.ts index d078f4f3e..f250c7493 100644 --- a/tests/core/MongoDbUnresolvableTransactionStore.spec.ts +++ b/tests/core/MongoDbUnresolvableTransactionStore.spec.ts @@ -1,13 +1,13 @@ -import IConfig from '../../lib/core/interfaces/IConfig'; -import ITransaction from '../../lib/common/ITransaction'; +import Config from '../../lib/core/models/Config'; import MongoDb from '../common/MongoDb'; import MongoDbUnresolvableTransactionStore from '../../lib/core/MongoDbUnresolvableTransactionStore'; +import TransactionModel from '../../lib/common/models/TransactionModel'; import { MongoClient } from 'mongodb'; /** * Creates a MongoDbUnresolvableTransactionStore and initializes it. 
*/ -async function createUnresolvableTransactionStore (transactionStoreUri: string, databaseName: string): Promise { +async function createIUnresolvableTransactionStore (transactionStoreUri: string, databaseName: string): Promise { const unresolvableTransactionStore = new MongoDbUnresolvableTransactionStore(transactionStoreUri, databaseName, 1); await unresolvableTransactionStore.initialize(); return unresolvableTransactionStore; @@ -18,10 +18,10 @@ async function createUnresolvableTransactionStore (transactionStoreUri: string, * e.g. First transaction will have all properties assigned as 1 or '1'; * @param count Number of transactions to generate. */ -async function generateTransactions (count: number): Promise { - const transactions: ITransaction[] = []; +async function generateTransactions (count: number): Promise { + const transactions: TransactionModel[] = []; for (let i = 1; i <= count; i++) { - const transaction: ITransaction = { + const transaction: TransactionModel = { anchorFileHash: i.toString(), transactionNumber: i, transactionTime: i, @@ -35,7 +35,7 @@ async function generateTransactions (count: number): Promise { } describe('MongoDbUnresolvableTransactionStore', async () => { - const config: IConfig = require('../json/config-test.json'); + const config: Config = require('../json/config-test.json'); const databaseName = 'sidetree-test'; let mongoServiceAvailable = false; @@ -43,7 +43,7 @@ describe('MongoDbUnresolvableTransactionStore', async () => { beforeAll(async () => { mongoServiceAvailable = await MongoDb.isServerAvailable(config.mongoDbConnectionString); if (mongoServiceAvailable) { - store = await createUnresolvableTransactionStore(config.mongoDbConnectionString, databaseName); + store = await createIUnresolvableTransactionStore(config.mongoDbConnectionString, databaseName); } }); diff --git a/tests/core/Observer.spec.ts b/tests/core/Observer.spec.ts index a5e23f2ee..5ceb71847 100644 --- a/tests/core/Observer.spec.ts +++ 
b/tests/core/Observer.spec.ts @@ -1,43 +1,46 @@ import * as retry from 'async-retry'; +import AnchorFileModel from '../../lib/core/versions/latest/models/AnchorFileModel'; +import BatchFileModel from '../../lib/core/versions/latest/models/BatchFileModel'; +import Blockchain from '../../lib/core/Blockchain'; +import Cas from '../../lib/core/Cas'; import DownloadManager from '../../lib/core/DownloadManager'; -import ErrorCode from '../../lib/common/ErrorCode'; -import IFetchResult from '../../lib/common/IFetchResult'; -import ITransaction from '../../lib/common/ITransaction'; +import ErrorCode from '../../lib/common/SharedErrorCode'; +import FetchResult from '../../lib/common/models/FetchResult'; +import IOperationStore from '../../lib/core/interfaces/IOperationStore'; import MockOperationStore from '../mocks/MockOperationStore'; import Observer from '../../lib/core/Observer'; -import OperationProcessor from '../../lib/core/OperationProcessor'; -import OperationStore from '../../lib/core/interfaces/OperationStore'; -import { BlockchainClient } from '../../lib/core/Blockchain'; -import { CasClient } from '../../lib/core/Cas'; +import TransactionModel from '../../lib/common/models/TransactionModel'; +import TransactionProcessor from '../../lib/core/versions/latest/TransactionProcessor'; import { FetchResultCode } from '../../lib/common/FetchResultCode'; import { MockTransactionStore } from '../mocks/MockTransactionStore'; import { SidetreeError } from '../../lib/core/Error'; -import { IAnchorFile } from '../../lib/core/AnchorFile'; -import { IBatchFile } from '../../lib/core/BatchFile'; describe('Observer', async () => { const config = require('../json/config-test.json'); + let getTransactionProcessor: (blockchainTime: number) => TransactionProcessor; + let casClient; let downloadManager: DownloadManager; - let operationProcessor: OperationProcessor; - let operationStore: OperationStore; + let operationStore: IOperationStore; + let transactionStore: 
MockTransactionStore; const originalDefaultTestTimeout = jasmine.DEFAULT_TIMEOUT_INTERVAL; beforeAll(async () => { jasmine.DEFAULT_TIMEOUT_INTERVAL = 20000; // These asynchronous tests can take a bit longer than normal. - casClient = new CasClient(config.contentAddressableStoreServiceUri); + casClient = new Cas(config.contentAddressableStoreServiceUri); // Setting the CAS to always return 404. spyOn(casClient, 'read').and.returnValue(Promise.resolve({ code: FetchResultCode.NotFound })); - downloadManager = new DownloadManager(config.maxConcurrentDownloads, casClient); operationStore = new MockOperationStore(); - operationProcessor = new OperationProcessor(config.didMethodName, operationStore); - + transactionStore = new MockTransactionStore(); + downloadManager = new DownloadManager(config.maxConcurrentDownloads, casClient); downloadManager.start(); + + getTransactionProcessor = (_blockchainTime: number) => new TransactionProcessor(downloadManager, operationStore); }); afterAll(() => { @@ -68,7 +71,7 @@ describe('Observer', async () => { 'transactions': [] }; - const blockchainClient = new BlockchainClient(config.blockchainServiceUri); + const blockchainClient = new Blockchain(config.blockchainServiceUri); let readInvocationCount = 0; const mockReadFunction = async () => { @@ -82,8 +85,16 @@ describe('Observer', async () => { spyOn(blockchainClient, 'read').and.callFake(mockReadFunction); // Start the Observer. - const transactionStore = new MockTransactionStore(); - const observer = new Observer(blockchainClient, downloadManager, operationProcessor, transactionStore, transactionStore, 1); + const observer = new Observer( + getTransactionProcessor, + blockchainClient, + config.maxConcurrentDownloads, + operationStore, + transactionStore, + transactionStore, + 1 + ); + const processedTransactions = transactionStore.getTransactions(); await observer.startPeriodicProcessing(); // Asynchronously triggers Observer to start processing transactions immediately. 
@@ -110,16 +121,16 @@ describe('Observer', async () => { it('should process a valid operation batch successfully.', async () => { // Prepare the mock response from the DownloadManager. - const anchorFile: IAnchorFile = { + const anchorFile: AnchorFileModel = { batchFileHash: 'EiB4ypIXxG9aFhXv2YC8I2tQvLEBbQAsNzHmph17vMfVYA', didUniqueSuffixes: ['EiA-GtHEOH9IcEEoBQ9p1KCMIjTmTO8x2qXJPb20ry6C0A', 'EiA4zvhtvzTdeLAg8_Pvdtk5xJreNuIpvSpCCbtiTVc8Ow'], merkleRoot: 'EiB4ypIXxG9aFhXv2YC8I2tQvLEBbQAsNzHmph17vMfVYA' }; - const anchoreFileFetchResult: IFetchResult = { + const anchoreFileFetchResult: FetchResult = { code: FetchResultCode.Success, content: Buffer.from(JSON.stringify(anchorFile)) }; - const batchFile: IBatchFile = { + const batchFile: BatchFileModel = { /* tslint:disable */ operations: [ 'eyJwYXlsb2FkIjoiZXlKamNtVmhkR1ZrSWpvaU1qQXhPUzB3TmkweE5GUXlNam94TkRvME5pNDVORE5hSWl3aVFHTnZiblJsZUhRaU9pSm9kSFJ3Y3pvdkwzY3phV1F1YjNKbkwyUnBaQzkyTVNJc0luQjFZbXhwWTB0bGVTSTZXM3NpYVdRaU9pSWphMlY1TVNJc0luUjVjR1VpT2lKVFpXTndNalUyYXpGV1pYSnBabWxqWVhScGIyNUxaWGt5TURFNElpd2ljSFZpYkdsalMyVjVTbmRySWpwN0ltdHBaQ0k2SWlOclpYa3hJaXdpYTNSNUlqb2lSVU1pTENKaGJHY2lPaUpGVXpJMU5rc2lMQ0pqY25ZaU9pSlFMVEkxTmtzaUxDSjRJam9pTjFGWFRVUjFkRmh3UkdodFVFcHhPWGxDWmxNMmVWVmpaMmxQVDJWTWIxVmplazVPVW5Wd1ZEZElNQ0lzSW5raU9pSnRNVVJIVWpCMldEZHNXRlZLTWtwcU1WQmtNRU5yZWxneFVuSkxiVmhuZERSNk5tMUZUV0Y1ZDNCSkluMTlYWDAiLCJzaWduYXR1cmUiOiJNRVVDSUNnWXk3TmRuRDhZVmhsTXhqaWFJVW11d3VhRHliM2xjNVAzZFVPSlpmVUpBaUVBMGtNbi03anFuaFQtMm5RVk52YldXRmk1NkNDajMweEVZRWxDNmFCMXVRayIsInByb3RlY3RlZCI6ImUzMCIsImhlYWRlciI6eyJvcGVyYXRpb24iOiJjcmVhdGUiLCJwcm9vZk9mV29yayI6IiIsImtpZCI6IiNrZXkxIiwiYWxnIjoiRVMyNTZLIn19', @@ -127,7 +138,7 @@ describe('Observer', async () => { ] /* tslint:enable */ }; - const batchFileFetchResult: IFetchResult = { + const batchFileFetchResult: FetchResult = { code: FetchResultCode.Success, content: Buffer.from(JSON.stringify(batchFile)) }; @@ -143,11 +154,18 @@ describe('Observer', async () => { }; spyOn(downloadManager, 
'download').and.callFake(mockDownloadFunction); - const blockchainClient = new BlockchainClient(config.blockchainServiceUri); - const transactionStore = new MockTransactionStore(); - const observer = new Observer(blockchainClient, downloadManager, operationProcessor, transactionStore, transactionStore, 1); - - const mockTransaction: ITransaction = { + const blockchainClient = new Blockchain(config.blockchainServiceUri); + const observer = new Observer( + getTransactionProcessor, + blockchainClient, + config.maxConcurrentDownloads, + operationStore, + transactionStore, + transactionStore, + 1 + ); + + const mockTransaction: TransactionModel = { transactionNumber: 1, transactionTime: 1000000, transactionTimeHash: '1000', @@ -157,14 +175,12 @@ describe('Observer', async () => { transaction: mockTransaction, processingStatus: 'pending' }; - await (observer as any).downloadThenProcessBatchAsync(mockTransaction, transactionUnderProcessing); + await (observer as any).processTransaction(mockTransaction, transactionUnderProcessing); - const iterableOperations1 = await operationStore.get('EiA-GtHEOH9IcEEoBQ9p1KCMIjTmTO8x2qXJPb20ry6C0A'); - const operationArray1 = [...iterableOperations1]; + const operationArray1 = await operationStore.get('EiA-GtHEOH9IcEEoBQ9p1KCMIjTmTO8x2qXJPb20ry6C0A'); expect(operationArray1.length).toEqual(1); - const iterableOperations2 = await operationStore.get('EiA4zvhtvzTdeLAg8_Pvdtk5xJreNuIpvSpCCbtiTVc8Ow'); - const operationArray2 = [...iterableOperations2]; + const operationArray2 = await operationStore.get('EiA4zvhtvzTdeLAg8_Pvdtk5xJreNuIpvSpCCbtiTVc8Ow'); expect(operationArray2.length).toEqual(1); }); @@ -179,9 +195,16 @@ describe('Observer', async () => { const expectedConsoleLogSubstring = tuple[1]; it(`should stop processing a transaction if ${mockFetchReturnCode}`, async () => { - const blockchainClient = new BlockchainClient(config.blockchainServiceUri); - const transactionStore = new MockTransactionStore(); - const observer = new 
Observer(blockchainClient, downloadManager, operationProcessor, transactionStore, transactionStore, 1); + const blockchainClient = new Blockchain(config.blockchainServiceUri); + const observer = new Observer( + getTransactionProcessor, + blockchainClient, + config.maxConcurrentDownloads, + operationStore, + transactionStore, + transactionStore, + 1 + ); spyOn(downloadManager, 'download').and.returnValue(Promise.resolve({ code: mockFetchReturnCode as FetchResultCode })); @@ -195,7 +218,7 @@ describe('Observer', async () => { spyOn(transactionStore, 'removeUnresolvableTransaction'); spyOn(transactionStore, 'recordUnresolvableTransactionFetchAttempt'); - const mockTransaction: ITransaction = { + const mockTransaction: TransactionModel = { transactionNumber: 1, transactionTime: 1000000, transactionTimeHash: '1000', @@ -205,7 +228,7 @@ describe('Observer', async () => { transaction: mockTransaction, processingStatus: 'pending' }; - await (observer as any).downloadThenProcessBatchAsync(mockTransaction, transactionUnderProcessing); + await (observer as any).processTransaction(mockTransaction, transactionUnderProcessing); expect(expectedConsoleLogDetected).toBeTruthy(); expect(transactionStore.removeUnresolvableTransaction).toHaveBeenCalled(); @@ -267,7 +290,7 @@ describe('Observer', async () => { 'transactions': [] }; - const blockchainClient = new BlockchainClient(config.blockchainServiceUri); + const blockchainClient = new Blockchain(config.blockchainServiceUri); let readInvocationCount = 0; const mockReadFunction = async () => { @@ -291,8 +314,16 @@ describe('Observer', async () => { spyOn(blockchainClient, 'getFirstValidTransaction').and.returnValue(Promise.resolve(initialTransactionFetchResponseBody.transactions[0])); // Process first set of transactions. 
- const transactionStore = new MockTransactionStore(); - const observer = new Observer(blockchainClient, downloadManager, operationProcessor, transactionStore, transactionStore, 1); + const observer = new Observer( + getTransactionProcessor, + blockchainClient, + config.maxConcurrentDownloads, + operationStore, + transactionStore, + transactionStore, + 1 + ); + await observer.startPeriodicProcessing(); // Asynchronously triggers Observer to start processing transactions immediately. // Monitor the processed transactions list until the expected count or max retries is reached. diff --git a/tests/core/Operation.spec.ts b/tests/core/Operation.spec.ts index bbca13743..ffb0c9d60 100644 --- a/tests/core/Operation.spec.ts +++ b/tests/core/Operation.spec.ts @@ -1,7 +1,7 @@ -import Cryptography from '../../lib/core/util/Cryptography'; -import ErrorCode from '../../lib/common/ErrorCode'; +import Cryptography from '../../lib/core/versions/latest/util/Cryptography'; +import ErrorCode from '../../lib/core/versions/latest/ErrorCode'; +import Operation from '../../lib/core/versions/latest/Operation'; import OperationGenerator from '../generators/OperationGenerator'; -import { Operation } from '../../lib/core/Operation'; import { SidetreeError } from '../../lib/core/Error'; describe('Operation', async () => { @@ -20,21 +20,21 @@ describe('Operation', async () => { createRequest.dummyProperty = '123'; const requestWithUnknownProperty = Buffer.from(JSON.stringify(createRequest)); - expect(() => { Operation.createUnanchoredOperation(requestWithUnknownProperty, 1500000); }).toThrowError(); + expect(() => { Operation.create(requestWithUnknownProperty); }).toThrowError(); }); it('should throw error if more than one type of payload is found when parsing request.', async () => { createRequest.updatePayload = '123'; const requestWithUnknownProperty = Buffer.from(JSON.stringify(createRequest)); - expect(() => { Operation.createUnanchoredOperation(requestWithUnknownProperty, 1500000); 
}).toThrowError(); + expect(() => { Operation.create(requestWithUnknownProperty); }).toThrowError(); }); it('should throw error if signature is not found when parsing request.', async () => { delete createRequest.signature; const requestWithUnknownProperty = Buffer.from(JSON.stringify(createRequest)); - expect(() => { Operation.createUnanchoredOperation(requestWithUnknownProperty, 1500000); }).toThrowError(); + expect(() => { Operation.create(requestWithUnknownProperty); }).toThrowError(); }); describe('Update payload', async () => { @@ -61,38 +61,6 @@ describe('Operation', async () => { expect(() => { Operation.validateUpdatePayload(updatePayload); }).toThrow(expectedError); }); - it('should throw error if didUniqueSuffix is not a valid multihash string.', async () => { - const updatePayload = generateUpdatePayloadForPublicKeys(); - updatePayload.didUniqueSuffix = 'invalidHash'; - - try { - Operation.validateUpdatePayload(updatePayload); - } catch (error) { - if (error instanceof SidetreeError && - error.code === ErrorCode.OperationUpdatePayloadDidUniqueSuffixInvalid) { - return; // Expected Sidetree error. - } else { - throw error; // Unexpected error, throw to fail the test. - } - } - }); - - it('should throw error if previousOperationHash is not a valid multihash string.', async () => { - const updatePayload = generateUpdatePayloadForPublicKeys(); - updatePayload.previousOperationHash = 'invalidHash'; - - try { - Operation.validateUpdatePayload(updatePayload); - } catch (error) { - if (error instanceof SidetreeError && - error.code === ErrorCode.OperationUpdatePayloadPreviousOperationHashInvalid) { - return; // Expected Sidetree error. - } else { - throw error; // Unexpected error, throw to fail the test. 
- } - } - }); - it('should throw error if `patches` is not an array.', async () => { const updatePayload = generateUpdatePayloadForPublicKeys(); (updatePayload as any).patches = 'shouldNotBeAString'; diff --git a/tests/core/OperationProcessor.spec.ts b/tests/core/OperationProcessor.spec.ts index 35fa9992b..e29b59706 100644 --- a/tests/core/OperationProcessor.spec.ts +++ b/tests/core/OperationProcessor.spec.ts @@ -1,14 +1,16 @@ -import BatchFile from '../../lib/core/BatchFile'; -import Cryptography from '../../lib/core/util/Cryptography'; -import Document, { IDocument } from '../../lib/core/Document'; +import AnchoredOperation from '../../lib/core/versions/latest/AnchoredOperation'; +import AnchoredOperationModel from '../../lib/core/models/AnchoredOperationModel'; +import BatchFile from '../../lib/core/versions/latest/BatchFile'; +import Cryptography from '../../lib/core/versions/latest/util/Cryptography'; +import Document from '../../lib/core/versions/latest/Document'; +import DocumentModel from '../../lib/core/versions/latest/models/DocumentModel'; +import ICas from '../../lib/core/interfaces/ICas'; +import IOperationStore from '../../lib/core/interfaces/IOperationStore'; import MockCas from '../mocks/MockCas'; import MockOperationStore from '../mocks/MockOperationStore'; import OperationGenerator from '../generators/OperationGenerator'; -import OperationProcessor from '../../lib/core/OperationProcessor'; -import OperationStore from '../../lib/core/interfaces/OperationStore'; -import ProtocolParameters from '../../lib/core/ProtocolParameters'; -import { Cas } from '../../lib/core/Cas'; -import { Operation } from '../../lib/core/Operation'; +import OperationProcessor from '../../lib/core/versions/latest/OperationProcessor'; +import Resolver from '../../lib/core/Resolver'; /** * Creates a batch file with single operation given operation buffer, @@ -16,36 +18,37 @@ import { Operation } from '../../lib/core/Operation'; * @returns The operation in the batch file added 
in the form of a Operation. */ async function addBatchFileOfOneOperationToCas ( - opBuf: Buffer, - cas: Cas, + operationBuffer: Buffer, + cas: ICas, transactionNumber: number, transactionTime: number, - operationIndex: number): Promise { - const operations: Buffer[] = [ opBuf ]; - const batchBuffer = BatchFile.fromOperationBuffers(operations); - const batchFileAddress = await cas.write(batchBuffer); - const resolvedTransaction = { + operationIndex: number): Promise { + + const operationBuffers: Buffer[] = [ operationBuffer ]; + const batchBuffer = BatchFile.fromOperationBuffers(operationBuffers); + await cas.write(batchBuffer); + + const anchoredOperationModel: AnchoredOperationModel = { + operationBuffer, + operationIndex, transactionNumber, - transactionTime, - transactionTimeHash: 'unused', - anchorFileHash: 'unused', - batchFileHash: batchFileAddress + transactionTime }; - const op = Operation.createAnchoredOperation(opBuf, resolvedTransaction, operationIndex); - return op; + const anchoredOperation = AnchoredOperation.createAnchoredOperation(anchoredOperationModel); + return anchoredOperation; } async function createUpdateSequence ( didUniqueSuffix: string, - createOp: Operation, - cas: Cas, + createOp: AnchoredOperation, + cas: ICas, numberOfUpdates: number, - privateKey: any): Promise { + privateKey: any): Promise { const ops = new Array(createOp); - const opHashes = new Array(createOp.getOperationHash()); + const opHashes = new Array(createOp.operationHash); for (let i = 0; i < numberOfUpdates; ++i) { const mostRecentVersion = opHashes[i]; @@ -76,7 +79,7 @@ async function createUpdateSequence ( ); ops.push(updateOp); - const updateOpHash = updateOp.getOperationHash(); + const updateOpHash = updateOp.operationHash; opHashes.push(updateOpHash); } @@ -114,23 +117,20 @@ function getPermutation (size: number, index: number): Array { return permutation; } -function validateDidDocumentAfterUpdates (didDocument: IDocument | undefined, numberOfUpdates: number) { 
+function validateDidDocumentAfterUpdates (didDocument: DocumentModel | undefined, numberOfUpdates: number) { expect(didDocument).toBeDefined(); expect(didDocument!.service[0].serviceEndpoint.instance[0]).toEqual('did:sidetree:value' + (numberOfUpdates - 1)); } describe('OperationProcessor', async () => { - const versionsOfProtocolParameters = require('../json/protocol-parameters-test.json'); - ProtocolParameters.initialize(versionsOfProtocolParameters); - // Load the DID Document template. const didDocumentTemplate = require('../json/didDocumentTemplate.json'); let cas = new MockCas(); const config = require('../json/config-test.json'); - let operationProcessor: OperationProcessor; - let operationStore: OperationStore; - let createOp: Operation | undefined; + let resolver: Resolver; + let operationStore: IOperationStore; + let createOp: AnchoredOperation | undefined; let publicKey: any; let privateKey: any; let didUniqueSuffix: string; @@ -140,47 +140,48 @@ describe('OperationProcessor', async () => { cas = new MockCas(); operationStore = new MockOperationStore(); - operationProcessor = new OperationProcessor(config.didMethodName, operationStore); + resolver = new Resolver((_blockchainTime) => new OperationProcessor(config.didMethodName), operationStore); const createOperationBuffer = await OperationGenerator.generateCreateOperationBuffer(didDocumentTemplate, publicKey, privateKey); createOp = await addBatchFileOfOneOperationToCas(createOperationBuffer, cas, 0, 0, 0); - didUniqueSuffix = createOp.getOperationHash(); + didUniqueSuffix = createOp.didUniqueSuffix; }); it('should return a DID Document for resolve(did) for a registered DID', async () => { - await operationProcessor.process([createOp!]); - const didDocument = await operationProcessor.resolve(didUniqueSuffix); + await operationStore.put([createOp!]); + + const didDocument = await resolver.resolve(didUniqueSuffix) as DocumentModel; // This is a poor man's version based on public key properties 
expect(didDocument).toBeDefined(); - const publicKey2 = Document.getPublicKey(didDocument!, 'key2'); + const publicKey2 = Document.getPublicKey(didDocument, 'key2'); expect(publicKey2).toBeDefined(); expect(publicKey2!.owner).toBeUndefined(); }); it('should ignore a duplicate create operation', async () => { - await operationProcessor.process([createOp!]); + await operationStore.put([createOp!]); // Create and process a duplicate create op const createOperationBuffer = await OperationGenerator.generateCreateOperationBuffer(didDocumentTemplate, publicKey, privateKey); const duplicateCreateOp = await addBatchFileOfOneOperationToCas(createOperationBuffer, cas, 1, 1, 0); - await operationProcessor.process([duplicateCreateOp]); + await operationStore.put([duplicateCreateOp]); - const didDocument = await operationProcessor.resolve(didUniqueSuffix); + const didDocument = await resolver.resolve(didUniqueSuffix) as DocumentModel; // This is a poor man's version based on public key properties expect(didDocument).toBeDefined(); - const publicKey2 = Document.getPublicKey(didDocument!, 'key2'); + const publicKey2 = Document.getPublicKey(didDocument, 'key2'); expect(publicKey2).toBeDefined(); expect(publicKey2!.owner).toBeUndefined(); }); it('should process update to remove a public key correctly', async () => { - await operationProcessor.process([createOp!]); + await operationStore.put([createOp!]); const updatePayload = { didUniqueSuffix, - previousOperationHash: createOp!.getOperationHash(), + previousOperationHash: createOp!.operationHash, patches: [ { action: 'remove-public-keys', @@ -192,21 +193,21 @@ describe('OperationProcessor', async () => { // Generate operation with an invalid key const updateOperationBuffer = await OperationGenerator.generateUpdateOperationBuffer(updatePayload, '#key1', privateKey); const updateOp = await addBatchFileOfOneOperationToCas(updateOperationBuffer, cas, 1, 1, 0); - await operationProcessor.process([updateOp]); + await 
operationStore.put([updateOp]); - const didDocument = await operationProcessor.resolve(didUniqueSuffix); + const didDocument = await resolver.resolve(didUniqueSuffix) as DocumentModel; expect(didDocument).toBeDefined(); - const key2 = Document.getPublicKey(didDocument!, '#key2'); + const key2 = Document.getPublicKey(didDocument, '#key2'); expect(key2).not.toBeDefined(); // if update above went through, new key would be added. }); it('should process updates correctly', async () => { const numberOfUpdates = 10; const ops = await createUpdateSequence(didUniqueSuffix, createOp!, cas, numberOfUpdates, privateKey); - await operationProcessor.process(ops); + await operationStore.put(ops); - const didDocument = await operationProcessor.resolve(didUniqueSuffix); + const didDocument = await resolver.resolve(didUniqueSuffix) as DocumentModel; validateDidDocumentAfterUpdates(didDocument, numberOfUpdates); }); @@ -215,9 +216,9 @@ describe('OperationProcessor', async () => { const ops = await createUpdateSequence(didUniqueSuffix, createOp!, cas, numberOfUpdates, privateKey); for (let i = numberOfUpdates ; i >= 0 ; --i) { - await operationProcessor.process([ops[i]]); + await operationStore.put([ops[i]]); } - const didDocument = await operationProcessor.resolve(didUniqueSuffix); + const didDocument = await resolver.resolve(didUniqueSuffix) as DocumentModel; validateDidDocumentAfterUpdates(didDocument, numberOfUpdates); }); @@ -231,10 +232,10 @@ describe('OperationProcessor', async () => { for (let i = 0 ; i < numberOfPermutations; ++i) { const permutation = getPermutation(numberOfOps, i); operationStore = new MockOperationStore(); - operationProcessor = new OperationProcessor(config.didMethodName, operationStore); + resolver = new Resolver((_blockchainTime) => new OperationProcessor(config.didMethodName), operationStore); const permutedOps = permutation.map(i => ops[i]); - await operationProcessor.process(permutedOps); - const didDocument = await 
operationProcessor.resolve(didUniqueSuffix); + await operationStore.put(permutedOps); + const didDocument = await resolver.resolve(didUniqueSuffix) as DocumentModel; validateDidDocumentAfterUpdates(didDocument, numberOfUpdates); } }); @@ -250,27 +251,27 @@ describe('OperationProcessor', async () => { const createOperation = await addBatchFileOfOneOperationToCas(operationBuffer, cas, 1, 0, 0); // Trigger processing of the operation. - await operationProcessor.process([createOperation]); - const didUniqueSuffix = createOperation.getOperationHash(); + await operationStore.put([createOperation]); + const didUniqueSuffix = createOperation.operationHash; // Attempt to resolve the DID and validate the outcome. - const didDocument = await operationProcessor.resolve(didUniqueSuffix); + const didDocument = await resolver.resolve(didUniqueSuffix); expect(didDocument).toBeUndefined(); }); it('should return undefined for deleted did', async () => { const numberOfUpdates = 10; const ops = await createUpdateSequence(didUniqueSuffix, createOp!, cas, numberOfUpdates, privateKey); - await operationProcessor.process(ops); + await operationStore.put(ops); - const didDocument = await operationProcessor.resolve(didUniqueSuffix); + const didDocument = await resolver.resolve(didUniqueSuffix) as DocumentModel; validateDidDocumentAfterUpdates(didDocument, numberOfUpdates); const deleteOperationBuffer = await OperationGenerator.generateDeleteOperationBuffer(didUniqueSuffix, '#key1', privateKey); const deleteOperation = await addBatchFileOfOneOperationToCas(deleteOperationBuffer, cas, numberOfUpdates + 1, numberOfUpdates + 1, 0); - await operationProcessor.process([deleteOperation]); + await operationStore.put([deleteOperation]); - const didDocumentAfterDelete = await operationProcessor.resolve(didUniqueSuffix); + const didDocumentAfterDelete = await resolver.resolve(didUniqueSuffix); expect(didDocumentAfterDelete).toBeUndefined(); }); @@ -285,65 +286,65 @@ describe('OperationProcessor', 
async () => { const createOperation = await addBatchFileOfOneOperationToCas(operationBuffer, cas, 1, 0, 0); // Trigger processing of the operation. - await operationProcessor.process([createOperation]); - const didUniqueSuffix = createOperation.getOperationHash(); + await operationStore.put([createOperation]); + const didUniqueSuffix = createOperation.operationHash; // Attempt to resolve the DID and validate the outcome. - const didDocument = await operationProcessor.resolve(didUniqueSuffix); + const didDocument = await resolver.resolve(didUniqueSuffix); expect(didDocument).toBeUndefined(); }); it('should return undefined for deleted did', async () => { const numberOfUpdates = 10; const ops = await createUpdateSequence(didUniqueSuffix, createOp!, cas, numberOfUpdates, privateKey); - await operationProcessor.process(ops); + await operationStore.put(ops); - const didDocument = await operationProcessor.resolve(didUniqueSuffix); + const didDocument = await resolver.resolve(didUniqueSuffix) as DocumentModel; validateDidDocumentAfterUpdates(didDocument, numberOfUpdates); const deleteOperationBuffer = await OperationGenerator.generateDeleteOperationBuffer(didUniqueSuffix, '#key1', privateKey); const deleteOperation = await addBatchFileOfOneOperationToCas(deleteOperationBuffer, cas, numberOfUpdates + 1, numberOfUpdates + 1, 0); - await operationProcessor.process([deleteOperation]); + await operationStore.put([deleteOperation]); - const didDocumentAfterDelete = await operationProcessor.resolve(didUniqueSuffix); + const didDocumentAfterDelete = await resolver.resolve(didUniqueSuffix); expect(didDocumentAfterDelete).toBeUndefined(); }); it('should ignore delete operations of a non-existent did', async () => { const deleteOperationBuffer = await OperationGenerator.generateDeleteOperationBuffer(didUniqueSuffix, '#key1', privateKey); const deleteOperation = await addBatchFileOfOneOperationToCas(deleteOperationBuffer, cas, 1, 1, 0); - await 
operationProcessor.process([deleteOperation]); + await operationStore.put([deleteOperation]); - const didDocumentAfterDelete = await operationProcessor.resolve(didUniqueSuffix); + const didDocumentAfterDelete = await resolver.resolve(didUniqueSuffix); expect(didDocumentAfterDelete).toBeUndefined(); }); it('should ignore delete operations with invalid signing key id', async () => { - await operationProcessor.process([createOp!]); + await operationStore.put([createOp!]); const deleteOperationBuffer = await OperationGenerator.generateDeleteOperationBuffer(didUniqueSuffix, 'InvalidKeyId', privateKey); const deleteOperation = await addBatchFileOfOneOperationToCas(deleteOperationBuffer, cas, 1, 1, 0); - await operationProcessor.process([deleteOperation]); + await operationStore.put([deleteOperation]); - const didDocument = await operationProcessor.resolve(didUniqueSuffix); + const didDocument = await resolver.resolve(didUniqueSuffix) as DocumentModel; expect(didDocument).toBeDefined(); - const publicKey2 = Document.getPublicKey(didDocument!, 'key2'); + const publicKey2 = Document.getPublicKey(didDocument, 'key2'); expect(publicKey2).toBeDefined(); expect(publicKey2!.owner).toBeUndefined(); }); it('should ignore delete operations with invalid signature', async () => { - await operationProcessor.process([createOp!]); + await operationStore.put([createOp!]); const deleteOperation = await OperationGenerator.generateDeleteOperation(didUniqueSuffix, '#key1', privateKey); deleteOperation.signature = 'InvalidSignature'; const deleteOperationBuffer = Buffer.from(JSON.stringify(deleteOperation)); const anchoredDeleteOperation = await addBatchFileOfOneOperationToCas(deleteOperationBuffer, cas, 1, 1, 0); - await operationProcessor.process([anchoredDeleteOperation]); + await operationStore.put([anchoredDeleteOperation]); - const didDocument = await operationProcessor.resolve(didUniqueSuffix); + const didDocument = await resolver.resolve(didUniqueSuffix) as DocumentModel; 
expect(didDocument).toBeDefined(); - const publicKey2 = Document.getPublicKey(didDocument!, 'key2'); + const publicKey2 = Document.getPublicKey(didDocument, 'key2'); expect(publicKey2).toBeDefined(); expect(publicKey2!.owner).toBeUndefined(); }); @@ -354,19 +355,19 @@ describe('OperationProcessor', async () => { // elide i = 0, the create operation for (let i = 1 ; i < ops.length ; ++i) { - await operationProcessor.process([ops[i]]); + await operationStore.put([ops[i]]); } - const didDocument = await operationProcessor.resolve(didUniqueSuffix); + const didDocument = await resolver.resolve(didUniqueSuffix); expect(didDocument).toBeUndefined(); }); it('should ignore update operation signed with an unresolvable key', async () => { - await operationProcessor.process([createOp!]); + await operationStore.put([createOp!]); const updatePayload = { didUniqueSuffix, - previousOperationHash: createOp!.getOperationHash(), + previousOperationHash: createOp!.operationHash, patches: [ { action: 'add-public-keys', @@ -384,21 +385,21 @@ describe('OperationProcessor', async () => { // Generate operation with an invalid key const updateOperationBuffer = await OperationGenerator.generateUpdateOperationBuffer(updatePayload, '#UnresolvableKey', privateKey); const updateOp = await addBatchFileOfOneOperationToCas(updateOperationBuffer, cas, 1, 1, 0); - await operationProcessor.process([updateOp]); + await operationStore.put([updateOp]); - const didDocument = await operationProcessor.resolve(didUniqueSuffix); + const didDocument = await resolver.resolve(didUniqueSuffix) as DocumentModel; expect(didDocument).toBeDefined(); - const newKey = Document.getPublicKey(didDocument!, 'new-key'); + const newKey = Document.getPublicKey(didDocument, 'new-key'); expect(newKey).not.toBeDefined(); // if update above went through, new key would be added. 
}); it('should ignore update operation with an invalid signature', async () => { - await operationProcessor.process([createOp!]); + await operationStore.put([createOp!]); const updatePayload = { didUniqueSuffix, - previousOperationHash: createOp!.getOperationHash(), + previousOperationHash: createOp!.operationHash, patches: [ { action: 'add-public-keys', @@ -418,21 +419,21 @@ describe('OperationProcessor', async () => { updateOperation.signature = 'InvalidSignature'; const updateOperationBuffer = Buffer.from(JSON.stringify(updateOperation)); const anchoredUpdateOperation = await addBatchFileOfOneOperationToCas(updateOperationBuffer, cas, 1, 1, 0); - await operationProcessor.process([anchoredUpdateOperation]); + await operationStore.put([anchoredUpdateOperation]); - const didDocument = await operationProcessor.resolve(didUniqueSuffix); + const didDocument = await resolver.resolve(didUniqueSuffix) as DocumentModel; expect(didDocument).toBeDefined(); - const newKey = Document.getPublicKey(didDocument!, 'new-key'); + const newKey = Document.getPublicKey(didDocument, 'new-key'); expect(newKey).not.toBeDefined(); // if update above went through, new key would be added. 
}); it('should pick earlier of two conflicting updates', async () => { - await operationProcessor.process([createOp!]); + await operationStore.put([createOp!]); const update1Payload = { didUniqueSuffix, - previousOperationHash: createOp!.getOperationHash(), + previousOperationHash: createOp!.operationHash, patches: [ { action: 'add-public-keys', @@ -449,7 +450,7 @@ describe('OperationProcessor', async () => { const update2Payload = { didUniqueSuffix, - previousOperationHash: createOp!.getOperationHash(), + previousOperationHash: createOp!.operationHash, patches: [ { action: 'add-public-keys', @@ -466,29 +467,29 @@ describe('OperationProcessor', async () => { const updateOperation2Buffer = await OperationGenerator.generateUpdateOperationBuffer(update2Payload, '#key1', privateKey); const updateOperation2 = await addBatchFileOfOneOperationToCas(updateOperation2Buffer, cas, 2, 2, 0); - await operationProcessor.process([updateOperation2]); + await operationStore.put([updateOperation2]); const updateOperation1Buffer = await OperationGenerator.generateUpdateOperationBuffer(update1Payload, '#key1', privateKey); const updateOperation1 = await addBatchFileOfOneOperationToCas(updateOperation1Buffer, cas, 1, 1, 0); - await operationProcessor.process([updateOperation1]); + await operationStore.put([updateOperation1]); - const didDocument = await operationProcessor.resolve(didUniqueSuffix); + const didDocument = await resolver.resolve(didUniqueSuffix) as DocumentModel; expect(didDocument).toBeDefined(); - expect(didDocument!.publicKey.length).toEqual(3); - expect(didDocument!.publicKey[2].id).toEqual('#new-key1'); + expect(didDocument.publicKey.length).toEqual(3); + expect(didDocument.publicKey[2].id).toEqual('#new-key1'); }); it('should rollback all', async () => { const numberOfUpdates = 10; const ops = await createUpdateSequence(didUniqueSuffix, createOp!, cas, numberOfUpdates, privateKey); - await operationProcessor.process(ops); - const didDocument = await 
operationProcessor.resolve(didUniqueSuffix); + await operationStore.put(ops); + const didDocument = await resolver.resolve(didUniqueSuffix) as DocumentModel; validateDidDocumentAfterUpdates(didDocument, numberOfUpdates); // rollback - await operationProcessor.rollback(); - const didDocumentAfterRollback = await operationProcessor.resolve(didUniqueSuffix); + await operationStore.delete(); + const didDocumentAfterRollback = await resolver.resolve(didUniqueSuffix); expect(didDocumentAfterRollback).toBeUndefined(); }); }); diff --git a/tests/core/ProtocolParameters.spec.ts b/tests/core/ProtocolParameters.spec.ts deleted file mode 100644 index 3c60783d1..000000000 --- a/tests/core/ProtocolParameters.spec.ts +++ /dev/null @@ -1,14 +0,0 @@ -import ProtocolParameters from '../../lib/core/ProtocolParameters'; - -describe('ProtocolParameters', () => { - const versionsOfProtocolParameters = require('../json/protocol-parameters-test.json'); - ProtocolParameters.initialize(versionsOfProtocolParameters); - - it('should fetch right protocol given the logical blockchain time.', async () => { - const protocol01 = ProtocolParameters.get(1); - expect(protocol01.startingBlockchainTime).toBe(0); - - const protocol10 = ProtocolParameters.get(500000); - expect(protocol10.startingBlockchainTime).toBe(500000); - }); -}); diff --git a/tests/core/RequestHandler.spec.ts b/tests/core/RequestHandler.spec.ts index 0b475f6b8..5ab08a1d2 100644 --- a/tests/core/RequestHandler.spec.ts +++ b/tests/core/RequestHandler.spec.ts @@ -1,47 +1,46 @@ -import BatchFile from '../../lib/core/BatchFile'; -import BatchWriter from '../../lib/core/BatchWriter'; -import Cryptography from '../../lib/core/util/Cryptography'; -import Did from '../../lib/core/Did'; -import IConfig from '../../lib/core/interfaces/IConfig'; -import IDidPublicKey from '../../lib/core/interfaces/IDidPublicKey'; -import Encoder from '../../lib/core/Encoder'; +import AnchoredOperation from '../../lib/core/versions/latest/AnchoredOperation'; 
+import AnchoredOperationModel from '../../lib/core/models/AnchoredOperationModel'; +import BatchFile from '../../lib/core/versions/latest/BatchFile'; +import BatchScheduler from '../../lib/core/BatchScheduler'; +import BatchWriter from '../../lib/core/versions/latest/BatchWriter'; +import Cryptography from '../../lib/core/versions/latest/util/Cryptography'; +import Did from '../../lib/core/versions/latest/Did'; +import DidPublicKeyModel from '../../lib/core/versions/latest/models/DidPublicKeyModel'; +import DocumentModel from '../../lib/core/versions/latest/models/DocumentModel'; +import Config from '../../lib/core/models/Config'; +import Encoder from '../../lib/core/versions/latest/Encoder'; +import ICas from '../../lib/core/interfaces/ICas'; +import IOperationStore from '../../lib/core/interfaces/IOperationStore'; import MockBlockchain from '../mocks/MockBlockchain'; import MockCas from '../mocks/MockCas'; import MockOperationQueue from '../mocks/MockOperationQueue'; import MockOperationStore from '../mocks/MockOperationStore'; -import Multihash from '../../lib/core/Multihash'; +import Multihash from '../../lib/core/versions/latest/Multihash'; import OperationGenerator from '../generators/OperationGenerator'; -import OperationProcessor from '../../lib/core/OperationProcessor'; -import OperationStore from '../../lib/core/interfaces/OperationStore'; -import ProtocolParameters from '../../lib/core/ProtocolParameters'; -import RequestHandler from '../../lib/core/RequestHandler'; -import { Cas } from '../../lib/core/Cas'; -import { IDocument } from '../../lib/core/Document'; -import { Operation } from '../../lib/core/Operation'; +import Resolver from '../../lib/core/Resolver'; +import OperationProcessor from '../../lib/core/versions/latest/OperationProcessor'; +import RequestHandler from '../../lib/core/versions/latest/RequestHandler'; import { Response } from '../../lib/common/Response'; describe('RequestHandler', () => { - const versionsOfProtocolParameters = 
require('../json/protocol-parameters-test.json'); - ProtocolParameters.initialize(versionsOfProtocolParameters); - // Surpress console logging during dtesting so we get a compact test summary in console. console.info = () => { return; }; console.error = () => { return; }; - const config: IConfig = require('../json/config-test.json'); + const config: Config = require('../json/config-test.json'); const didMethodName = config.didMethodName; // Load the DID Document template. const didDocumentTemplate = require('../json/didDocumentTemplate.json'); const blockchain = new MockBlockchain(); - let cas: Cas; - let batchWriter: BatchWriter; - let operationStore: OperationStore; - let operationProcessor; + let cas: ICas; + let batchScheduler: BatchScheduler; + let operationStore: IOperationStore; + let resolver: Resolver; let requestHandler: RequestHandler; - let publicKey: IDidPublicKey; + let publicKey: DidPublicKeyModel; let privateKey: any; let did: string; // This DID is created at the beginning of every test. let didUniqueSuffix: string; @@ -49,13 +48,21 @@ describe('RequestHandler', () => { // Start a new instance of Operation Processor, and create a DID before every test. 
beforeEach(async () => { + const allSupportedHashAlgorithms = [18]; const operationQueue = new MockOperationQueue(); + cas = new MockCas(); - batchWriter = new BatchWriter(blockchain, cas, config.batchingIntervalInSeconds, operationQueue); - operationStore = new MockOperationStore(); - operationProcessor = new OperationProcessor(config.didMethodName, operationStore); + const batchWriter = new BatchWriter(operationQueue, blockchain, cas); - requestHandler = new RequestHandler(operationProcessor, blockchain, batchWriter, didMethodName); + operationStore = new MockOperationStore(); + resolver = new Resolver((_blockchainTime) => new OperationProcessor(config.didMethodName), operationStore); + batchScheduler = new BatchScheduler((_blockchainTime) => batchWriter, blockchain, config.batchingIntervalInSeconds); + requestHandler = new RequestHandler( + resolver, + operationQueue, + didMethodName, + allSupportedHashAlgorithms + ); // Set a latest time that must be able to resolve to a protocol version in the protocol config file used. const mockLatestTime = { @@ -69,42 +76,40 @@ describe('RequestHandler', () => { const createOperationBuffer = await OperationGenerator.generateCreateOperationBuffer(didDocumentTemplate, publicKey, privateKey); await requestHandler.handleOperationRequest(createOperationBuffer); - await batchWriter.writeOperationBatch(); + await batchScheduler.writeOperationBatch(); // Generate the batch file and batch file hash. const batchBuffer = BatchFile.fromOperationBuffers([createOperationBuffer]); batchFileHash = MockCas.getAddress(batchBuffer); // Now force Operation Processor to process the create operation. 
- const resolvedTransaction = { + const anchoredOperationModel: AnchoredOperationModel = { transactionNumber: 1, transactionTime: 1, - transactionTimeHash: 'NOT_NEEDED', - anchorFileHash: 'NOT_NEEDED', - batchFileHash + operationBuffer: createOperationBuffer, + operationIndex: 0 }; - const createOperation = Operation.createAnchoredOperation(createOperationBuffer, resolvedTransaction, 0); - await operationProcessor.process([createOperation]); + const createOperation = AnchoredOperation.createAnchoredOperation(anchoredOperationModel); + await operationStore.put([createOperation]); // NOTE: this is a repeated step already done in beforeEach(), // but the same step needed to be in beforeEach() for other tests such as update and delete. const response = await requestHandler.handleOperationRequest(createOperationBuffer); const httpStatus = Response.toHttpStatus(response.status); - const currentBlockchainTime = blockchain.approximateTime; - const currentHashingAlgorithm = ProtocolParameters.get(currentBlockchainTime.time).hashAlgorithmInMultihashCode; + const currentHashingAlgorithm = 18; didUniqueSuffix = Did.getUniqueSuffixFromEncodeDidDocument(createOperation.encodedPayload, currentHashingAlgorithm); did = didMethodName + didUniqueSuffix; expect(httpStatus).toEqual(200); expect(response).toBeDefined(); - expect((response.body as IDocument).id).toEqual(did); + expect((response.body as DocumentModel).id).toEqual(did); }); it('should handle create operation request.', async () => { const blockchainWriteSpy = spyOn(blockchain, 'write'); - await batchWriter.writeOperationBatch(); + await batchScheduler.writeOperationBatch(); expect(blockchainWriteSpy).toHaveBeenCalledTimes(1); // Verfiy that CAS was invoked to store the batch file. 
@@ -147,8 +152,7 @@ describe('RequestHandler', () => { publicKey: [publicKey] }; const encodedOriginalDidDocument = Encoder.encode(JSON.stringify(originalDidDocument)); - const currentBlockchainTime = blockchain.approximateTime; - const hashAlgorithmInMultihashCode = ProtocolParameters.get(currentBlockchainTime.time).hashAlgorithmInMultihashCode; + const hashAlgorithmInMultihashCode = 18; const documentHash = Multihash.hash(Buffer.from(encodedOriginalDidDocument), hashAlgorithmInMultihashCode); const expectedDid = didMethodName + Encoder.encode(documentHash); const response = await requestHandler.handleResolveRequest(didMethodName + encodedOriginalDidDocument); @@ -177,7 +181,7 @@ describe('RequestHandler', () => { it('should respond with HTTP 200 when DID is delete operation request is successful.', async () => { // write operation batch to prevent the violation of 1 operation per DID per batch rule. - await batchWriter.writeOperationBatch(); + await batchScheduler.writeOperationBatch(); const request = await OperationGenerator.generateDeleteOperationBuffer(didUniqueSuffix, '#key1', privateKey); const response = await requestHandler.handleOperationRequest(request); const httpStatus = Response.toHttpStatus(response.status); @@ -187,7 +191,7 @@ describe('RequestHandler', () => { it('should respond with HTTP 200 when an update operation rquest is successful.', async () => { // write operation batch to prevent the violation of 1 operation per DID per batch rule. - await batchWriter.writeOperationBatch(); + await batchScheduler.writeOperationBatch(); // Create a request that will delete the 2nd public key. 
const patches = [ diff --git a/tests/core/util/Document.spec.ts b/tests/core/util/Document.spec.ts index 7bb751ffc..7fb298a35 100644 --- a/tests/core/util/Document.spec.ts +++ b/tests/core/util/Document.spec.ts @@ -1,5 +1,5 @@ -import Document from '../../../lib/core/Document'; -import Encoder from '../../../lib/core/Encoder'; +import Document from '../../../lib/core/versions/latest/Document'; +import Encoder from '../../../lib/core/versions/latest/Encoder'; describe('Document', () => { diff --git a/tests/core/util/MerkleTree.spec.ts b/tests/core/util/MerkleTree.spec.ts index 3b26a67d5..57e9c9a5f 100644 --- a/tests/core/util/MerkleTree.spec.ts +++ b/tests/core/util/MerkleTree.spec.ts @@ -1,5 +1,5 @@ -import Cryptography from '../../../lib/core/util/Cryptography'; -import MerkleTree from '../../../lib/core/util/MerkleTree'; +import Cryptography from '../../../lib/core/versions/latest/util/Cryptography'; +import MerkleTree from '../../../lib/core/versions/latest/util/MerkleTree'; describe('MerkleTree', () => { diff --git a/tests/generators/OperationGenerator.ts b/tests/generators/OperationGenerator.ts index 2afbc4ec3..afead1203 100644 --- a/tests/generators/OperationGenerator.ts +++ b/tests/generators/OperationGenerator.ts @@ -1,6 +1,7 @@ -import IDidPublicKey from '../../lib/core/interfaces/IDidPublicKey'; -import Encoder from '../../lib/core/Encoder'; -import { IOperation, Operation } from '../../lib/core/Operation'; +import DidPublicKeyModel from '../../lib/core/versions/latest/models/DidPublicKeyModel'; +import Encoder from '../../lib/core/versions/latest/Encoder'; +import Operation from '../../lib/core/versions/latest/Operation'; +import OperationModel from '../../lib/core/versions/latest/models/OperationModel'; import { PrivateKey } from '@decentralized-identity/did-auth-jose'; /** @@ -13,7 +14,12 @@ export default class OperationGenerator { * Creates a Create Operation with valid signature. * @param didDocumentTemplate A DID Document used as the template. 
Must contain at least one public-key. */ - public static async generateCreateOperation (didDocumentTemplate: any, publicKey: IDidPublicKey, privateKey: string | PrivateKey): Promise { + public static async generateCreateOperation ( + didDocumentTemplate: any, + publicKey: DidPublicKeyModel, + privateKey: string | PrivateKey + ): Promise { + // Replace the placeholder public-key with the public-key given. didDocumentTemplate.publicKey[0] = publicKey; @@ -41,7 +47,7 @@ export default class OperationGenerator { * Creates a Create Operation buffer with valid signature. * @param didDocumentTemplate A DID Document used as the template. Must contain at least one public-key. */ - public static async generateCreateOperationBuffer (didDocumentTemplate: any, publicKey: IDidPublicKey, privateKey: string | PrivateKey): Promise { + public static async generateCreateOperationBuffer (didDocumentTemplate: any, publicKey: DidPublicKeyModel, privateKey: string | PrivateKey): Promise { const operation = await OperationGenerator.generateCreateOperation(didDocumentTemplate, publicKey, privateKey); return Buffer.from(JSON.stringify(operation)); } @@ -57,7 +63,7 @@ export default class OperationGenerator { /** * Generates an Update Operation buffer with valid signature. */ - public static async generateUpdateOperation (updatePayload: object, keyId: string, privateKey: string | PrivateKey): Promise { + public static async generateUpdateOperation (updatePayload: object, keyId: string, privateKey: string | PrivateKey): Promise { // Encode Update payload. const updatePayloadJson = JSON.stringify(updatePayload); const updatePayloadEncoded = Encoder.encode(updatePayloadJson); @@ -89,7 +95,7 @@ export default class OperationGenerator { /** * Generates a Delete Operation. 
*/ - public static async generateDeleteOperation (didUniqueSuffix: string, keyId: string, privateKey: string | PrivateKey): Promise { + public static async generateDeleteOperation (didUniqueSuffix: string, keyId: string, privateKey: string | PrivateKey): Promise { const payload = { didUniqueSuffix }; // Encode payload. diff --git a/tests/generators/VegetaLoadGenerator.ts b/tests/generators/VegetaLoadGenerator.ts index b41ec95b2..feeccd8c7 100644 --- a/tests/generators/VegetaLoadGenerator.ts +++ b/tests/generators/VegetaLoadGenerator.ts @@ -1,8 +1,7 @@ import * as fs from 'fs'; -import Cryptography from '../../lib/core/util/Cryptography'; -import Did from '../../lib/core/Did'; +import Cryptography from '../../lib/core/versions/latest/util/Cryptography'; +import Did from '../../lib/core/versions/latest/Did'; import OperationGenerator from './OperationGenerator'; -import ProtocolParameters from '../../lib/core/ProtocolParameters'; /** * Class for generating files used for load testing using Vegeta. @@ -17,12 +16,9 @@ export default class VegetaLoadGenerator { * @param uniqueDidCount The number of unique DID to be generated. * @param endpointUrl The URL that the requests will be sent to. * @param absoluteFolderPath The folder that all the generated files will be saved to. - * @param blockchainTime The simulated blockchain time used to calculate the DID unique suffix. + * @param hashAlgorithmInMultihashCode The hash algorithm in Multihash code in DEC (not in HEX). 
*/ - public static async generateLoadFiles (uniqueDidCount: number, endpointUrl: string, absoluteFolderPath: string, blockchainTime: number) { - const versionsOfProtocolParameters = require('../json/protocol-parameters-test.json'); - ProtocolParameters.initialize(versionsOfProtocolParameters); - + public static async generateLoadFiles (uniqueDidCount: number, endpointUrl: string, absoluteFolderPath: string, hashAlgorithmInMultihashCode: number) { const didDocumentTemplate = require('../json/didDocumentTemplate.json'); const keyId = '#key1'; @@ -43,7 +39,7 @@ export default class VegetaLoadGenerator { fs.writeFileSync(absoluteFolderPath + `/requests/create${i}.json`, createOperationBuffer); // Compute the DID unique suffix from the generated Create payload. - const didUniqueSuffix = Did.getUniqueSuffixFromEncodeDidDocument(createPayload, ProtocolParameters.get(blockchainTime).hashAlgorithmInMultihashCode); + const didUniqueSuffix = Did.getUniqueSuffixFromEncodeDidDocument(createPayload, hashAlgorithmInMultihashCode); // Generate an Update payload. 
const updatePayload = { diff --git a/tests/json/core-protocol-versioning-test.json b/tests/json/core-protocol-versioning-test.json new file mode 100644 index 000000000..ef3b9a68f --- /dev/null +++ b/tests/json/core-protocol-versioning-test.json @@ -0,0 +1,6 @@ +[ + { + "startingBlockchainTime": 0, + "version": "latest" + } +] \ No newline at end of file diff --git a/tests/mocks/MockBlockchain.ts b/tests/mocks/MockBlockchain.ts index bd5a78045..32982bb5d 100644 --- a/tests/mocks/MockBlockchain.ts +++ b/tests/mocks/MockBlockchain.ts @@ -1,11 +1,11 @@ -import IBlockchainTime from '../../lib/core/interfaces/IBlockchainTime'; -import ITransaction from '../../lib/common/ITransaction'; -import { Blockchain } from '../../lib/core/Blockchain'; +import BlockchainTimeModel from '../../lib/core/models/BlockchainTimeModel'; +import IBlockchain from '../../lib/core/interfaces/IBlockchain'; +import TransactionModel from '../../lib/common/models/TransactionModel'; /** * Mock Blockchain class for testing. */ -export default class MockBlockchain implements Blockchain { +export default class MockBlockchain implements IBlockchain { /** Stores each hash given in write() method. 
*/ hashes: string[] = []; @@ -13,7 +13,7 @@ export default class MockBlockchain implements Blockchain { this.hashes.push(anchorFileHash); } - public async read (sinceTransactionNumber?: number, _transactionTimeHash?: string): Promise<{ moreTransactions: boolean, transactions: ITransaction[] }> { + public async read (sinceTransactionNumber?: number, _transactionTimeHash?: string): Promise<{ moreTransactions: boolean, transactions: TransactionModel[] }> { if (sinceTransactionNumber === undefined) { sinceTransactionNumber = -1; } @@ -24,7 +24,7 @@ export default class MockBlockchain implements Blockchain { moreTransactions = true; } - const transactions: ITransaction[] = []; + const transactions: TransactionModel[] = []; if (this.hashes.length > 0 && sinceTransactionNumber < this.hashes.length - 1) { const hashIndex = sinceTransactionNumber + 1; @@ -43,19 +43,19 @@ export default class MockBlockchain implements Blockchain { }; } - public async getFirstValidTransaction (_transactions: ITransaction[]): Promise { + public async getFirstValidTransaction (_transactions: TransactionModel[]): Promise { return undefined; } - private latestTime?: IBlockchainTime = { time: 500000, hash: 'dummyHash' }; + private latestTime?: BlockchainTimeModel = { time: 500000, hash: 'dummyHash' }; - public get approximateTime (): IBlockchainTime { + public get approximateTime (): BlockchainTimeModel { return this.latestTime!; } /** * Hardcodes the latest time to be returned. 
*/ - public setLatestTime (time: IBlockchainTime) { + public setLatestTime (time: BlockchainTimeModel) { this.latestTime = time; } } diff --git a/tests/mocks/MockCas.ts b/tests/mocks/MockCas.ts index 25fa63eb7..b6fde14c3 100644 --- a/tests/mocks/MockCas.ts +++ b/tests/mocks/MockCas.ts @@ -1,14 +1,14 @@ -import Encoder from '../../lib/core/Encoder'; -import IFetchResult from '../../lib/common/IFetchResult'; -import Multihash from '../../lib/core/Multihash'; -import { Cas } from '../../lib/core/Cas'; +import Encoder from '../../lib/core/versions/latest/Encoder'; +import FetchResult from '../../lib/common/models/FetchResult'; +import ICas from '../../lib/core/interfaces/ICas'; +import Multihash from '../../lib/core/versions/latest/Multihash'; import { FetchResultCode } from '../../lib/common/FetchResultCode'; /** * Implementation of a CAS class for testing. * Simply using a hash map to store all the content by hash. */ -export default class MockCas implements Cas { +export default class MockCas implements ICas { /** A Map that stores the given content. */ private storage: Map = new Map(); @@ -37,7 +37,7 @@ export default class MockCas implements Cas { return encodedHash; } - public async read (address: string, _maxSizeInBytes: number): Promise { + public async read (address: string, _maxSizeInBytes: number): Promise { // Wait for configured time before returning. await new Promise(resolve => setTimeout(resolve, this.mockSecondsTakenForEachCasFetch * 1000)); diff --git a/tests/mocks/MockOperationQueue.ts b/tests/mocks/MockOperationQueue.ts index c7bf98f93..287e047c4 100644 --- a/tests/mocks/MockOperationQueue.ts +++ b/tests/mocks/MockOperationQueue.ts @@ -1,9 +1,9 @@ -import OperationQueue from '../../lib/core/interfaces/OperationQueue'; +import IOperationQueue from '../../lib/core/versions/latest/interfaces/IOperationQueue'; /** * A mock in-memory operation queue used by the Batch Writer. 
*/ -export default class MockOperationQueue implements OperationQueue { +export default class MockOperationQueue implements IOperationQueue { private latestTimestamp = 0; private operations: Map = new Map(); diff --git a/tests/mocks/MockOperationStore.ts b/tests/mocks/MockOperationStore.ts index f5dc962ba..0bcc0feed 100644 --- a/tests/mocks/MockOperationStore.ts +++ b/tests/mocks/MockOperationStore.ts @@ -1,19 +1,20 @@ -import OperationStore from '../../lib/core/interfaces/OperationStore'; -import { Operation } from '../../lib/core/Operation'; +import AnchoredOperationModel from '../../lib/core/models/AnchoredOperationModel'; +import IOperationStore from '../../lib/core/interfaces/IOperationStore'; +import NamedAnchoredOperationModel from '../../lib/core/models/NamedAnchoredOperationModel'; /** * Compare two operations returning -1, 0, 1 when the first operand * is less than, equal, and greater than the second, respectively. * Used to sort operations by blockchain 'time' order. */ -function compareOperation (op1: Operation, op2: Operation): number { - if (op1.transactionNumber! < op2.transactionNumber!) { +function compareOperation (op1: AnchoredOperationModel, op2: AnchoredOperationModel): number { + if (op1.transactionNumber < op2.transactionNumber) { return -1; - } else if (op1.transactionNumber! > op2.transactionNumber!) { + } else if (op1.transactionNumber > op2.transactionNumber) { return 1; - } else if (op1.operationIndex! < op2.operationIndex!) { + } else if (op1.operationIndex < op2.operationIndex) { return -1; - } else if (op1.operationIndex! > op2.operationIndex!) { + } else if (op1.operationIndex > op2.operationIndex) { return 1; } @@ -23,34 +24,30 @@ function compareOperation (op1: Operation, op2: Operation): number { /** * A simple in-memory implementation of operation store. 
*/ -export default class MockOperationStore implements OperationStore { +export default class MockOperationStore implements IOperationStore { // Map DID unique suffixes to operations over it stored as an array. The array might be sorted // or unsorted by blockchain time order. - private readonly didToOperations: Map> = new Map(); + private readonly didToOperations: Map = new Map(); // Map DID unique suffixes to a boolean indicating if the operations array for the DID is sorted // or not. private readonly didUpdatedSinceLastSort: Map = new Map(); - private readonly emptyOperationsArray: Array = new Array(); - /** * Inserts an operation into the in-memory store. */ - private async insert (operation: Operation): Promise { - const didUniqueSuffix = operation.didUniqueSuffix; - - this.ensureDidEntriesExist(didUniqueSuffix); + private async insert (operation: NamedAnchoredOperationModel): Promise { + this.ensureDidContainerExist(operation.didUniqueSuffix); // Append the operation to the operation array for the did ... - this.didToOperations.get(didUniqueSuffix)!.push(operation); + this.didToOperations.get(operation.didUniqueSuffix)!.push(operation); // ... which leaves the array unsorted, so we record this fact - this.didUpdatedSinceLastSort.set(didUniqueSuffix, true); + this.didUpdatedSinceLastSort.set(operation.didUniqueSuffix, true); } /** * Implements OperationStore.put() */ - public async put (operations: Array): Promise { + public async put (operations: NamedAnchoredOperationModel[]): Promise { for (const operation of operations) { await this.insert(operation); } @@ -61,11 +58,11 @@ export default class MockOperationStore implements OperationStore { * Get an iterator that returns all operations with a given * didUniqueSuffix ordered by (transactionNumber, operationIndex). 
*/ - public async get (didUniqueSuffix: string): Promise> { + public async get (didUniqueSuffix: string): Promise { let didOps = this.didToOperations.get(didUniqueSuffix); if (!didOps) { - return this.emptyOperationsArray; + return []; } const updatedSinceLastSort = this.didUpdatedSinceLastSort.get(didUniqueSuffix)!; @@ -104,11 +101,11 @@ export default class MockOperationStore implements OperationStore { * Remove operations. A simple linear scan + filter that leaves the * original order intact for non-filters operations. */ - private static removeOperations (operations: Array, transactionNumber: number) { + private static removeOperations (operations: AnchoredOperationModel[], transactionNumber: number) { let writeIndex = 0; for (let i = 0 ; i < operations.length ; i++) { - if (operations[i].transactionNumber! <= transactionNumber) { + if (operations[i].transactionNumber <= transactionNumber) { operations[writeIndex++] = operations[i]; } } @@ -118,9 +115,9 @@ export default class MockOperationStore implements OperationStore { } } - private ensureDidEntriesExist (did: string) { + private ensureDidContainerExist (did: string) { if (this.didToOperations.get(did) === undefined) { - this.didToOperations.set(did, new Array()); + this.didToOperations.set(did, new Array()); this.didUpdatedSinceLastSort.set(did, false); } } diff --git a/tests/mocks/MockTransactionStore.ts b/tests/mocks/MockTransactionStore.ts index 7a7fa85c0..5c5f9d790 100644 --- a/tests/mocks/MockTransactionStore.ts +++ b/tests/mocks/MockTransactionStore.ts @@ -1,10 +1,10 @@ -import ITransaction from '../../lib/common/ITransaction'; -import SortedArray from '../../lib/core/util/SortedArray'; -import TransactionStore from '../../lib/core/interfaces/TransactionStore'; -import UnresolvableTransactionStore from '../../lib/core/interfaces/UnresolvableTransactionStore'; +import ITransactionStore from '../../lib/core/interfaces/ITransactionStore'; +import IUnresolvableTransactionStore from 
'../../lib/core/interfaces/IUnresolvableTransactionStore'; +import SortedArray from '../../lib/core/versions/latest/util/SortedArray'; +import TransactionModel from '../../lib/common/models/TransactionModel'; interface IUnresolvableTransactionInternal { - transaction: ITransaction; + transaction: TransactionModel; firstFetchTime: number; retryAttempts: number; nextRetryTime: number; @@ -13,11 +13,11 @@ interface IUnresolvableTransactionInternal { /** * In-memory implementation of the `TransactionStore`. */ -export class MockTransactionStore implements TransactionStore, UnresolvableTransactionStore { - private processedTransactions: ITransaction[] = []; +export class MockTransactionStore implements ITransactionStore, IUnresolvableTransactionStore { + private processedTransactions: TransactionModel[] = []; private unresolvableTransactions: Map = new Map(); - async addTransaction (transaction: ITransaction): Promise { + async addTransaction (transaction: TransactionModel): Promise { const lastTransaction = await this.getLastTransaction(); // If the last transaction is later or equal to the transaction to add, @@ -29,7 +29,7 @@ export class MockTransactionStore implements TransactionStore, UnresolvableTrans this.processedTransactions.push(transaction); } - async getLastTransaction (): Promise { + async getLastTransaction (): Promise { if (this.processedTransactions.length === 0) { return undefined; } @@ -39,8 +39,8 @@ export class MockTransactionStore implements TransactionStore, UnresolvableTrans return lastProcessedTransaction; } - async getExponentiallySpacedTransactions (): Promise { - const exponentiallySpacedTransactions: ITransaction[] = []; + async getExponentiallySpacedTransactions (): Promise { + const exponentiallySpacedTransactions: TransactionModel[] = []; let index = this.processedTransactions.length - 1; let distance = 1; while (index >= 0) { @@ -51,15 +51,15 @@ export class MockTransactionStore implements TransactionStore, UnresolvableTrans return 
exponentiallySpacedTransactions; } - public async getTransaction (_transactionNumber: number): Promise { + public async getTransaction (_transactionNumber: number): Promise { throw new Error('Not implemented.'); } - public async getTransactionsLaterThan (_transactionNumber: number | undefined, _max: number): Promise { + public async getTransactionsLaterThan (_transactionNumber: number | undefined, _max: number): Promise { throw new Error('Not implemented.'); } - async recordUnresolvableTransactionFetchAttempt (transaction: ITransaction): Promise { + async recordUnresolvableTransactionFetchAttempt (transaction: TransactionModel): Promise { const unresolvableTransaction = this.unresolvableTransactions.get(transaction.transactionNumber); if (unresolvableTransaction === undefined) { @@ -83,11 +83,11 @@ export class MockTransactionStore implements TransactionStore, UnresolvableTrans } } - async removeUnresolvableTransaction (transaction: ITransaction): Promise { + async removeUnresolvableTransaction (transaction: TransactionModel): Promise { this.unresolvableTransactions.delete(transaction.transactionNumber); } - async getUnresolvableTransactionsDueForRetry (): Promise { + async getUnresolvableTransactionsDueForRetry (): Promise { const now = Date.now(); const unresolvableTransactionsToRetry = []; @@ -111,7 +111,7 @@ export class MockTransactionStore implements TransactionStore, UnresolvableTrans // Locate the index of the given transaction using binary search. 
const compareTransactionAndTransactionNumber - = (transaction: ITransaction, transactionNumber: number) => { return transaction.transactionNumber - transactionNumber; }; + = (transaction: TransactionModel, transactionNumber: number) => { return transaction.transactionNumber - transactionNumber; }; const bestKnownValidRecentProcessedTransactionIndex = SortedArray.binarySearch(this.processedTransactions, transactionNumber, compareTransactionAndTransactionNumber); @@ -149,7 +149,7 @@ export class MockTransactionStore implements TransactionStore, UnresolvableTrans * Gets the list of transactions. * Mainly used for test purposes. */ - public getTransactions (): ITransaction[] { + public getTransactions (): TransactionModel[] { return this.processedTransactions; } } diff --git a/tests/src/bitcoin.spec.ts b/tests/src/bitcoin.spec.ts index 400696e50..f650c51f5 100644 --- a/tests/src/bitcoin.spec.ts +++ b/tests/src/bitcoin.spec.ts @@ -3,7 +3,7 @@ process.env.SIDETREE_TEST_MODE = 'true'; process.env.SIDETREE_BITCOIN_CONFIG_FILE_PATH = '../tests/json/bitcoin-config-test.json'; import * as supertest from 'supertest'; -import ErrorCode from '../../lib/common/ErrorCode'; +import ErrorCode from '../../lib/common/SharedErrorCode'; import RequestError from '../../lib/bitcoin/RequestError'; import { blockchainService, server } from '../../src/bitcoin'; import { ResponseStatus } from '../../lib/common/Response';