diff --git a/package.json b/package.json index 36d5e15e20..025df5ae74 100644 --- a/package.json +++ b/package.json @@ -31,7 +31,7 @@ "ts-loader": "^9.2.6", "ts-node": "^10.4.0", "tsconfig-paths": "^3.12.0", - "typescript": "^4.4.4" + "typescript": "^4.9.5" }, "resolutions": { "@polkadot/api": "10.1.4", diff --git a/packages/common/fixtures/package.json b/packages/common/fixtures/package.json index 7e66c73991..710e0e6ab0 100644 --- a/packages/common/fixtures/package.json +++ b/packages/common/fixtures/package.json @@ -21,7 +21,7 @@ "devDependencies": { "@polkadot/api": "^10", "@subql/types": "latest", - "typescript": "^4.1.3", + "typescript": "^4.9.5", "@subql/cli": "latest" } } diff --git a/packages/node-core/package.json b/packages/node-core/package.json index 0c8aa33c5f..a378748691 100644 --- a/packages/node-core/package.json +++ b/packages/node-core/package.json @@ -27,7 +27,7 @@ "@subql/types": "workspace:*", "@subql/utils": "workspace:*", "@subql/x-merkle-mountain-range": "^2.0.0-0.1.2", - "@willsoto/nestjs-prometheus": "^4.4.0", + "@willsoto/nestjs-prometheus": "^5.1.1", "async-lock": "^1.4.0", "async-mutex": "^0.4.0", "lodash": "^4.17.21", diff --git a/packages/node-core/src/api.service.ts b/packages/node-core/src/api.service.ts index f37962ee1d..9215d63a4d 100644 --- a/packages/node-core/src/api.service.ts +++ b/packages/node-core/src/api.service.ts @@ -2,8 +2,7 @@ // SPDX-License-Identifier: Apache-2.0 import {Injectable} from '@nestjs/common'; -// import {ApiWrapper} from '@subql/types-avalanche'; -import {NetworkMetadataPayload} from './events'; +import {ISubqueryProject} from './indexer'; import {getLogger} from './logger'; const logger = getLogger('api'); @@ -14,14 +13,11 @@ type FetchFunction = (batch: number[]) => Promise; type FetchFunctionProvider = () => FetchFunction; @Injectable() -export abstract class ApiService { - networkMeta: NetworkMetadataPayload; +export abstract class ApiService
{ + constructor(protected project: P) {} - constructor(protected project: any) {} - - abstract init(): Promise; - abstract get api(): any; /*ApiWrapper*/ - abstract fetchBlocks(batch: number[]): Promise; + abstract init(): Promise>; + abstract get api(): A; /*ApiWrapper*/ async fetchBlocksGeneric( fetchFuncProvider: FetchFunctionProvider, @@ -35,7 +31,7 @@ export abstract class ApiService { // Get the latest fetch function from the provider const fetchFunc = fetchFuncProvider(); return await fetchFunc(batch); - } catch (e) { + } catch (e: any) { logger.error(e, `Failed to fetch blocks ${batch[0]}...${batch[batch.length - 1]}`); reconnectAttempts++; diff --git a/packages/node-core/src/configure/NodeConfig.ts b/packages/node-core/src/configure/NodeConfig.ts index 40307ff9f6..a956bc6ce2 100644 --- a/packages/node-core/src/configure/NodeConfig.ts +++ b/packages/node-core/src/configure/NodeConfig.ts @@ -26,7 +26,7 @@ export interface IConfig { readonly networkDictionary?: string; readonly dictionaryResolver?: string; readonly outputFmt?: 'json'; - readonly logLevel?: LevelWithSilent; + readonly logLevel: LevelWithSilent; readonly queryLimit: number; readonly indexCountLimit: number; readonly timestampField: boolean; @@ -45,15 +45,16 @@ export interface IConfig { readonly pgCa?: string; readonly pgKey?: string; readonly pgCert?: string; - readonly storeCacheThreshold?: number; - readonly storeGetCacheSize?: number; - readonly storeCacheAsync?: boolean; + readonly storeCacheThreshold: number; + readonly storeGetCacheSize: number; + readonly storeCacheAsync: boolean; } export type MinConfig = Partial> & Pick; const DEFAULT_CONFIG = { localMode: false, + logLevel: 'info', batchSize: 100, timeout: 900, blockTime: 6000, @@ -104,7 +105,11 @@ export class NodeConfig implements IConfig { get subqueryName(): string { assert(this._config.subquery); - return this._config.subqueryName ?? last(this.subquery.split(path.sep)); + const name = this._config.subqueryName ?? last(this.subquery.split(path.sep)); + if (!name) { + throw new Error('Unable to get subquery name'); + } + return name; } get localMode(): boolean { @@ -134,10 +139,10 @@ export class NodeConfig implements IConfig { } get storeCacheAsync(): boolean { - return this._config.storeCacheAsync; + return !!this._config.storeCacheAsync; } - get dictionaryResolver(): string { + get dictionaryResolver(): string | undefined { return this._config.dictionaryResolver; } @@ -188,7 +193,7 @@ export class NodeConfig implements IConfig { get mmrPath(): string { return this._config.mmrPath ?? `.mmr/${this.subqueryName}.mmr`; } - get ipfs(): string { + get ipfs(): string | undefined { return this._config.ipfs; } @@ -196,16 +201,16 @@ export class NodeConfig implements IConfig { return this._config.dbSchema ?? 
this.subqueryName; } - get workers(): number { + get workers(): number | undefined { return this._config.workers; } get profiler(): boolean { - return this._config.profiler; + return !!this._config.profiler; } get unsafe(): boolean { - return this._config.unsafe; + return !!this._config.unsafe; } get subscription(): boolean { @@ -221,7 +226,7 @@ export class NodeConfig implements IConfig { } get unfinalizedBlocks(): boolean { - return this._config.unfinalizedBlocks; + return !!this._config.unfinalizedBlocks; } get isPostgresSecureConnection(): boolean { @@ -234,8 +239,8 @@ export class NodeConfig implements IConfig { } try { return getFileContent(this._config.pgCa, 'postgres ca cert'); - } catch (e) { - logger.error(e); + } catch (e: any) { + logger.error(e, 'Unable to get postges CA Cert'); throw e; } } @@ -247,8 +252,8 @@ export class NodeConfig implements IConfig { try { return getFileContent(this._config.pgKey, 'postgres client key'); - } catch (e) { - logger.error(e); + } catch (e: any) { + logger.error(e, 'Unable to get postgres client key'); throw e; } } @@ -259,8 +264,8 @@ export class NodeConfig implements IConfig { } try { return getFileContent(this._config.pgCert, 'postgres client cert'); - } catch (e) { - logger.error(e); + } catch (e: any) { + logger.error(e, 'Unable to get postgres client cert'); throw e; } } diff --git a/packages/node-core/src/db/db.module.ts b/packages/node-core/src/db/db.module.ts index 4c580f5929..3601b17b49 100644 --- a/packages/node-core/src/db/db.module.ts +++ b/packages/node-core/src/db/db.module.ts @@ -35,7 +35,7 @@ async function establishConnectionSequelize(option: SequelizeOption, numRetries: const sequelize = new Sequelize(uri, option); try { await sequelize.authenticate(); - } catch (error) { + } catch (error: any) { if (JSON.stringify(error.message).includes(CONNECTION_SSL_ERROR_REGEX)) { logger.warn('Database does not support SSL connection, will try to connect without it'); option.dialectOptions = undefined; diff --git a/packages/node-core/src/indexer/PoiBlock.ts b/packages/node-core/src/indexer/PoiBlock.ts index 9fe1684a92..2facb7a869 100644 --- a/packages/node-core/src/indexer/PoiBlock.ts +++ b/packages/node-core/src/indexer/PoiBlock.ts @@ -1,29 +1,21 @@ // Copyright 2020-2022 OnFinality Limited authors & contributors // SPDX-License-Identifier: Apache-2.0 -import { u8aConcat, numberToU8a, hexToU8a, isHex, isU8a } from '@polkadot/util'; -import { blake2AsU8a } from '@polkadot/util-crypto'; -import { ProofOfIndex } from './entities/Poi.entity'; +import {u8aConcat, numberToU8a, hexToU8a, isHex, isU8a} from '@polkadot/util'; +import {blake2AsU8a} from '@polkadot/util-crypto'; +import {ProofOfIndex} from './entities/Poi.entity'; const poiBlockHash = ( id: number, chainBlockHash: string | Uint8Array, operationHashRoot: Uint8Array, parentHash: Uint8Array, - projectId: string, + projectId: string ): Uint8Array => { if (!id || !chainBlockHash || !operationHashRoot || !projectId) { throw new Error('Poof of index: can not generate block hash'); } - return blake2AsU8a( - u8aConcat( - numberToU8a(id), - chainBlockHash, - operationHashRoot, - Buffer.from(projectId), - parentHash, - ), - ); + return blake2AsU8a(u8aConcat(numberToU8a(id), chainBlockHash, operationHashRoot, Buffer.from(projectId), parentHash)); }; export class PoiBlock implements ProofOfIndex { @@ -32,7 +24,6 @@ export class PoiBlock implements ProofOfIndex { readonly hash: Uint8Array; readonly parentHash: Uint8Array; readonly operationHashRoot: Uint8Array; - mmrRoot: Uint8Array; readonly 
projectId: string; constructor( @@ -41,7 +32,7 @@ export class PoiBlock implements ProofOfIndex { hash: Uint8Array, parentHash: Uint8Array, operationHashRoot: Uint8Array, - projectId: string, + projectId: string ) { this.id = id; this.chainBlockHash = chainBlockHash; @@ -56,29 +47,18 @@ export class PoiBlock implements ProofOfIndex { chainBlockHash: string | Uint8Array, operationHashRoot: Uint8Array, parentHash: Uint8Array, - projectId: string, + projectId: string ): PoiBlock { - const _poiBlockHash = poiBlockHash( - id, - chainBlockHash, - operationHashRoot, - parentHash, - projectId, - ); - let _chainBlockHash: Uint8Array; + const _poiBlockHash = poiBlockHash(id, chainBlockHash, operationHashRoot, parentHash, projectId); + let _chainBlockHash: Uint8Array | undefined; if (isHex(chainBlockHash)) { _chainBlockHash = hexToU8a(chainBlockHash); } else if (isU8a(chainBlockHash)) { _chainBlockHash = chainBlockHash; + } else { + throw new Error(`Unable to create PoiBlock, chainBlockHash was not valid. Received: "${chainBlockHash}"`); } - const poiBlock = new PoiBlock( - id, - _chainBlockHash, - _poiBlockHash, - parentHash, - operationHashRoot, - projectId, - ); + const poiBlock = new PoiBlock(id, _chainBlockHash, _poiBlockHash, parentHash, operationHashRoot, projectId); return poiBlock; } } diff --git a/packages/node-core/src/indexer/benchmark.service.ts b/packages/node-core/src/indexer/benchmark.service.ts index 85627820f3..18ce4131fc 100644 --- a/packages/node-core/src/indexer/benchmark.service.ts +++ b/packages/node-core/src/indexer/benchmark.service.ts @@ -17,15 +17,15 @@ dayjs.extend(duration); @Injectable() export class BenchmarkService { - private currentProcessingHeight: number; - private currentProcessingTimestamp: number; - private targetHeight: number; - private lastRegisteredHeight: number; - private lastRegisteredTimestamp: number; - private blockPerSecond: number; + private currentProcessingHeight?: number; + private currentProcessingTimestamp?: number; + private targetHeight?: number; + private lastRegisteredHeight?: number; + private lastRegisteredTimestamp?: number; + private blockPerSecond?: number; - private currentProcessedBlockAmount: number; - private lastProcessedBlockAmount: number; + private currentProcessedBlockAmount?: number; + private lastProcessedBlockAmount?: number; constructor(private nodeConfig: NodeConfig) {} @@ -40,37 +40,41 @@ export class BenchmarkService { const timeDiff = this.currentProcessingTimestamp - this.lastRegisteredTimestamp; this.blockPerSecond = heightDiff === 0 || timeDiff === 0 ? 
0 : heightDiff / (timeDiff / 1000); - const blockDuration = dayjs.duration( - (this.targetHeight - this.currentProcessingHeight) / this.blockPerSecond, - 'seconds' - ); - const hoursMinsStr = blockDuration.format('HH [hours] mm [mins]'); - const days = Math.floor(blockDuration.asDays()); - const durationStr = `${days} days ${hoursMinsStr}`; + if (!this.targetHeight) { + logger.debug('Target height is not defined, not logging benchmark'); + } else { + const blockDuration = dayjs.duration( + (this.targetHeight - this.currentProcessingHeight) / this.blockPerSecond, + 'seconds' + ); + const hoursMinsStr = blockDuration.format('HH [hours] mm [mins]'); + const days = Math.floor(blockDuration.asDays()); + const durationStr = `${days} days ${hoursMinsStr}`; + + if (this.nodeConfig.profiler && this.currentProcessedBlockAmount && this.lastProcessedBlockAmount) { + logger.info( + `Processed ${ + this.currentProcessedBlockAmount - this.lastProcessedBlockAmount + } blocks in the last ${SAMPLING_TIME_VARIANCE}secs ` + ); + } - if (this.nodeConfig.profiler) { logger.info( - `Processed ${ - this.currentProcessedBlockAmount - this.lastProcessedBlockAmount - } blocks in the last ${SAMPLING_TIME_VARIANCE}secs ` + this.targetHeight === this.lastRegisteredHeight && this.blockPerSecond === 0 + ? 'Fully synced, waiting for new blocks' + : `${this.blockPerSecond.toFixed( + 2 + )} blocks/s. Target height: ${this.targetHeight.toLocaleString()}. Current height: ${this.currentProcessingHeight.toLocaleString()}. Estimated time remaining: ${ + this.blockPerSecond === 0 ? 'unknown' : durationStr + }` ); } - - logger.info( - this.targetHeight === this.lastRegisteredHeight && this.blockPerSecond === 0 - ? 'Fully synced, waiting for new blocks' - : `${this.blockPerSecond.toFixed( - 2 - )} blocks/s. Target height: ${this.targetHeight.toLocaleString()}. Current height: ${this.currentProcessingHeight.toLocaleString()}. Estimated time remaining: ${ - this.blockPerSecond === 0 ? 
'unknown' : durationStr - }` - ); } this.lastRegisteredHeight = this.currentProcessingHeight; this.lastRegisteredTimestamp = this.currentProcessingTimestamp; this.lastProcessedBlockAmount = this.currentProcessedBlockAmount; } - } catch (e) { + } catch (e: any) { logger.warn(e, 'Failed to measure benchmark'); } } diff --git a/packages/node-core/src/indexer/blockDispatcher/base-block-dispatcher.ts b/packages/node-core/src/indexer/blockDispatcher/base-block-dispatcher.ts index 92ab8fad40..198a3029b8 100644 --- a/packages/node-core/src/indexer/blockDispatcher/base-block-dispatcher.ts +++ b/packages/node-core/src/indexer/blockDispatcher/base-block-dispatcher.ts @@ -20,6 +20,7 @@ import {NodeConfig} from '../../configure'; import {IndexerEvent} from '../../events'; import {getLogger} from '../../logger'; import {IQueue} from '../../utils'; +import {CachePoiModel} from '../storeCache/cachePoi'; const logger = getLogger('BaseBlockDispatcherService'); @@ -50,11 +51,11 @@ function isNullMerkelRoot(operationHash: Uint8Array): boolean { } export abstract class BaseBlockDispatcher implements IBlockDispatcher { - protected _latestBufferedHeight: number; - protected _processedBlockCount: number; - protected latestProcessedHeight: number; - protected currentProcessingHeight: number; - protected onDynamicDsCreated: (height: number) => Promise; + protected _latestBufferedHeight = 0; + protected _processedBlockCount = 0; + protected latestProcessedHeight = 0; + protected currentProcessingHeight = 0; + private _onDynamicDsCreated?: (height: number) => Promise; constructor( protected nodeConfig: NodeConfig, @@ -72,8 +73,9 @@ export abstract class BaseBlockDispatcher implements IBloc abstract enqueueBlocks(heights: number[], latestBufferHeight?: number): Promise; async init(onDynamicDsCreated: (height: number) => Promise): Promise { - this.onDynamicDsCreated = onDynamicDsCreated; - this.setProcessedBlockCount(await this.storeCacheService.metadata.find('processedBlockCount', 0)); + this._onDynamicDsCreated = onDynamicDsCreated; + // eslint-disable-next-line @typescript-eslint/no-non-null-assertion + this.setProcessedBlockCount((await this.storeCacheService.metadata.find('processedBlockCount', 0))!); } get queueSize(): number { @@ -81,7 +83,7 @@ export abstract class BaseBlockDispatcher implements IBloc } get freeSize(): number { - return this.queue.freeSpace; + return this.queue.freeSpace!; } get smartBatchSize(): number { @@ -92,6 +94,13 @@ export abstract class BaseBlockDispatcher implements IBloc return this.smartBatchService.minimumHeapRequired; } + protected get onDynamicDsCreated(): (height: number) => Promise { + if (!this._onDynamicDsCreated) { + throw new Error('BaseBlockDispatcher has not been initialized'); + } + return this._onDynamicDsCreated; + } + get latestBufferedHeight(): number { return this._latestBufferedHeight; } @@ -132,7 +141,6 @@ export abstract class BaseBlockDispatcher implements IBloc // Is called directly before a block is processed protected preProcessBlock(height: number): void { - this.storeService.setOperationStack(); this.storeService.setBlockHeight(height); this.currentProcessingHeight = height; @@ -201,7 +209,7 @@ export abstract class BaseBlockDispatcher implements IBloc await this.poiService.getLatestPoiBlockHash(), this.project.id ); - this.storeCacheService.poi.set(poiBlock); + this.poi.set(poiBlock); this.poiService.setLatestPoiBlockHash(poiBlock.hash); this.storeCacheService.metadata.set('lastPoiHeight', height); } @@ -233,4 +241,12 @@ export abstract class 
BaseBlockDispatcher implements IBloc
       meta.setIncrement('processedBlockCount');
     }
   }
+
+  private get poi(): CachePoiModel {
+    const poi = this.storeCacheService.poi;
+    if (!poi) {
+      throw new Error('MMR service expected POI but it was not found');
+    }
+    return poi;
+  }
 }
diff --git a/packages/node-core/src/indexer/blockDispatcher/block-dispatcher.ts b/packages/node-core/src/indexer/blockDispatcher/block-dispatcher.ts
index d55dc9c440..220940f2a9 100644
--- a/packages/node-core/src/indexer/blockDispatcher/block-dispatcher.ts
+++ b/packages/node-core/src/indexer/blockDispatcher/block-dispatcher.ts
@@ -89,7 +89,7 @@ export abstract class BlockDispatcher
     this.queue.putMany(heights);
-    this.latestBufferedHeight = latestBufferHeight ?? last(heights);
+    this.latestBufferedHeight = latestBufferHeight ?? last(heights) ?? this.latestBufferedHeight;
     void this.fetchBlocksFromQueue();
   }
@@ -111,7 +111,7 @@ export abstract class BlockDispatcher
     try {
       while (!this.isShutdown) {
-        const blockNums = this.queue.takeMany(Math.min(this.nodeConfig.batchSize, this.processQueue.freeSpace));
+        const blockNums = this.queue.takeMany(Math.min(this.nodeConfig.batchSize, this.processQueue.freeSpace!));
         // Used to compare before and after as a way to check if queue was flushed
         const bufferedHeight = this._latestBufferedHeight;
@@ -138,7 +138,7 @@ export abstract class BlockDispatcher
         // If specVersion not changed, a known overallSpecVer will be pass in
         // Otherwise use api to fetch runtimes
-        if (memoryLock.isLocked) {
+        if (memoryLock.isLocked()) {
          await memoryLock.waitForUnlock();
        }
@@ -148,7 +148,8 @@ export abstract class BlockDispatcher
        // Check if the queues have been flushed between queue.takeMany and fetchBlocksBatches resolving
        // Peeking the queue is because the latestBufferedHeight could have regrown since fetching block
-        if (bufferedHeight > this._latestBufferedHeight || this.queue.peek() < Math.min(...blockNums)) {
+        const peeked = this.queue.peek();
+        if (bufferedHeight > this._latestBufferedHeight || (peeked && peeked < Math.min(...blockNums))) {
          logger.info(`Queue was reset for new DS, discarding fetched blocks`);
          continue;
        }
@@ -163,8 +164,8 @@ export abstract class BlockDispatcher
            await this.postProcessBlock(height, processBlockResponse);
            //set block to null for garbage collection
-            block = null;
-          } catch (e) {
+            (block as any) = null;
+          } catch (e: any) {
            // TODO discard any cache changes from this block height
            if (this.isShutdown) {
              return;
@@ -186,7 +187,7 @@ export abstract class BlockDispatcher
          value: this.processQueue.size,
        });
      }
-    } catch (e) {
+    } catch (e: any) {
      logger.error(e, 'Failed to fetch blocks from queue');
      if (!this.isShutdown) {
        process.exit(1);
diff --git a/packages/node-core/src/indexer/blockDispatcher/worker-block-dispatcher.ts b/packages/node-core/src/indexer/blockDispatcher/worker-block-dispatcher.ts
index 4afc3ab8f4..79537d027a 100644
--- a/packages/node-core/src/indexer/blockDispatcher/worker-block-dispatcher.ts
+++ b/packages/node-core/src/indexer/blockDispatcher/worker-block-dispatcher.ts
@@ -28,14 +28,17 @@ type Worker = {
   terminate: () => Promise;
 };
+function initAutoQueue(workers: number | undefined, batchSize: number): AutoQueue {
+  assert(workers && workers > 0, 'Number of workers must be greater than 0');
+  return new AutoQueue(workers * batchSize * 2);
+}
+
 export abstract class WorkerBlockDispatcher
   extends BaseBlockDispatcher, DS>
   implements OnApplicationShutdown
 {
-  protected workers: W[];
+  protected workers: W[] = [];
   private numWorkers: number;
- - private
taskCounter = 0; private isShutdown = false; protected abstract fetchBlock(worker: W, height: number): Promise; @@ -57,14 +60,15 @@ export abstract class WorkerBlockDispatcher eventEmitter, project, projectService, - new AutoQueue(nodeConfig.workers * nodeConfig.batchSize * 2), + initAutoQueue(nodeConfig.workers, nodeConfig.batchSize), smartBatchService, storeService, storeCacheService, poiService, dynamicDsService ); - this.numWorkers = nodeConfig.workers; + // initAutoQueue will assert that workers is set. unfortunately we cant do anything before the super call + this.numWorkers = nodeConfig.workers!; } async init(onDynamicDsCreated: (height: number) => Promise): Promise { @@ -118,7 +122,7 @@ export abstract class WorkerBlockDispatcher heights.map(async (height) => this.enqueueBlock(height, await this.getNextWorkerIndex())); } - this.latestBufferedHeight = latestBufferHeight ?? last(heights); + this.latestBufferedHeight = latestBufferHeight ?? last(heights) ?? this.latestBufferedHeight; } private async enqueueBlock(height: number, workerIdx: number): Promise { @@ -151,7 +155,7 @@ export abstract class WorkerBlockDispatcher blockHash, reindexBlockHeight, }); - } catch (e) { + } catch (e: any) { // TODO discard any cache changes from this block height logger.error( e, diff --git a/packages/node-core/src/indexer/dictionary.service.test.ts b/packages/node-core/src/indexer/dictionary.service.test.ts index 772c9837e5..eea1062ef2 100644 --- a/packages/node-core/src/indexer/dictionary.service.test.ts +++ b/packages/node-core/src/indexer/dictionary.service.test.ts @@ -119,7 +119,7 @@ describe('DictionaryService', () => { const endBlock = 10001; const dic = await dictionaryService.getDictionary(startBlock, endBlock, batchSize, HAPPY_PATH_CONDITIONS); - expect(dic.batchBlocks.length).toBeGreaterThan(1); + expect(dic?.batchBlocks.length).toBeGreaterThan(1); }, 500000); it('return undefined when dictionary api failed', async () => { @@ -145,7 +145,7 @@ describe('DictionaryService', () => { const startBlock = 400000000; const endBlock = 400010000; const dic = await dictionaryService.getDictionary(startBlock, endBlock, batchSize, HAPPY_PATH_CONDITIONS); - expect(dic._metadata).toBeDefined(); + expect(dic?._metadata).toBeDefined(); }, 500000); it('test query the correct range', async () => { @@ -164,7 +164,7 @@ describe('DictionaryService', () => { ], }, ]); - expect(dic.batchBlocks).toEqual(range(startBlock, startBlock + batchSize)); + expect(dic?.batchBlocks).toEqual(range(startBlock, startBlock + batchSize)); }, 500000); it('use minimum value of event/extrinsic returned block as batch end block', async () => { @@ -213,7 +213,7 @@ describe('DictionaryService', () => { ], }, ]); - expect(dic.batchBlocks[dic.batchBlocks.length - 1]).toBe(333524); + expect(dic?.batchBlocks[dic.batchBlocks.length - 1]).toBe(333524); }, 500000); it('able to build queryEntryMap', async () => { diff --git a/packages/node-core/src/indexer/dictionary.service.ts b/packages/node-core/src/indexer/dictionary.service.ts index 8c93d2566d..ecaee963a9 100644 --- a/packages/node-core/src/indexer/dictionary.service.ts +++ b/packages/node-core/src/indexer/dictionary.service.ts @@ -144,7 +144,8 @@ function buildDictQueryFragment( }; if (useDistinct) { - node.args.distinct = ['BLOCK_HEIGHT']; + // Non null assertion here because we define the object explicitly + node.args!.distinct = ['BLOCK_HEIGHT']; } return [gqlVars, node]; @@ -152,12 +153,12 @@ function buildDictQueryFragment( @Injectable() export class DictionaryService implements 
OnApplicationShutdown { - protected client: ApolloClient; + private _client?: ApolloClient; private isShutdown = false; - private mappedDictionaryQueryEntries: Map; + private mappedDictionaryQueryEntries: Map = new Map(); private useDistinct = true; private useStartHeight = true; - protected _startHeight: number; + protected _startHeight?: number; constructor( readonly dictionaryEndpoint: string, @@ -184,7 +185,7 @@ export class DictionaryService implements OnApplicationShutdown { link = new HttpLink({uri: this.dictionaryEndpoint, fetch}); } - this.client = new ApolloClient({ + this._client = new ApolloClient({ cache: new InMemoryCache({resultCaching: true}), link, defaultOptions: { @@ -207,8 +208,19 @@ export class DictionaryService implements OnApplicationShutdown { } get startHeight(): number { + if (!this._startHeight) { + throw new Error('Dictionary start height is not set'); + } return this._startHeight; } + + protected get client(): ApolloClient { + if (!this._client) { + throw new Error('Dictionary service has not been initialized'); + } + return this._client; + } + onApplicationShutdown(): void { this.isShutdown = true; } @@ -303,30 +315,22 @@ export class DictionaryService implements OnApplicationShutdown { } return buildQuery(vars, nodes); } - buildDictionaryEntryMap( + buildDictionaryEntryMap( dataSources: Array, buildDictionaryQueryEntries: (startBlock: number) => DictionaryQueryEntry[] ): void { - const mappedDictionaryQueryEntries = new Map(); - for (const ds of dataSources.sort((a, b) => a.startBlock - b.startBlock)) { - mappedDictionaryQueryEntries.set(ds.startBlock, buildDictionaryQueryEntries(ds.startBlock)); + this.mappedDictionaryQueryEntries.set(ds.startBlock, buildDictionaryQueryEntries(ds.startBlock)); } - this.mappedDictionaryQueryEntries = mappedDictionaryQueryEntries; } getDictionaryQueryEntries(queryEndBlock: number): DictionaryQueryEntry[] { - let dictionaryQueryEntries: DictionaryQueryEntry[]; - this.mappedDictionaryQueryEntries.forEach((value, key) => { + for (const [key, value] of this.mappedDictionaryQueryEntries) { if (queryEndBlock >= key) { - dictionaryQueryEntries = value; + return value; } - }); - - if (dictionaryQueryEntries === undefined) { - return []; } - return dictionaryQueryEntries; + return []; } async scopedDictionaryEntries( diff --git a/packages/node-core/src/indexer/dynamic-ds.service.ts b/packages/node-core/src/indexer/dynamic-ds.service.ts index 08d04e21e6..ba86e7e758 100644 --- a/packages/node-core/src/indexer/dynamic-ds.service.ts +++ b/packages/node-core/src/indexer/dynamic-ds.service.ts @@ -22,13 +22,20 @@ export interface IDynamicDsService { } export abstract class DynamicDsService implements IDynamicDsService { - private metadata: CacheMetadataModel; - private tempDsRecords: Record; + private _metadata?: CacheMetadataModel; + private tempDsRecords: Record = {}; - private _datasources: DS[]; + private _datasources?: DS[]; init(metadata: CacheMetadataModel): void { - this.metadata = metadata; + this._metadata = metadata; + } + + private get metadata(): CacheMetadataModel { + if (!this._metadata) { + throw new Error('DynamicDsService has not been initialized'); + } + return this._metadata; } async resetDynamicDatasource(targetHeight: number): Promise { @@ -52,7 +59,7 @@ export abstract class DynamicDsService implements IDynamicDsService { this._datasources.push(ds); return ds; - } catch (e) { + } catch (e: any) { logger.error(e, 'Failed to create dynamic ds'); process.exit(1); } @@ -81,7 +88,7 @@ export abstract class DynamicDsService 
implements IDynamicDsService { logger.info(`Loaded ${dataSources.length} dynamic datasources`); return dataSources; - } catch (e) { + } catch (e: any) { logger.error(`Unable to get dynamic datasources:\n${e.message}`); process.exit(1); } diff --git a/packages/node-core/src/indexer/mmr.service.ts b/packages/node-core/src/indexer/mmr.service.ts index 9a41b1ca91..1debe48ae3 100644 --- a/packages/node-core/src/indexer/mmr.service.ts +++ b/packages/node-core/src/indexer/mmr.service.ts @@ -134,7 +134,9 @@ export class MmrService implements OnApplicationShutdown { } private validatePoiMmr(poiWithMmr: ProofOfIndex, mmrValue: Uint8Array): void { - if (!u8aEq(poiWithMmr.mmrRoot, mmrValue)) { + if (!poiWithMmr.mmrRoot) { + throw new Error(`Poi block height ${poiWithMmr.id}, Poi mmr has not been set`); + } else if (!u8aEq(poiWithMmr.mmrRoot, mmrValue)) { throw new Error( `Poi block height ${poiWithMmr.id}, Poi mmr ${u8aToHex( poiWithMmr.mmrRoot diff --git a/packages/node-core/src/indexer/poi.service.ts b/packages/node-core/src/indexer/poi.service.ts index 84540674e2..7ffe3f40e3 100644 --- a/packages/node-core/src/indexer/poi.service.ts +++ b/packages/node-core/src/indexer/poi.service.ts @@ -37,7 +37,7 @@ export class PoiService implements OnApplicationShutdown { } } - async getLatestPoiBlockHash(): Promise { + async getLatestPoiBlockHash(): Promise { if (!this.latestPoiBlockHash || !isMainThread) { const poiBlockHash = await this.fetchPoiBlockHashFromDb(); if (poiBlockHash === null || poiBlockHash === undefined) { diff --git a/packages/node-core/src/indexer/sandbox.spec.ts b/packages/node-core/src/indexer/sandbox.spec.ts index c99e2ba484..2214a275f3 100644 --- a/packages/node-core/src/indexer/sandbox.spec.ts +++ b/packages/node-core/src/indexer/sandbox.spec.ts @@ -25,7 +25,7 @@ describe('sandbox for subql-node', () => { }, new NodeConfig({subquery: ' ', subqueryName: ' '}) ); - let sandboxFuncionEndAt: Date; + let sandboxFuncionEndAt: Date | undefined; vm.on('console.log', (line) => { if (line === 'OK') { sandboxFuncionEndAt = new Date(); @@ -33,7 +33,7 @@ describe('sandbox for subql-node', () => { }); await vm.securedExec('testSandbox', []); const secureExecEndAt = new Date(); - expect(sandboxFuncionEndAt).toBeTruthy(); - expect(secureExecEndAt.getTime()).toBeGreaterThanOrEqual(sandboxFuncionEndAt.getTime()); + expect(sandboxFuncionEndAt).toBeDefined(); + expect(secureExecEndAt.getTime()).toBeGreaterThanOrEqual(sandboxFuncionEndAt?.getTime() ?? 
0);
   });
 });
diff --git a/packages/node-core/src/indexer/store.service.spec.ts b/packages/node-core/src/indexer/store.service.spec.ts
index f499bc4137..7e79de1f55 100644
--- a/packages/node-core/src/indexer/store.service.spec.ts
+++ b/packages/node-core/src/indexer/store.service.spec.ts
@@ -8,7 +8,7 @@ describe('Store Service', () => {
   let storeService: StoreService;
   it('addIdAndBlockRangeAttributes', () => {
-    storeService = new StoreService(null, null, null);
+    storeService = new StoreService(null as any, null as any, null as any, null as any);
     const attributes = {
       id: {
         type: DataTypes.STRING,
diff --git a/packages/node-core/src/indexer/store.service.ts b/packages/node-core/src/indexer/store.service.ts
index 835361b9a5..dc1b6c0868 100644
--- a/packages/node-core/src/indexer/store.service.ts
+++ b/packages/node-core/src/indexer/store.service.ts
@@ -60,7 +60,6 @@ import {
 import {generateIndexName, modelToTableName} from '../utils/sequelizeUtil';
 import {MetadataFactory, MetadataRepo, PoiFactory, PoiRepo} from './entities';
 import {CacheMetadataModel} from './storeCache';
-import {CachePoiModel} from './storeCache/cachePoi';
 import {StoreCacheService} from './storeCache/storeCache.service';
 import {StoreOperations} from './StoreOperations';
 import {IProjectNetworkConfig, ISubqueryProject, OperationType} from './types';
@@ -83,60 +82,101 @@ interface NotifyTriggerPayload {
   eventManipulation: string;
 }
+class NoInitError extends Error {
+  constructor() {
+    super('StoreService has not been initialized');
+  }
+}
+
 @Injectable()
 export class StoreService {
-  private modelIndexedFields: IndexField[];
-  private schema: string;
-  private modelsRelations: GraphQLModelsRelationsEnums;
-  private poiRepo: PoiRepo | undefined;
-  private metaDataRepo: MetadataRepo;
-  private operationStack: StoreOperations;
-  @Inject('ISubqueryProject') private subqueryProject: ISubqueryProject;
-  private blockHeight: number;
-  historical: boolean;
-  private dbType: SUPPORT_DB;
-  private useSubscription: boolean;
+  private poiRepo?: PoiRepo;
   private removedIndexes: RemovedIndexes = {};
+  private _modelIndexedFields?: IndexField[];
+  private _modelsRelations?: GraphQLModelsRelationsEnums;
+  private _metaDataRepo?: MetadataRepo;
+  private _historical?: boolean;
+  private _dbType?: SUPPORT_DB;
+  private _metadataModel?: CacheMetadataModel;
+
+  // Should be updated each block
+  private _blockHeight?: number;
+  private operationStack?: StoreOperations;
+
+  constructor(
+    private sequelize: Sequelize,
+    private config: NodeConfig,
+    readonly storeCache: StoreCacheService,
+    @Inject('ISubqueryProject') private subqueryProject: ISubqueryProject
+  ) {}
+
+  private get modelIndexedFields(): IndexField[] {
+    assert(!!this._modelIndexedFields, new NoInitError());
+    return this._modelIndexedFields;
+  }
+
+  private get modelsRelations(): GraphQLModelsRelationsEnums {
+    assert(!!this._modelsRelations, new NoInitError());
+    return this._modelsRelations;
+  }
+
+  private get metaDataRepo(): MetadataRepo {
+    assert(!!this._metaDataRepo, new NoInitError());
+    return this._metaDataRepo;
+  }
+
+  private get blockHeight(): number {
+    assert(!!this._blockHeight, new Error('StoreService.setBlockHeight has not been called'));
+    return this._blockHeight;
+  }
+
+  private get historical(): boolean {
+    assert(this._historical !== undefined, new NoInitError());
+    return this._historical;
+  }
-  poiModel?: CachePoiModel;
-  metadataModel: CacheMetadataModel;
+  private get dbType(): SUPPORT_DB {
+    assert(!!this._dbType, new NoInitError());
+    return this._dbType;
+  }
- constructor(private sequelize: Sequelize, private config: NodeConfig, readonly storeCache: StoreCacheService) {} + private get metadataModel(): CacheMetadataModel { + assert(!!this._metadataModel, new NoInitError()); + return this._metadataModel; + } async init(modelsRelations: GraphQLModelsRelationsEnums, schema: string): Promise { - this.schema = schema; - this.modelsRelations = modelsRelations; - this.historical = await this.getHistoricalStateEnabled(); + this._modelsRelations = modelsRelations; + this._historical = await this.getHistoricalStateEnabled(schema); logger.info(`Historical state is ${this.historical ? 'enabled' : 'disabled'}`); - this.dbType = await getDbType(this.sequelize); + this._dbType = await getDbType(this.sequelize); - this.useSubscription = this.config.subscription; - if (this.useSubscription && this.dbType === SUPPORT_DB.cockRoach) { - this.useSubscription = false; + let useSubscription = this.config.subscription; + if (useSubscription && this.dbType === SUPPORT_DB.cockRoach) { + useSubscription = false; logger.warn(`Subscription is not support with ${this.dbType}`); } if (this.historical && this.dbType === SUPPORT_DB.cockRoach) { - this.historical = false; + this._historical = false; logger.warn(`Historical feature is not support with ${this.dbType}`); } this.storeCache.init(this.historical, this.dbType === SUPPORT_DB.cockRoach); try { - await this.syncSchema(this.schema); + await this.syncSchema(schema, useSubscription); } catch (e: any) { logger.error(e, `Having a problem when syncing schema`); process.exit(1); } try { - this.modelIndexedFields = await this.getAllIndexFields(this.schema); + this._modelIndexedFields = await this.getAllIndexFields(schema); } catch (e: any) { logger.error(e, `Having a problem when get indexed fields`); process.exit(1); } this.storeCache.setRepos(this.metaDataRepo, this.poiRepo); - this.metadataModel = this.storeCache.metadata; - this.poiModel = this.storeCache.poi ?? 
undefined; + this._metadataModel = this.storeCache.metadata; this.metadataModel.set('historicalStateEnabled', this.historical); this.metadataModel.setIncrement('schemaMigrationCount'); @@ -168,7 +208,7 @@ export class StoreService { } // eslint-disable-next-line complexity - async syncSchema(schema: string): Promise { + async syncSchema(schema: string, useSubscription: boolean): Promise { const enumTypeMap = new Map(); if (this.historical) { const [results] = await this.sequelize.query(BTREE_GIST_EXTENSION_EXIST_QUERY); @@ -236,7 +276,7 @@ export class StoreService { } const extraQueries = []; // Function need to create ahead of triggers - if (this.useSubscription) { + if (useSubscription) { extraQueries.push(createSendNotificationTriggerFunction(schema)); } for (const model of this.modelsRelations.models) { @@ -278,7 +318,7 @@ export class StoreService { // see https://github.com/subquery/subql/issues/1542 } - if (this.useSubscription) { + if (useSubscription) { const triggerName = hashName(schema, 'notify_trigger', sequelizeModel.tableName); const notifyTriggers = await getTriggers(this.sequelize, triggerName); // Triggers not been found @@ -295,7 +335,7 @@ export class StoreService { } } // We have to drop the function after all triggers depend on it are removed - if (!this.useSubscription && this.dbType !== SUPPORT_DB.cockRoach) { + if (!useSubscription && this.dbType !== SUPPORT_DB.cockRoach) { extraQueries.push(dropNotifyFunction(schema)); } @@ -357,7 +397,7 @@ export class StoreService { this.poiRepo = PoiFactory(this.sequelize, schema); } - this.metaDataRepo = await MetadataFactory( + this._metaDataRepo = await MetadataFactory( this.sequelize, schema, this.config.multiChain, @@ -374,12 +414,12 @@ export class StoreService { // this.afterHandleCockroachIndex() } - async getHistoricalStateEnabled(): Promise { + async getHistoricalStateEnabled(schema: string): Promise { const {disableHistorical, multiChain} = this.config; try { const tableRes = await this.sequelize.query>( - `SELECT table_name FROM information_schema.tables where table_schema='${this.schema}'`, + `SELECT table_name FROM information_schema.tables where table_schema='${schema}'`, {type: QueryTypes.SELECT} ); @@ -396,7 +436,7 @@ export class StoreService { if (metadataTableNames.length === 1) { const res = await this.sequelize.query<{key: string; value: boolean | string}>( - `SELECT key, value FROM "${this.schema}"."${metadataTableNames[0]}" WHERE (key = 'historicalStateEnabled' OR key = 'genesisHash')`, + `SELECT key, value FROM "${schema}"."${metadataTableNames[0]}" WHERE (key = 'historicalStateEnabled' OR key = 'genesisHash')`, {type: QueryTypes.SELECT} ); @@ -457,7 +497,7 @@ export class StoreService { ): void { if (this.dbType === SUPPORT_DB.cockRoach) { indexes.forEach((index, i) => { - if (index.using === IndexType.HASH && !existedIndexes.includes(index.name)) { + if (index.using === IndexType.HASH && !existedIndexes.includes(index.name!)) { const cockroachDbIndexQuery = `CREATE INDEX "${index.name}" ON "${schema}"."${modelToTableName(modelName)}"(${ index.fields }) USING HASH;`; @@ -500,7 +540,7 @@ export class StoreService { // Only used with historical to add indexes to ID fields for gettign entitities by ID private addHistoricalIdIndex(model: GraphQLModelsType, indexes: IndexesOptions[]): void { const idFieldName = model.fields.find((field) => field.type === 'ID')?.name; - if (idFieldName && !indexes.find((idx) => idx.fields.includes(idFieldName))) { + if (idFieldName && !indexes.find((idx) => 
idx.fields?.includes(idFieldName))) { indexes.push({ fields: [Utils.underscoredIf(idFieldName, true)], unique: false, @@ -583,18 +623,16 @@ export class StoreService { }); } - setOperationStack(): void { + setBlockHeight(blockHeight: number): void { + this._blockHeight = blockHeight; if (this.config.proofOfIndex) { this.operationStack = new StoreOperations(this.modelsRelations.models); } } - setBlockHeight(blockHeight: number): void { - this.blockHeight = blockHeight; - } - getOperationMerkleRoot(): Uint8Array { if (this.config.proofOfIndex) { + assert(this.operationStack, new Error('OperationStack is not set, make sure `setBlockHeight` has been called')); this.operationStack.makeOperationMerkleTree(); const merkelRoot = this.operationStack.getOperationMerkleRoot(); if (merkelRoot === null) { @@ -649,6 +687,9 @@ group by } async rewind(targetBlockHeight: number, transaction: Transaction): Promise { + if (!this.historical) { + throw new Error('Unable to reindex, historical state not enabled'); + } for (const model of Object.values(this.sequelize.models)) { if ('__block_range' in model.getAttributes()) { await model.destroy({ @@ -682,9 +723,7 @@ group by } this.metadataModel.set('lastProcessedHeight', targetBlockHeight); if (this.config.proofOfIndex) { - if (!this.poiRepo) { - throw new Error('Expected POI repo to exist'); - } + assert(this.poiRepo, new Error('Expected POI repo to exist')); await this.poiRepo.destroy({ transaction, where: { @@ -763,9 +802,7 @@ group by try { this.storeCache.getModel(entity).set(_id, data, this.blockHeight); - if (this.config.proofOfIndex) { - this.operationStack.put(OperationType.Set, entity, data); - } + this.operationStack?.put(OperationType.Set, entity, data); } catch (e) { throw new Error(`Failed to set Entity ${entity} with _id ${_id}: ${e}`); } @@ -775,10 +812,8 @@ group by try { this.storeCache.getModel(entity).bulkCreate(data, this.blockHeight); - if (this.config.proofOfIndex) { - for (const item of data) { - this.operationStack.put(OperationType.Set, entity, item); - } + for (const item of data) { + this.operationStack?.put(OperationType.Set, entity, item); } } catch (e) { throw new Error(`Failed to bulkCreate Entity ${entity}: ${e}`); @@ -788,10 +823,8 @@ group by bulkUpdate: async (entity: string, data: Entity[], fields?: string[]): Promise => { try { this.storeCache.getModel(entity).bulkUpdate(data, this.blockHeight, fields); - if (this.config.proofOfIndex) { - for (const item of data) { - this.operationStack.put(OperationType.Set, entity, item); - } + for (const item of data) { + this.operationStack?.put(OperationType.Set, entity, item); } } catch (e) { throw new Error(`Failed to bulkCreate Entity ${entity}: ${e}`); @@ -802,9 +835,7 @@ group by try { this.storeCache.getModel(entity).remove(id, this.blockHeight); - if (this.config.proofOfIndex) { - this.operationStack.put(OperationType.Remove, entity, id); - } + this.operationStack?.put(OperationType.Remove, entity, id); } catch (e) { throw new Error(`Failed to remove Entity ${entity} with id ${id}: ${e}`); } diff --git a/packages/node-core/src/indexer/storeCache/cacheModel.ts b/packages/node-core/src/indexer/storeCache/cacheModel.ts index 76fb9ff3ef..92ef58b8a2 100644 --- a/packages/node-core/src/indexer/storeCache/cacheModel.ts +++ b/packages/node-core/src/indexer/storeCache/cacheModel.ts @@ -136,7 +136,7 @@ export class CachedModel< async getOneByField(field: keyof T, value: T[keyof T]): Promise { if (field === 'id') { - return this.get(value.toString()); + return this.get(`${value}`); } else 
{ const oneFromCached = this.getFromCache(field, value, true)[0]; if (oneFromCached) { @@ -361,7 +361,8 @@ export class CachedModel< if ( getValue && !unifiedIds.includes(key) && - ((field === undefined && value === undefined) || + (field === undefined || + value === undefined || (Array.isArray(value) && includes(value, getValue[field])) || isEqual(getValue[field], value)) ) { diff --git a/packages/node-core/src/indexer/storeCache/setValueModel.ts b/packages/node-core/src/indexer/storeCache/setValueModel.ts index e9e114e39f..e116fcd6a4 100644 --- a/packages/node-core/src/indexer/storeCache/setValueModel.ts +++ b/packages/node-core/src/indexer/storeCache/setValueModel.ts @@ -101,7 +101,7 @@ export class SetValueModel { } isMatchData(field?: keyof T, value?: T[keyof T] | T[keyof T][]): boolean { - if (field === undefined && value === undefined) { + if (field === undefined || value === undefined) { return true; } if (Array.isArray(value)) { diff --git a/packages/node-core/src/indexer/storeCache/types.ts b/packages/node-core/src/indexer/storeCache/types.ts index 062e80a999..8a620753b6 100644 --- a/packages/node-core/src/indexer/storeCache/types.ts +++ b/packages/node-core/src/indexer/storeCache/types.ts @@ -56,4 +56,4 @@ export type SetValue = { export type SetData = Record>; -export class GetData extends LRUCache {} +export class GetData extends LRUCache {} diff --git a/packages/node-core/src/indexer/types.ts b/packages/node-core/src/indexer/types.ts index d777e383fb..cdeeaf94ec 100644 --- a/packages/node-core/src/indexer/types.ts +++ b/packages/node-core/src/indexer/types.ts @@ -21,10 +21,15 @@ export interface IProjectNetworkConfig extends ProjectNetworkConfig { chainId: string; } -export interface ISubqueryProject { +export interface ISubqueryProject< + N extends IProjectNetworkConfig = IProjectNetworkConfig, + DS = unknown, + T = unknown, + C = unknown +> { id: string; root: string; - network: Partial; + network: N; dataSources: DS[]; schema: GraphQLSchema; templates: T[]; diff --git a/packages/node-core/src/indexer/worker/worker.builder.ts b/packages/node-core/src/indexer/worker/worker.builder.ts index dbfd4915eb..2680bce6d3 100644 --- a/packages/node-core/src/indexer/worker/worker.builder.ts +++ b/packages/node-core/src/indexer/worker/worker.builder.ts @@ -128,7 +128,10 @@ abstract class WorkerIO { args, }); } catch (e) { - this.logger.error(e as any, `Failed to post message, function="${fnName}", args="${JSON.stringify(args)}"`); + this.logger.error( + e as any, + `Failed to post message, function="${String(fnName)}", args="${JSON.stringify(args)}"` + ); reject(e); } }); diff --git a/packages/node-core/src/utils/object.ts b/packages/node-core/src/utils/object.ts index 127aabc10a..5635efcb7f 100644 --- a/packages/node-core/src/utils/object.ts +++ b/packages/node-core/src/utils/object.ts @@ -7,7 +7,7 @@ export function assign( target: TObject, src: TSource1, src2?: TSource2 -): TObject & TSource1 & TSource2 { +): TObject & TSource1 & (TSource2 | undefined) { return assignWith(target, src, src2, (objValue, srcValue) => (isUndefined(srcValue) ? 
objValue : srcValue)); } diff --git a/packages/node-core/test/v1.0.0/package.json b/packages/node-core/test/v1.0.0/package.json index 6e24d528c9..f99f6b7b35 100644 --- a/packages/node-core/test/v1.0.0/package.json +++ b/packages/node-core/test/v1.0.0/package.json @@ -21,7 +21,7 @@ "devDependencies": { "@polkadot/api": "^10", "@subql/types": "latest", - "typescript": "^4.1.3", + "typescript": "^4.9.5", "@subql/cli": "latest" }, "resolutions": { diff --git a/packages/node/package.json b/packages/node/package.json index 0ca22b2caf..1165cbd2e8 100644 --- a/packages/node/package.json +++ b/packages/node/package.json @@ -48,7 +48,6 @@ "rxjs": "^7.5.2", "sequelize": "6.28.0", "tar": "^6.1.11", - "typescript": "^4.4.4", "vm2": "^3.9.9", "yargs": "^16.2.0" }, diff --git a/packages/node/src/configure/SubqueryProject.ts b/packages/node/src/configure/SubqueryProject.ts index 70d819268b..7988e474a9 100644 --- a/packages/node/src/configure/SubqueryProject.ts +++ b/packages/node/src/configure/SubqueryProject.ts @@ -57,7 +57,7 @@ const NOT_SUPPORT = (name: string) => { export class SubqueryProject { id: string; root: string; - network: Partial; + network: SubstrateProjectNetworkConfig; dataSources: SubqlProjectDs[]; schema: GraphQLSchema; templates: SubqlProjectDsTemplate[]; diff --git a/packages/node/src/indexer/api.service.ts b/packages/node/src/indexer/api.service.ts index d273b3f6ba..0fa3684ef7 100644 --- a/packages/node/src/indexer/api.service.ts +++ b/packages/node/src/indexer/api.service.ts @@ -34,7 +34,7 @@ const logger = getLogger('api'); @Injectable() export class ApiService - extends BaseApiService + extends BaseApiService implements OnApplicationShutdown { private fetchBlocksBatches = SubstrateUtil.fetchBlocksBatches; @@ -43,7 +43,7 @@ export class ApiService networkMeta: NetworkMetadataPayload; constructor( - @Inject('ISubqueryProject') protected project: SubqueryProject, + @Inject('ISubqueryProject') project: SubqueryProject, private connectionPoolService: ConnectionPoolService, private eventEmitter: EventEmitter2, private nodeConfig: NodeConfig, diff --git a/packages/node/src/indexer/fetch.service.spec.ts b/packages/node/src/indexer/fetch.service.spec.ts index 9a78da2420..8b37f673cd 100644 --- a/packages/node/src/indexer/fetch.service.spec.ts +++ b/packages/node/src/indexer/fetch.service.spec.ts @@ -353,7 +353,7 @@ function mockStoreService(): StoreService { getOperationMerkleRoot: () => { return null; }, - } as StoreService; + } as unknown as StoreService; } function mockStoreCache(): StoreCacheService { diff --git a/packages/node/src/indexer/indexer.manager.spec.ts b/packages/node/src/indexer/indexer.manager.spec.ts index ef02e756a1..14f20501a5 100644 --- a/packages/node/src/indexer/indexer.manager.spec.ts +++ b/packages/node/src/indexer/indexer.manager.spec.ts @@ -144,7 +144,12 @@ function createIndexerManager( const dynamicDsService = new DynamicDsService(dsProcessorService, project); const storeCache = new StoreCacheService(sequilize, nodeConfig, eventEmitter); - const storeService = new StoreService(sequilize, nodeConfig, storeCache); + const storeService = new StoreService( + sequilize, + nodeConfig, + storeCache, + project, + ); const poiService = new PoiService(storeCache); const mmrService = new MmrService(nodeConfig, storeCache); const unfinalizedBlocksService = new UnfinalizedBlocksService( diff --git a/packages/node/src/utils/reindex.ts b/packages/node/src/utils/reindex.ts index f997b71e67..d69ecb04ce 100644 --- a/packages/node/src/utils/reindex.ts +++ 
b/packages/node/src/utils/reindex.ts @@ -21,10 +21,6 @@ export async function reindex( sequelize: Sequelize, forceCleanService?: ForceCleanService, ): Promise { - if (!storeService.historical) { - logger.warn('Unable to reindex, historical state not enabled'); - return; - } if (!lastProcessedHeight || lastProcessedHeight < targetBlockHeight) { logger.warn( `Skipping reindexing to block ${targetBlockHeight}: current indexing height ${lastProcessedHeight} is behind requested block`, diff --git a/packages/node/test/projectFixture/v0.2.0/package.json b/packages/node/test/projectFixture/v0.2.0/package.json index 9f35fcf772..cd69eeff03 100644 --- a/packages/node/test/projectFixture/v0.2.0/package.json +++ b/packages/node/test/projectFixture/v0.2.0/package.json @@ -21,7 +21,7 @@ "devDependencies": { "@polkadot/api": "^5.7.1", "@subql/types": "latest", - "typescript": "^4.1.3", + "typescript": "^4.9.5", "@subql/cli": "latest" } } diff --git a/packages/node/test/projectFixture/v0.3.0/package.json b/packages/node/test/projectFixture/v0.3.0/package.json index 9f35fcf772..cd69eeff03 100644 --- a/packages/node/test/projectFixture/v0.3.0/package.json +++ b/packages/node/test/projectFixture/v0.3.0/package.json @@ -21,7 +21,7 @@ "devDependencies": { "@polkadot/api": "^5.7.1", "@subql/types": "latest", - "typescript": "^4.1.3", + "typescript": "^4.9.5", "@subql/cli": "latest" } } diff --git a/packages/node/test/projectFixture/v1.0.0/package.json b/packages/node/test/projectFixture/v1.0.0/package.json index 6e24d528c9..f99f6b7b35 100644 --- a/packages/node/test/projectFixture/v1.0.0/package.json +++ b/packages/node/test/projectFixture/v1.0.0/package.json @@ -21,7 +21,7 @@ "devDependencies": { "@polkadot/api": "^10", "@subql/types": "latest", - "typescript": "^4.1.3", + "typescript": "^4.9.5", "@subql/cli": "latest" }, "resolutions": { diff --git a/packages/query/package.json b/packages/query/package.json index 0ee3900204..36702737db 100644 --- a/packages/query/package.json +++ b/packages/query/package.json @@ -66,7 +66,6 @@ "@types/lodash": "^4.14.178", "@types/rimraf": "^3.0.2", "@types/yargs": "^16.0.4", - "nodemon": "^2.0.15", - "typescript": "^4.4.4" + "nodemon": "^2.0.15" } } diff --git a/packages/validator/fixtures/package.json b/packages/validator/fixtures/package.json index ad8f0d1561..cce3dcc936 100644 --- a/packages/validator/fixtures/package.json +++ b/packages/validator/fixtures/package.json @@ -20,7 +20,7 @@ "license": "Apache-2.0", "devDependencies": { "@subql/types": "latest", - "typescript": "^4.1.3", + "typescript": "^4.9.5", "@subql/cli": "^0.7.3" } } diff --git a/yarn.lock b/yarn.lock index c4050c470a..24e13eab91 100644 --- a/yarn.lock +++ b/yarn.lock @@ -4177,7 +4177,7 @@ __metadata: "@subql/utils": "workspace:*" "@subql/x-merkle-mountain-range": ^2.0.0-0.1.2 "@types/async-lock": ^1 - "@willsoto/nestjs-prometheus": ^4.4.0 + "@willsoto/nestjs-prometheus": ^5.1.1 async-lock: ^1.4.0 async-mutex: ^0.4.0 lodash: ^4.17.21 @@ -4237,7 +4237,6 @@ __metadata: sequelize: 6.28.0 supertest: ^6.2.2 tar: ^6.1.11 - typescript: ^4.4.4 vm2: ^3.9.9 yargs: ^16.2.0 bin: @@ -4284,7 +4283,6 @@ __metadata: rimraf: ^3.0.2 rxjs: ^7.5.2 subscriptions-transport-ws: ^0.11.0 - typescript: ^4.4.4 yargs: ^16.2.0 bin: subql-query: ./bin/run @@ -5633,6 +5631,16 @@ __metadata: languageName: node linkType: hard +"@willsoto/nestjs-prometheus@npm:^5.1.1": + version: 5.1.1 + resolution: "@willsoto/nestjs-prometheus@npm:5.1.1" + peerDependencies: + "@nestjs/common": ^7.0.0 || ^8.0.0 || ^9.0.0 + prom-client: ^13.0.0 || ^14.0.0 
+ checksum: fa60f20442605c4243bc21b829be80a53ac2a0d1b7c5ab5c816048970ae57486cbba38c9d9707c989608501b27aa9a50acb5046dca76d19900a5719e068f57ae + languageName: node + linkType: hard + "@wry/context@npm:^0.6.0": version: 0.6.1 resolution: "@wry/context@npm:0.6.1" @@ -16766,7 +16774,7 @@ __metadata: ts-loader: ^9.2.6 ts-node: ^10.4.0 tsconfig-paths: ^3.12.0 - typescript: ^4.4.4 + typescript: ^4.9.5 languageName: unknown linkType: soft @@ -17626,13 +17634,13 @@ __metadata: languageName: node linkType: hard -"typescript@npm:^4.4.4": - version: 4.6.4 - resolution: "typescript@npm:4.6.4" +"typescript@npm:^4.9.5": + version: 4.9.5 + resolution: "typescript@npm:4.9.5" bin: tsc: bin/tsc tsserver: bin/tsserver - checksum: e7bfcc39cd4571a63a54e5ea21f16b8445268b9900bf55aee0e02ad981be576acc140eba24f1af5e3c1457767c96cea6d12861768fb386cf3ffb34013718631a + checksum: ee000bc26848147ad423b581bd250075662a354d84f0e06eb76d3b892328d8d4440b7487b5a83e851b12b255f55d71835b008a66cbf8f255a11e4400159237db languageName: node linkType: hard @@ -17646,13 +17654,13 @@ __metadata: languageName: node linkType: hard -"typescript@patch:typescript@^4.4.4#~builtin": - version: 4.6.4 - resolution: "typescript@patch:typescript@npm%3A4.6.4#~builtin::version=4.6.4&hash=493e53" +"typescript@patch:typescript@^4.9.5#~builtin": + version: 4.9.5 + resolution: "typescript@patch:typescript@npm%3A4.9.5#~builtin::version=4.9.5&hash=493e53" bin: tsc: bin/tsc tsserver: bin/tsserver - checksum: 8cff08bf66d9ecfbf9fcc5edde04a5a7923e6cac3b21d99b4e9a06973bf5bd7f9a83ec7eed24129c1b9e13fd861de8c1070110d4b9ce9f18ab57c6999e9c9a6f + checksum: 2eee5c37cad4390385db5db5a8e81470e42e8f1401b0358d7390095d6f681b410f2c4a0c496c6ff9ebd775423c7785cdace7bcdad76c7bee283df3d9718c0f20 languageName: node linkType: hard
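Most of the strict-mode changes in this diff follow a single pattern: a value that only exists after init() is stored in an optional private field (_client, _metadata, _metaDataRepo, ...) and read through a getter that throws a descriptive error if accessed too early, instead of relying on definite assignment. The sketch below illustrates that pattern only; the class and member names are made up for the example and are not taken from the SubQuery codebase.

class NotInitializedError extends Error {
  constructor(service: string) {
    super(`${service} has not been initialized`);
  }
}

class LazyService {
  // Optional until init() runs, which satisfies strictPropertyInitialization.
  private _client?: string;

  init(client: string): void {
    this._client = client;
  }

  // All reads go through this getter, so the undefined check lives in one place.
  private get client(): string {
    if (this._client === undefined) {
      throw new NotInitializedError('LazyService');
    }
    return this._client;
  }

  doWork(): string {
    // Fails fast with a clear message instead of surfacing an undefined value later.
    return this.client.toUpperCase();
  }
}

Compared with sprinkling non-null assertions (as is still done for this.queue.freeSpace! and nodeConfig.workers! above), this keeps the runtime failure mode explicit while the getter's return type stays narrow.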