From 72d457134c6abd258d7e587243983e2f2f0e52ba Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Emanuel=20Tesa=C5=99?=
Date: Tue, 31 Oct 2023 09:19:15 +0100
Subject: [PATCH] Fix tests and CI

---
 .github/workflows/main.yml             |  2 +
 src/update-feeds/dapi-data-registry.ts | 13 +++--
 src/update-feeds/update-feeds.test.ts  | 66 +++++++++++++++-----------
 test/fixtures/dapi-data-registry.ts    | 40 +++++++++++-----
 test/utils.ts                          |  2 +-
 5 files changed, 78 insertions(+), 45 deletions(-)

diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index 56c4d32f..476d3414 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -61,6 +61,8 @@ jobs:
           cache: 'pnpm'
       - name: Install Dependencies
         run: pnpm install --frozen-lockfile
+      - name: Compile contracts
+        run: pnpm run contracts:compile
       - name: Start Hardhat
         run: pnpm dev:eth-node&
       - name: Test E2E
diff --git a/src/update-feeds/dapi-data-registry.ts b/src/update-feeds/dapi-data-registry.ts
index 4a33701c..fba299a8 100644
--- a/src/update-feeds/dapi-data-registry.ts
+++ b/src/update-feeds/dapi-data-registry.ts
@@ -9,7 +9,7 @@ export const getDapiDataRegistry = (address: string, provider: ethers.providers.
 export const verifyMulticallResponse = (
   response: Awaited<ReturnType<DapiDataRegistry['callStatic']['tryMulticall']>>
 ) => {
-  const [successes, returndata] = response;
+  const { successes, returndata } = response;
   if (!successes.every(Boolean)) throw new Error('One of the multicalls failed');
   return returndata;
 };
@@ -19,7 +19,6 @@ export const decodeDapisCountResponse = (dapiDataRegistry: DapiDataRegistry, dap
   const dapisCount = dapiDataRegistry.interface.decodeFunctionResult('dapisCount', dapisCountReturndata)[0] as Awaited<
     ReturnType<DapiDataRegistry['dapisCount']>
   >;
-
   return dapisCount.toNumber();
 };
 
@@ -36,10 +35,16 @@ export const decodeReadDapiWithIndexResponse = (
 
   // Ethers responses are returned as a combination of array and object. When such object is logged, only the array part
   // is logged. To make the logs more readable, we convert the object part to a plain object.
+  const { deviationReference, deviationThresholdInPercentage, heartbeatInterval } = updateParameters;
+  const { value, timestamp } = dataFeedValue;
   return {
     dapiName,
-    updateParameters,
-    dataFeedValue,
+    updateParameters: {
+      deviationReference,
+      deviationThresholdInPercentage,
+      heartbeatInterval,
+    },
+    dataFeedValue: { value, timestamp },
     dataFeed,
     signedApiUrls,
   };
diff --git a/src/update-feeds/update-feeds.test.ts b/src/update-feeds/update-feeds.test.ts
index 8ad93f71..526b6c9c 100644
--- a/src/update-feeds/update-feeds.test.ts
+++ b/src/update-feeds/update-feeds.test.ts
@@ -1,6 +1,9 @@
 import { ethers } from 'ethers';
 
-import { generateMockDapiDataRegistry, generateReadDapisResponse } from '../../test/fixtures/dapi-data-registry';
+import {
+  generateMockDapiDataRegistry,
+  generateReadDapiWithIndexResponse,
+} from '../../test/fixtures/dapi-data-registry';
 import { allowPartial } from '../../test/utils';
 import type { DapiDataRegistry } from '../../typechain-types';
 import type { Chain } from '../config/schema';
@@ -38,7 +41,7 @@ describe(startUpdateFeedLoops.name, () => {
 
     // Expect the intervals to be called with the correct stagger time.
     expect(setInterval).toHaveBeenCalledTimes(2);
-    expect(intervalCalls[1]! - intervalCalls[0]!).toBeGreaterThanOrEqual(40); // Reserving 10s as the buffer for computing stagger time.
+    expect(intervalCalls[1]! - intervalCalls[0]!).toBeGreaterThanOrEqual(40); // Reserving 10ms as the buffer for computing stagger time.
 
     // Expect the logs to be called with the correct context.
     expect(logger.debug).toHaveBeenCalledTimes(3);
@@ -92,21 +95,21 @@ describe(startUpdateFeedLoops.name, () => {
 
     // Expect the logs to be called with the correct context.
     expect(logger.debug).toHaveBeenCalledTimes(4);
-    expect(logger.debug).toHaveBeenCalledWith('Starting update loops for chain', {
+    expect(logger.debug).toHaveBeenNthCalledWith(1, 'Starting update loops for chain', {
       chainId: '123',
       staggerTime: 100,
       providerNames: ['first-provider'],
     });
-    expect(logger.debug).toHaveBeenCalledWith('Starting update loops for chain', {
+    expect(logger.debug).toHaveBeenNthCalledWith(2, 'Starting update feed loop', {
+      chainId: '123',
+      providerName: 'first-provider',
+    });
+    expect(logger.debug).toHaveBeenNthCalledWith(3, 'Starting update loops for chain', {
       chainId: '456',
       staggerTime: 100,
       providerNames: ['another-provider'],
     });
-    expect(logger.debug).toHaveBeenCalledWith('Starting update feed loop', {
-      chainId: '123',
-      providerName: 'first-provider',
-    });
-    expect(logger.debug).toHaveBeenCalledWith('Starting update feed loop', {
+    expect(logger.debug).toHaveBeenNthCalledWith(4, 'Starting update feed loop', {
       chainId: '456',
       providerName: 'another-provider',
     });
@@ -119,7 +122,7 @@ describe(runUpdateFeed.name, () => {
     jest
       .spyOn(dapiDataRegistryModule, 'getDapiDataRegistry')
       .mockReturnValue(dapiDataRegistry as unknown as DapiDataRegistry);
-    dapiDataRegistry.readDapis.mockRejectedValueOnce(new Error('provider-error'));
+    dapiDataRegistry.callStatic.tryMulticall.mockRejectedValueOnce(new Error('provider-error'));
     jest.spyOn(logger, 'error');
 
     await runUpdateFeed(
@@ -145,16 +148,19 @@ describe(runUpdateFeed.name, () => {
 
   it('fetches other batches in a staggered way and logs errors', async () => {
     // Prepare the mocked contract so it returns three batches (of size 1) of dAPIs and the second batch fails to load.
-    const firstBatch = generateReadDapisResponse();
-    const thirdBatch = generateReadDapisResponse();
+    const firstBatch = generateReadDapiWithIndexResponse();
+    const thirdBatch = generateReadDapiWithIndexResponse();
     const dapiDataRegistry = generateMockDapiDataRegistry();
     jest
       .spyOn(dapiDataRegistryModule, 'getDapiDataRegistry')
       .mockReturnValue(dapiDataRegistry as unknown as DapiDataRegistry);
-    dapiDataRegistry.readDapis.mockResolvedValueOnce(firstBatch);
-    dapiDataRegistry.readDapis.mockRejectedValueOnce(new Error('provider-error'));
-    dapiDataRegistry.readDapis.mockResolvedValueOnce(thirdBatch);
-    dapiDataRegistry.dapisCount.mockResolvedValueOnce(ethers.BigNumber.from(3));
+    dapiDataRegistry.interface.decodeFunctionResult.mockImplementation((_fn, value) => value);
+    dapiDataRegistry.callStatic.tryMulticall.mockResolvedValueOnce({
+      successes: [true, true],
+      returndata: [[ethers.BigNumber.from(3)], firstBatch],
+    });
+    dapiDataRegistry.callStatic.tryMulticall.mockResolvedValueOnce({ successes: [false], returndata: [] });
+    dapiDataRegistry.callStatic.tryMulticall.mockResolvedValueOnce({ successes: [true], returndata: [thirdBatch] });
     const sleepCalls = [] as number[];
     const originalSleep = utilsModule.sleep;
     jest.spyOn(utilsModule, 'sleep').mockImplementation(async (ms) => {
@@ -179,36 +185,42 @@ describe(runUpdateFeed.name, () => {
 
     // Expect the contract to fetch the batches to be called with the correct stagger time.
     expect(utilsModule.sleep).toHaveBeenCalledTimes(3);
-    expect(sleepCalls[0]).toBeGreaterThanOrEqual(40); // Reserving 10s as the buffer for computing stagger time.
+    expect(sleepCalls[0]).toBeGreaterThanOrEqual(40); // Reserving 10ms as the buffer for computing stagger time.
     expect(sleepCalls[1]).toBeGreaterThanOrEqual(0);
     expect(sleepCalls[2]).toBe(49.999_999_999_999_99); // Stagger time is actually 150 / 3 = 50, but there is an rounding error.
 
     // Expect the logs to be called with the correct context.
     expect(logger.error).toHaveBeenCalledTimes(1);
-    expect(logger.error).toHaveBeenCalledWith('Failed to get active dAPIs batch', new Error('provider-error'), {
-      chainId: '123',
-      providerName: 'provider-name',
-    });
-    expect(logger.debug).toHaveBeenCalledTimes(4);
-    expect(logger.debug).toHaveBeenCalledWith('Fetching first batch of dAPIs batches', {
+    expect(logger.error).toHaveBeenCalledWith(
+      'Failed to get active dAPIs batch',
+      new Error('One of the multicalls failed'),
+      {
+        chainId: '123',
+        providerName: 'provider-name',
+      }
+    );
+    expect(logger.debug).toHaveBeenCalledTimes(6);
+    expect(logger.debug).toHaveBeenNthCalledWith(1, 'Fetching first batch of dAPIs batches', {
       chainId: '123',
       providerName: 'provider-name',
     });
-    expect(logger.debug).toHaveBeenCalledWith('Fetching batches of active dAPIs', {
+    expect(logger.debug).toHaveBeenNthCalledWith(2, 'Processing batch of active dAPIs', expect.anything());
+    expect(logger.debug).toHaveBeenNthCalledWith(3, 'Fetching batches of active dAPIs', {
       batchesCount: 3,
-      staggerTime: 49.999_999_999_999_99,
       chainId: '123',
       providerName: 'provider-name',
+      staggerTime: 49.999_999_999_999_99,
     });
-    expect(logger.debug).toHaveBeenCalledWith('Fetching batch of active dAPIs', {
+    expect(logger.debug).toHaveBeenNthCalledWith(4, 'Fetching batch of active dAPIs', {
       batchIndex: 1,
       chainId: '123',
       providerName: 'provider-name',
     });
-    expect(logger.debug).toHaveBeenCalledWith('Fetching batch of active dAPIs', {
+    expect(logger.debug).toHaveBeenNthCalledWith(5, 'Fetching batch of active dAPIs', {
       batchIndex: 2,
       chainId: '123',
       providerName: 'provider-name',
     });
+    expect(logger.debug).toHaveBeenNthCalledWith(6, 'Processing batch of active dAPIs', expect.anything());
   });
 });
diff --git a/test/fixtures/dapi-data-registry.ts b/test/fixtures/dapi-data-registry.ts
index bb6f1dbb..0dc01de4 100644
--- a/test/fixtures/dapi-data-registry.ts
+++ b/test/fixtures/dapi-data-registry.ts
@@ -1,21 +1,35 @@
+import { ethers } from 'ethers';
+
+import type { ReadDapiWithIndexResponse } from '../../src/update-feeds/dapi-data-registry';
 import type { DapiDataRegistry } from '../../typechain-types';
+import type { DeepPartial } from '../utils';
 
-export const generateReadDapisResponse = () => [
-  {
-    totalCount: 1,
-    dapiNames: ['MOCK_FEED'],
-    dataFeedIds: ['0xebba8507d616ed80766292d200a3598fdba656d9938cecc392765d4a284a69a4'],
-    updateParameters: [{ deviationThresholdInPercentage: 0.5, deviationReference: 0.5, heartbeatInterval: 100 }],
-    // NOTE: We will need to decode this from the contract, because it will store the template IDs as encoded bytes.
-    dataFeedTemplateIds: [['0xcc35bd1800c06c12856a87311dd95bfcbb3add875844021d59a929d79f3c99bd']],
-    signedApiUrls: [['http://localhost:8080']],
-    airnodeAddresses: ['0xbF3137b0a7574563a23a8fC8badC6537F98197CC'],
+export const generateReadDapiWithIndexResponse = (): ReadDapiWithIndexResponse => ({
+  dapiName: 'MOCK_FEED',
+  updateParameters: {
+    deviationThresholdInPercentage: ethers.BigNumber.from(0.5 * 1e8),
+    deviationReference: ethers.BigNumber.from(0.5 * 1e8),
+    heartbeatInterval: 100,
+  },
+  dataFeedValue: {
+    value: ethers.BigNumber.from(123 * 1e6),
+    timestamp: 1_629_811_200,
   },
-];
+  dataFeed: '0xebba8507d616ed80766292d200a3598fdba656d9938cecc392765d4a284a69a4',
+  signedApiUrls: ['http://localhost:8080'],
+});
 
 export const generateMockDapiDataRegistry = () => {
   return {
-    readDapis: jest.fn(),
+    interface: {
+      encodeFunctionData: jest.fn(),
+      decodeFunctionResult: jest.fn(),
+    },
+    callStatic: {
+      tryMulticall: jest.fn(),
+    },
+    tryMulticall: jest.fn(),
+    readDapiWithIndex: jest.fn(),
     dapisCount: jest.fn(),
-  } satisfies Partial<DapiDataRegistry>;
+  } satisfies DeepPartial<DapiDataRegistry>;
 };
diff --git a/test/utils.ts b/test/utils.ts
index e6f30ce7..28b6d856 100644
--- a/test/utils.ts
+++ b/test/utils.ts
@@ -9,7 +9,7 @@ export const signData = async (signer: ethers.Signer, templateId: string, timest
 
 export const generateRandomBytes32 = () => ethers.utils.hexlify(ethers.utils.randomBytes(32));
 
-type DeepPartial<T> = T extends object
+export type DeepPartial<T> = T extends object
   ? {
       [P in keyof T]?: DeepPartial<T[P]>;
     }
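A note for reviewers, outside the patch itself: the fixture's nested `interface`/`callStatic` stubs are what the switch from `Partial<DapiDataRegistry>` to `DeepPartial<DapiDataRegistry>` allows (a plain `Partial` only makes the top-level keys optional), and the shape those stubs return is exactly what `verifyMulticallResponse` and `decodeDapisCountResponse` consume. The sketch below shows that flow in isolation; it is not part of the patch, and the standalone `it` block and the import paths (which assume the file sits next to `update-feeds.test.ts`) are illustrative assumptions.

```ts
// Sketch only (not from the patch): wiring the mocked registry through the multicall helpers.
import { ethers } from 'ethers';

import { generateMockDapiDataRegistry } from '../../test/fixtures/dapi-data-registry';
import type { DapiDataRegistry } from '../../typechain-types';

import { decodeDapisCountResponse, verifyMulticallResponse } from './dapi-data-registry';

it('unwraps a successful multicall and decodes the dAPI count', async () => {
  const dapiDataRegistry = generateMockDapiDataRegistry();
  // Pass returndata through untouched, as the updated test above does.
  dapiDataRegistry.interface.decodeFunctionResult.mockImplementation((_fn, value) => value);
  dapiDataRegistry.callStatic.tryMulticall.mockResolvedValueOnce({
    successes: [true],
    returndata: [[ethers.BigNumber.from(3)]],
  });

  const response = await dapiDataRegistry.callStatic.tryMulticall([]);
  // Throws 'One of the multicalls failed' if any entry in `successes` is false.
  const returndata = verifyMulticallResponse(response);

  expect(decodeDapisCountResponse(dapiDataRegistry as unknown as DapiDataRegistry, returndata[0]!)).toBe(3);
});
```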