Use the contract in the implementation, update tests
Siegrift committed Oct 26, 2023
1 parent aea2e35 commit 66756e9
Showing 11 changed files with 105 additions and 41 deletions.
6 changes: 5 additions & 1 deletion README.md
@@ -113,7 +113,7 @@ A record of chain configurations. The record key is the chain ID. For example:
}
```

##### `contracts` _(optional)_
##### `contracts`

A record of contract addresses used by Airseeker. If not specified, the addresses are loaded from
[Airnode protocol v1](https://github.com/api3dao/airnode-protocol-v1).
@@ -123,6 +123,10 @@ A record of contract addresses used by Airseeker. If not specified, the addresses
The address of the Api3ServerV1 contract. If not specified, the address is loaded from the Airnode protocol v1
repository.

###### DapiDataRegistry

The address of the DapiDataRegistry contract.

##### `providers`

A record of providers. The record key is the provider name. Provider name is only used for internal purposes and to
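For reference, only `Api3ServerV1` may be omitted and defaulted from the Airnode protocol v1 references; `DapiDataRegistry` has to be supplied explicitly. A minimal sketch of a chain entry relying on that default (partial object in TypeScript notation; the provider name and URL are placeholders, and the address is the example value used throughout this commit):

```ts
// Sketch only: a chain entry that omits Api3ServerV1 so the schema fills it in
// from the airnode-protocol-v1 references for this chain ID.
const chain = {
  contracts: {
    DapiDataRegistry: '0xDD78254f864F97f65e2d86541BdaEf88A504D2B2',
  },
  providers: { mainnet: { url: 'https://rpc.example.com' } }, // placeholder provider
  dataFeedBatchSize: 10,
  dataFeedUpdateInterval: 60,
};
```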
3 changes: 2 additions & 1 deletion config/airseeker.example.json
@@ -3,7 +3,8 @@
"chains": {
"31337": {
"contracts": {
"Api3ServerV1": "0xe7f1725E7734CE288F8367e1Bb143E90bb3F0512"
"Api3ServerV1": "0xe7f1725E7734CE288F8367e1Bb143E90bb3F0512",
"DapiDataRegistry": "0xDD78254f864F97f65e2d86541BdaEf88A504D2B2"
},
"__Temporary__DapiDataRegistry": {
"airnodeToSignedApiUrl": {
3 changes: 2 additions & 1 deletion jest-e2e.config.js
@@ -9,12 +9,13 @@ module.exports = {
bail: true,
collectCoverage: true,
coverageDirectory: 'coverage',
coveragePathIgnorePatterns: ['node_modules', '<rootDir>/typechain-types'], // Coverage is collected for all files imported by the tests. We want to exclude files generated by Typechain.
coverageProvider: 'v8',
modulePathIgnorePatterns: ['<rootDir>/.build', '<rootDir>/dist/', '<rootDir>/build/'],
preset: 'ts-jest',
restoreMocks: true,
setupFiles: [join(__dirname, './jest.setup.js')],
testEnvironment: 'jest-environment-node',
testMatch: ['**/?(*.)+(feature).[t]s?(x)'],
testPathIgnorePatterns: ['<rootDir>/.build', '<rootDir>/dist/', '<rootDir>/build/', '<rootDir>/typechain-types'],
verbose: true,
};
4 changes: 3 additions & 1 deletion jest-unit.config.js
@@ -6,15 +6,17 @@ const { join } = require('node:path');
* @type {import('jest').Config}
*/
module.exports = {
// TODO: Refactor these test targets with better defaults.
bail: true,
collectCoverage: true,
coverageDirectory: 'coverage',
coveragePathIgnorePatterns: ['node_modules', '<rootDir>/typechain-types'], // Coverage is collected for all files imported by the tests. We want to exclude files generated by Typechain.
coverageProvider: 'v8',
modulePathIgnorePatterns: ['<rootDir>/.build', '<rootDir>/dist/', '<rootDir>/build/'],
preset: 'ts-jest',
restoreMocks: true,
setupFiles: [join(__dirname, './jest.setup.js')],
testEnvironment: 'jest-environment-node',
testMatch: ['**/?(*.)+(spec|test).[t]s?(x)'],
testPathIgnorePatterns: ['<rootDir>/.build', '<rootDir>/dist/', '<rootDir>/build/', '<rootDir>/typechain-types'],
verbose: true,
};
9 changes: 8 additions & 1 deletion src/config/schema.test.ts
@@ -47,6 +47,7 @@ describe('chains schema', () => {
'31337': {
contracts: {
Api3ServerV1: '0xe7f1725E7734CE288F8367e1Bb143E90bb3F0512',
DapiDataRegistry: '0xDD78254f864F97f65e2d86541BdaEf88A504D2B2',
},
providers: {
hardhat: {
@@ -68,6 +69,7 @@

expect(parsed['31337']!.contracts).toStrictEqual({
Api3ServerV1: '0xe7f1725E7734CE288F8367e1Bb143E90bb3F0512',
DapiDataRegistry: '0xDD78254f864F97f65e2d86541BdaEf88A504D2B2',
});
});

@@ -87,13 +89,17 @@
gasSettings,
dataFeedBatchSize: 10,
dataFeedUpdateInterval: 60,
contracts: {
DapiDataRegistry: '0xDD78254f864F97f65e2d86541BdaEf88A504D2B2',
},
},
};

const parsed = chainsSchema.parse(chains);

expect(parsed['1']!.contracts).toStrictEqual({
Api3ServerV1: '0x3dEC619dc529363767dEe9E71d8dD1A5bc270D76',
DapiDataRegistry: '0xDD78254f864F97f65e2d86541BdaEf88A504D2B2',
});
});

@@ -121,7 +127,7 @@ describe('chains schema', () => {
{
code: 'custom',
message: 'Invalid contract addresses',
path: ['chains', '31337', 'contracts'],
path: ['31337', 'contracts', 'Api3ServerV1'],
},
])
);
@@ -132,6 +138,7 @@ describe('chains schema', () => {
'31337': {
contracts: {
Api3ServerV1: '0xInvalid',
DapiDataRegistry: '0xDD78254f864F97f65e2d86541BdaEf88A504D2B2',
},
providers: {
hardhat: {
5 changes: 4 additions & 1 deletion src/config/schema.ts
@@ -22,6 +22,7 @@ export type Provider = z.infer<typeof providerSchema>;
export const optionalContractsSchema = z
.object({
Api3ServerV1: evmAddressSchema.optional(),
DapiDataRegistry: evmAddressSchema, // TODO: Make optional and load from "airnode-protocol-v1" or some other location and document it accordingly.
})
.strict();

@@ -87,13 +88,15 @@ export const chainsSchema = z.record(optionalChainSchema).transform((chains, ctx
Object.entries(chains).map(([chainId, chain]) => {
const { contracts } = chain;
const parsedContracts = contractsSchema.safeParse({
...contracts,
Api3ServerV1: contracts?.Api3ServerV1 ?? references.Api3ServerV1[chainId],
});
if (!parsedContracts.success) {
ctx.addIssue({
code: 'custom',
message: 'Invalid contract addresses',
path: ['chains', chainId, 'contracts'],
// Show at least the first error.
path: [chainId, 'contracts', ...parsedContracts.error.errors[0]!.path],
});

return z.NEVER;
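The reworked issue path above ("Show at least the first error") relies on zod exposing the offending key via `error.errors[0].path`. A standalone sketch of that behaviour, using a simple length check as a stand-in for `evmAddressSchema`:

```ts
import { z } from 'zod';

// Stand-in schema: the real evmAddressSchema is stricter than a length check.
const contracts = z
  .object({ Api3ServerV1: z.string().length(42), DapiDataRegistry: z.string().length(42) })
  .strict();

const result = contracts.safeParse({
  Api3ServerV1: '0xInvalid',
  DapiDataRegistry: '0xDD78254f864F97f65e2d86541BdaEf88A504D2B2',
});
if (!result.success) {
  // Prints ['Api3ServerV1'], the key that the chains schema above spreads into
  // ['31337', 'contracts', 'Api3ServerV1'].
  console.log(result.error.errors[0]!.path);
}
```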
9 changes: 9 additions & 0 deletions src/update-feeds/dapi-data-registry.ts
@@ -0,0 +1,9 @@
import type { ethers } from 'ethers';

// TODO: The contract is not yet published, so we generate the Typechain artifacts locally and import it from there.
import { type DapiDataRegistry, DapiDataRegistry__factory } from '../../typechain-types';

export const getDapiDataRegistry = (address: string, provider: ethers.providers.StaticJsonRpcProvider) =>
DapiDataRegistry__factory.connect(address, provider);

export type ReadDapisResponse = Awaited<ReturnType<DapiDataRegistry['readDapis']>>;
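A usage sketch of the helper above, mirroring the two calls `runUpdateFeed` makes for the first batch later in this commit (the RPC URL and contract address are placeholders, and the import path assumes the sketch lives next to `dapi-data-registry.ts`):

```ts
import { ethers } from 'ethers';

import { getDapiDataRegistry } from './dapi-data-registry';

// Placeholder connection details; in Airseeker these come from the chain config.
const provider = new ethers.providers.StaticJsonRpcProvider({ url: 'https://rpc.example.com' });
const dapiDataRegistry = getDapiDataRegistry('0xDD78254f864F97f65e2d86541BdaEf88A504D2B2', provider);

export const readFirstBatch = async (batchSize: number) => {
  // Two separate RPC calls for now; combining them via multicall is a noted TODO.
  const batch = await dapiDataRegistry.readDapis(0, batchSize);
  const totalDapisCount = (await dapiDataRegistry.dapisCount()).toNumber();
  return { batch, totalDapisCount };
};
```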
48 changes: 37 additions & 11 deletions src/update-feeds/update-feeds.test.ts
@@ -1,10 +1,14 @@
import { ethers } from 'ethers';

import { generateMockDapiDataRegistry, generateReadDapisResponse } from '../../test/fixtures/dapi-data-registry';
import { allowPartial } from '../../test/utils';
import type { DapiDataRegistry } from '../../typechain-types';
import type { Chain } from '../config/schema';
import { logger } from '../logger';
import * as stateModule from '../state';
import * as utilsModule from '../utils';

import * as contractMockModule from './temporary-contract-mock';
import * as dapiDataRegistryModule from './dapi-data-registry';
import { runUpdateFeed, startUpdateFeedsLoops } from './update-feeds';

describe(startUpdateFeedsLoops.name, () => {
@@ -34,7 +38,7 @@ describe(startUpdateFeedsLoops.name, () => {

// Expect the intervals to be called with the correct stagger time.
expect(setInterval).toHaveBeenCalledTimes(2);
expect(intervalCalls[1]! - intervalCalls[0]!).toBeGreaterThanOrEqual(50);
expect(intervalCalls[1]! - intervalCalls[0]!).toBeGreaterThanOrEqual(40); // Reserving 10 ms as a buffer for computing the stagger time.

// Expect the logs to be called with the correct context.
expect(logger.debug).toHaveBeenCalledTimes(3);
@@ -111,12 +115,23 @@ describe(startUpdateFeedsLoops.name, () => {

describe(runUpdateFeed.name, () => {
it('aborts when fetching first dAPIs batch fails', async () => {
jest.spyOn(contractMockModule, 'getStaticActiveDapis').mockRejectedValue(new Error('provider-error'));
const dapiDataRegistry = generateMockDapiDataRegistry();
jest
.spyOn(dapiDataRegistryModule, 'getDapiDataRegistry')
.mockReturnValue(dapiDataRegistry as unknown as DapiDataRegistry);
dapiDataRegistry.readDapis.mockRejectedValueOnce(new Error('provider-error'));
jest.spyOn(logger, 'error');

await runUpdateFeed(
'provider-name',
allowPartial<Chain>({ dataFeedBatchSize: 2, dataFeedUpdateInterval: 10 }),
allowPartial<Chain>({
dataFeedBatchSize: 2,
dataFeedUpdateInterval: 10,
providers: { ['provider-name']: { url: 'provider-url' } },
contracts: {
DapiDataRegistry: '0xDD78254f864F97f65e2d86541BdaEf88A504D2B2',
},
}),
'123'
);

@@ -130,24 +145,35 @@ describe(runUpdateFeed.name, () => {

it('fetches other batches in a staggered way and logs errors', async () => {
// Prepare the mocked contract so it returns three batches (of size 1) of dAPIs and the second batch fails to load.
const mockedFeed = await contractMockModule.getStaticActiveDapis(0, 0);
const firstBatch = { ...mockedFeed, totalCount: 3 };
const thirdBatch = { ...mockedFeed, totalCount: 3 };
const firstBatch = generateReadDapisResponse();
const thirdBatch = generateReadDapisResponse();
const dapiDataRegistry = generateMockDapiDataRegistry();
jest
.spyOn(dapiDataRegistryModule, 'getDapiDataRegistry')
.mockReturnValue(dapiDataRegistry as unknown as DapiDataRegistry);
dapiDataRegistry.readDapis.mockResolvedValueOnce(firstBatch);
dapiDataRegistry.readDapis.mockRejectedValueOnce(new Error('provider-error'));
dapiDataRegistry.readDapis.mockResolvedValueOnce(thirdBatch);
dapiDataRegistry.dapisCount.mockResolvedValueOnce(ethers.BigNumber.from(3));
const sleepCalls = [] as number[];
const originalSleep = utilsModule.sleep;
jest.spyOn(utilsModule, 'sleep').mockImplementation(async (ms) => {
sleepCalls.push(ms);
return originalSleep(ms);
});
jest.spyOn(contractMockModule, 'getStaticActiveDapis').mockResolvedValueOnce(firstBatch);
jest.spyOn(contractMockModule, 'getStaticActiveDapis').mockRejectedValueOnce(new Error('provider-error'));
jest.spyOn(contractMockModule, 'getStaticActiveDapis').mockResolvedValueOnce(thirdBatch);
jest.spyOn(logger, 'debug');
jest.spyOn(logger, 'error');

await runUpdateFeed(
'provider-name',
allowPartial<Chain>({ dataFeedBatchSize: 1, dataFeedUpdateInterval: 0.15 }),
allowPartial<Chain>({
dataFeedBatchSize: 1,
dataFeedUpdateInterval: 0.15,
providers: { ['provider-name']: { url: 'provider-url' } },
contracts: {
DapiDataRegistry: '0xDD78254f864F97f65e2d86541BdaEf88A504D2B2',
},
}),
'123'
);

35 changes: 21 additions & 14 deletions src/update-feeds/update-feeds.ts
@@ -1,12 +1,13 @@
import { go } from '@api3/promise-utils';
import { ethers } from 'ethers';
import { range, size } from 'lodash';

import type { Chain } from '../config/schema';
import { logger } from '../logger';
import { getState } from '../state';
import { isFulfilled, sleep } from '../utils';

import { type ActiveDapisBatch, getStaticActiveDapis } from './temporary-contract-mock';
import { getDapiDataRegistry, type ReadDapisResponse } from './dapi-data-registry';

export const startUpdateFeedsLoops = async () => {
const state = getState();
@@ -35,21 +36,33 @@ export const startUpdateFeedsLoops = async () => {
};

export const runUpdateFeed = async (providerName: string, chain: Chain, chainId: string) => {
const { dataFeedBatchSize, dataFeedUpdateInterval } = chain;
const { dataFeedBatchSize, dataFeedUpdateInterval, providers, contracts } = chain;
// TODO: Consider adding a start timestamp (as ID) to the logs to identify batches from this runUpdateFeed tick.
const baseLogContext = { chainId, providerName };

// Create a provider and connect it to the DapiDataRegistry contract.
const provider = new ethers.providers.StaticJsonRpcProvider(providers[providerName]);
const dapiDataRegistry = getDapiDataRegistry(contracts.DapiDataRegistry, provider);

logger.debug(`Fetching first batch of dAPIs batches`, baseLogContext);
const firstBatchStartTime = Date.now();
const goFirstBatch = await go(async () => getActiveDapiBatch(chain));
const goFirstBatch = await go(async () => {
// TODO: Use multicall to fetch this in a single RPC call.
return {
batch: await dapiDataRegistry.readDapis(0, dataFeedBatchSize),
// eslint-disable-next-line unicorn/no-await-expression-member
totalDapisCount: (await dapiDataRegistry.dapisCount()).toNumber(),
};
});
if (!goFirstBatch.success) {
logger.error(`Failed to get first active dAPIs batch`, goFirstBatch.error, baseLogContext);
return;
}
const processFirstBatchPromise = processBatch(goFirstBatch.data);
const { batch: firstBatch, totalDapisCount: totalCount } = goFirstBatch.data;
const processFirstBatchPromise = processBatch(firstBatch);

// Calculate the stagger time between the rest of the batches.
const batchesCount = goFirstBatch.data.totalCount / dataFeedBatchSize;
const batchesCount = totalCount / dataFeedBatchSize;
const staggerTime = batchesCount <= 1 ? 0 : (dataFeedUpdateInterval / batchesCount) * 1000;

// Wait the remaining stagger time required after fetching the first batch.
@@ -63,15 +76,15 @@ await sleep((batchIndex - 1) * staggerTime);
await sleep((batchIndex - 1) * staggerTime);

logger.debug(`Fetching batch of active dAPIs`, { batchIndex, ...baseLogContext });
return getActiveDapiBatch(chain, batchIndex * dataFeedBatchSize);
return dapiDataRegistry.readDapis(batchIndex * dataFeedBatchSize, dataFeedBatchSize);
})
);
for (const batch of otherBatches.filter((batch) => !isFulfilled(batch))) {
logger.error(`Failed to get active dAPIs batch`, (batch as PromiseRejectedResult).reason, baseLogContext);
}
const processOtherBatchesPromises = otherBatches
.filter((result) => isFulfilled(result))
.map(async (result) => processBatch((result as PromiseFulfilledResult<ActiveDapisBatch>).value));
.map(async (result) => processBatch((result as PromiseFulfilledResult<ReadDapisResponse>).value));

// Wait for all the batches to be processed.
//
@@ -80,12 +93,6 @@ export const runUpdateFeed = async (providerName: string, chain: Chain, chainId:
await Promise.all([processFirstBatchPromise, ...processOtherBatchesPromises]);
};

export const processBatch = async (_batch: ActiveDapisBatch) => {
export const processBatch = async (_batch: ReadDapisResponse) => {
// TODO: Implement.
};

export const getActiveDapiBatch = async (chain: Chain, offset = 0) => {
const { dataFeedBatchSize } = chain;

return getStaticActiveDapis(offset, dataFeedBatchSize);
};
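A worked example of the stagger computation above, using the values from the "fetches other batches in a staggered way" test in this commit (3 dAPIs, batch size 1, 0.15 s update interval):

```ts
// Worked example of the stagger computation in runUpdateFeed.
const totalCount = 3;
const dataFeedBatchSize = 1;
const dataFeedUpdateInterval = 0.15; // seconds

const batchesCount = totalCount / dataFeedBatchSize; // 3
const staggerTime = batchesCount <= 1 ? 0 : (dataFeedUpdateInterval / batchesCount) * 1000; // 50 ms

// Later batches are delayed by multiples of staggerTime via
// sleep((batchIndex - 1) * staggerTime), spreading the readDapis calls across the interval.
```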
src/update-feeds/temporary-contract-mock.ts → test/fixtures/dapi-data-registry.ts
@@ -1,9 +1,7 @@
// NOTE: The function is currently returning static data, because the contract is not yet finalized, but we mark it as
// async in advance.
//
// eslint-disable-next-line @typescript-eslint/require-await
export const getStaticActiveDapis = async (_offset: number, _limit: number) => {
return {
import type { DapiDataRegistry } from '../../typechain-types';

export const generateReadDapisResponse = () => [
{
totalCount: 1,
dapiNames: ['MOCK_FEED'],
dataFeedIds: ['0xebba8507d616ed80766292d200a3598fdba656d9938cecc392765d4a284a69a4'],
@@ -12,7 +10,12 @@ export const getStaticActiveDapis = async (_offset: number, _limit: number) => {
dataFeedTemplateIds: [['0xcc35bd1800c06c12856a87311dd95bfcbb3add875844021d59a929d79f3c99bd']],
signedApiUrls: [['http://localhost:8080']],
airnodeAddresses: ['0xbF3137b0a7574563a23a8fC8badC6537F98197CC'],
};
};
},
];

export type ActiveDapisBatch = Awaited<ReturnType<typeof getStaticActiveDapis>>;
export const generateMockDapiDataRegistry = () => {
return {
readDapis: jest.fn(),
dapisCount: jest.fn(),
} satisfies Partial<DapiDataRegistry>;
};
3 changes: 2 additions & 1 deletion test/fixtures/mock-config.ts
@@ -15,7 +15,8 @@ export const generateTestConfig = (): Config => ({
chains: {
'31337': {
contracts: {
Api3ServerV1: '',
Api3ServerV1: '0xe7f1725E7734CE288F8367e1Bb143E90bb3F0512',
DapiDataRegistry: '0xDD78254f864F97f65e2d86541BdaEf88A504D2B2',
},
providers: { hardhat: { url: 'http://127.0.0.1:8545' } },
__Temporary__DapiDataRegistry: {
