refactor: extract event getting code to its own function / file
- Merge all event getting code into a single function rather than
  multiple dependent hooks
- Add start of chain-specific event getting
- Small misc fixes
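
For context, after this change the React layer boils down to a single query around the new getTokenEvents entry point (useQuery from @tanstack/react-query, getTokenEvents from the new lib/chains/events.ts). A rough sketch of the consolidated call site; the full diff is in lib/hooks/ethereum/events/useEvents.tsx below:

  const { data: events, isLoading, error } = useQuery({
    queryKey: ['events', address, chainId],
    queryFn: () => getTokenEvents(chainId, address),
    enabled: !isNullish(address) && !isNullish(chainId),
  });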
rkalis committed Dec 11, 2024
1 parent 2465add commit 9a34059
Showing 13 changed files with 156 additions and 129 deletions.
6 changes: 4 additions & 2 deletions biome.jsonc
@@ -36,7 +36,8 @@
"options": {
"hooks": [{ "name": "useTransactionStore", "stableResult": true }]
}
}
},
"noUnusedImports": "warn" // We don't need red squiggly lines for unused imports, but we do want to remove them
},
"performance": {
"noAccumulatingSpread": "warn" // I think this only warrants a warning, as it's not a super big deal in most cases
@@ -48,7 +49,8 @@
"style": {
"noInferrableTypes": "off", // I prefer to be explicit about types
"noNonNullAssertion": "off", // TODO: something to improve later
"useNodejsImportProtocol": "off" // Node.js imports seem not te be supported by Next.js
"useNodejsImportProtocol": "off", // Node.js imports seem not te be supported by Next.js
"useImportType": "warn" // We don't need red squiggly lines for unused imports, but we do want to remove them
}
}
},
6 changes: 3 additions & 3 deletions cypress/e2e/chains.cy.ts
@@ -191,7 +191,7 @@ describe('Chain Support', () => {
cy.get(Selectors.CONTROLS_SECTION, { timeout: 4_000 }).should('exist');

// Get the number of approvals from the UI and store it in a file to compare with production
if (Cypress.env('CHECK_REGRESSIONS') === 'true') {
if (Cypress.env('CHECK_REGRESSIONS')) {
cy.get(Selectors.TOTAL_ALLOWANCES)
.should('exist')
.invoke('text')
@@ -206,7 +206,7 @@
});
}

if (Cypress.env('CHECK_EXPLORER') === 'true') {
if (Cypress.env('CHECK_EXPLORER')) {
// To test that the explorer link works, we navigate to the "Last Updated" URL and check that the address is present
const linkElement = cy.get(Selectors.LAST_UPDATED_LINK).first();
linkElement.invoke('attr', 'href').then((href) => {
@@ -221,7 +221,7 @@
}
});

if (Cypress.env('CHECK_REGRESSIONS') === 'true') {
if (Cypress.env('CHECK_REGRESSIONS')) {
it('should return the same results as production', () => {
cy.visit(`https://revoke.cash/address/${fixtureAddress}?chainId=${chainId}`, { timeout: 10_000 });
cy.wait(1000); // Since App Router we now need this delay before the page is fully loaded -__-
10 changes: 10 additions & 0 deletions lib/chains/Chain.ts
@@ -2,6 +2,7 @@ import { getChain } from '@revoke.cash/chains';
import { ETHERSCAN_API_KEYS, ETHERSCAN_RATE_LIMITS, INFURA_API_KEY, RPC_OVERRIDES } from 'lib/constants';
import type { EtherscanPlatform, RateLimit } from 'lib/interfaces';
import type { PriceStrategy } from 'lib/price/PriceStrategy';
import { BackendLogsProvider, DivideAndConquerLogsProvider, type LogsProvider, ViemLogsProvider } from 'lib/providers';
import { isNullish } from 'lib/utils';
import { SECOND } from 'lib/utils/time';
import {
@@ -239,4 +240,13 @@ export class Chain {
getBackendPriceStrategy(): PriceStrategy | undefined {
return this.options.backendPriceStrategy;
}

private getUnderlyingLogsProvider(): BackendLogsProvider | ViemLogsProvider {
if (this.type !== SupportType.PROVIDER) return new BackendLogsProvider(this.chainId);
return new ViemLogsProvider(this.chainId, this.getLogsRpcUrl());
}

getLogsProvider(): LogsProvider {
return new DivideAndConquerLogsProvider(this.getUnderlyingLogsProvider());
}
}
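
getLogsProvider now wraps the underlying provider in a DivideAndConquerLogsProvider. Its implementation is not part of this diff; the following is only a minimal sketch of the general divide-and-conquer pattern (an assumption about the approach, not the actual lib/providers code), where a failing block range is split in half and retried recursively:

  // Minimal sketch only; the real DivideAndConquerLogsProvider may differ.
  interface RangeFilter {
    topics: (string | null)[];
    fromBlock: number;
    toBlock: number;
  }

  interface SimpleLogsProvider<T> {
    getLogs(filter: RangeFilter): Promise<T[]>;
  }

  class DivideAndConquerSketch<T> implements SimpleLogsProvider<T> {
    constructor(private readonly underlying: SimpleLogsProvider<T>) {}

    async getLogs(filter: RangeFilter): Promise<T[]> {
      try {
        return await this.underlying.getLogs(filter);
      } catch (error) {
        // A single block cannot be split further, so give up
        if (filter.toBlock <= filter.fromBlock) throw error;
        // Otherwise split the range in half and recurse on both halves
        const middle = filter.fromBlock + Math.floor((filter.toBlock - filter.fromBlock) / 2);
        const [left, right] = await Promise.all([
          this.getLogs({ ...filter, toBlock: middle }),
          this.getLogs({ ...filter, fromBlock: middle + 1 }),
        ]);
        return [...left, ...right];
      }
    }
  }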
104 changes: 104 additions & 0 deletions lib/chains/events.ts
@@ -0,0 +1,104 @@
import { ERC721_ABI, PERMIT2_ABI } from 'lib/abis';
import eventsDB from 'lib/databases/events';
import { sortTokenEventsChronologically } from 'lib/utils';
import { isNullish } from 'lib/utils';
import { addressToTopic, apiLogin } from 'lib/utils';
import { type DocumentedChainId, getChainConfig } from 'lib/utils/chains';
import { parseApprovalForAllLog, parseApprovalLog, parsePermit2Log, parseTransferLog } from 'lib/utils/events';
import { type TokenEvent, generatePatchedAllowanceEvents } from 'lib/utils/events';
import { getOpenSeaProxyAddress } from 'lib/utils/whois';
import { type Address, getAbiItem, toEventSelector } from 'viem';

// Note: ideally I would have included this in the 'Chain' class, but this causes circular dependency issues and issues with Edge runtime
// So we use this separate file instead to configure token event getting per chain.

export const getTokenEvents = async (chainId: DocumentedChainId, address: Address): Promise<TokenEvent[]> => {
const override = ChainOverrides[chainId];
if (override) return override(chainId, address);
return getTokenEventsDefault(chainId, address);
};

type TokenEventsGetter = (chainId: DocumentedChainId, address: Address) => Promise<TokenEvent[]>;

const ChainOverrides: Record<number, TokenEventsGetter> = {};

const getTokenEventsDefault = async (chainId: DocumentedChainId, address: Address): Promise<TokenEvent[]> => {
// Assemble prerequisites

const chain = getChainConfig(chainId);
const publicClient = chain.createViemPublicClient();
const logsProvider = chain.getLogsProvider();

const [openSeaProxyAddress, fromBlock, toBlock, isLoggedIn] = await Promise.all([
getOpenSeaProxyAddress(address),
0,
publicClient.getBlockNumber().then((blockNumber) => Number(blockNumber)),
apiLogin(),
]);

if (!isLoggedIn) {
throw new Error('Failed to create an API session');
}

// Create required event filters

const getErc721EventSelector = (eventName: 'Transfer' | 'Approval' | 'ApprovalForAll') => {
return toEventSelector(getAbiItem({ abi: ERC721_ABI, name: eventName }));
};

const getPermit2EventSelector = (eventName: 'Permit' | 'Approval' | 'Lockdown') => {
return toEventSelector(getAbiItem({ abi: PERMIT2_ABI, name: eventName }));
};

const addressTopic = addressToTopic(address);

const transferToFilter = { topics: [getErc721EventSelector('Transfer'), null, addressTopic], fromBlock, toBlock };
const transferFromFilter = { topics: [getErc721EventSelector('Transfer'), addressTopic], fromBlock, toBlock };
const approvalFilter = { topics: [getErc721EventSelector('Approval'), addressTopic], fromBlock, toBlock };
const approvalForAllFilter = {
topics: [getErc721EventSelector('ApprovalForAll'), addressTopic],
fromBlock,
toBlock,
};

const permit2ApprovalFilter = { topics: [getPermit2EventSelector('Approval'), addressTopic], fromBlock, toBlock };
const permit2PermitFilter = { topics: [getPermit2EventSelector('Permit'), addressTopic], fromBlock, toBlock };
const permit2LockdownFilter = { topics: [getPermit2EventSelector('Lockdown'), addressTopic], fromBlock, toBlock };

// Fetch events
const [transferTo, transferFrom, approval, approvalForAllUnpatched, permit2Approval, permit2Permit, permit2Lockdown] =
await Promise.all([
eventsDB.getLogs(logsProvider, transferToFilter, chainId, 'Transfer (to)'),
eventsDB.getLogs(logsProvider, transferFromFilter, chainId, 'Transfer (from)'),
eventsDB.getLogs(logsProvider, approvalFilter, chainId, 'Approval'),
eventsDB.getLogs(logsProvider, approvalForAllFilter, chainId, 'ApprovalForAll'),
eventsDB.getLogs(logsProvider, permit2ApprovalFilter, chainId, 'Permit2 Approval'),
eventsDB.getLogs(logsProvider, permit2PermitFilter, chainId, 'Permit2 Permit'),
eventsDB.getLogs(logsProvider, permit2LockdownFilter, chainId, 'Permit2 Lockdown'),
]);

// Manually patch the ApprovalForAll events
const approvalForAll = [
...approvalForAllUnpatched,
...generatePatchedAllowanceEvents(address, openSeaProxyAddress ?? undefined, [
...approval,
...approvalForAllUnpatched,
...transferFrom,
...transferTo,
]),
];

// Parse events. We put ApprovalForAll first to ensure that incorrect ERC721 contracts like CryptoStrikers are handled correctly
const parsedEvents = [
...approvalForAll.map((log) => parseApprovalForAllLog(log, chainId)),
...approval.map((log) => parseApprovalLog(log, chainId)),
...permit2Approval.map((log) => parsePermit2Log(log, chainId)),
...permit2Permit.map((log) => parsePermit2Log(log, chainId)),
...permit2Lockdown.map((log) => parsePermit2Log(log, chainId)),
...transferFrom.map((log) => parseTransferLog(log, chainId, address)),
...transferTo.map((log) => parseTransferLog(log, chainId, address)),
];

// We sort the events in reverse chronological order to ensure that the most recent events are processed first
return sortTokenEventsChronologically(parsedEvents.filter((event) => !isNullish(event))).reverse();
};
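
ChainOverrides is still empty in this commit (the "start of chain-specific event getting"); a hypothetical future entry could look like the sketch below, where the chain ID and the extra per-chain logic are illustrative only:

  // Hypothetical example, not part of this commit: reuse the default getter
  // and layer chain-specific behaviour on top for an arbitrary chain ID (100).
  ChainOverrides[100] = async (chainId, address) => {
    const events = await getTokenEventsDefault(chainId, address);
    // ...apply chain-specific filtering, augmentation, or an indexer lookup here...
    return events;
  };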
4 changes: 3 additions & 1 deletion lib/databases/events.ts
@@ -38,8 +38,10 @@ class EventsDB extends Dexie {
// Note: It is always assumed that this function is called to get logs for the entire chain (i.e. from block 0 to 'latest')
// So we assume that the filter.fromBlock is always 0, and we only need to retrieve events between the last stored event and 'latest'
// This means that we can't use this function to get logs for a specific block range
async getLogs(logsProvider: LogsProvider, filter: Filter, chainId: number) {
async getLogs(logsProvider: LogsProvider, filter: Filter, chainId: number, nameTag?: string) {
const logs = await this.getLogsInternal(logsProvider, filter, chainId);

if (nameTag) console.log(`${nameTag} logs`, logs);
// We can uncomment this to filter the logs once more by block number after retrieving them from IndexedDB
// This is useful when we want to test the state of approvals at a different block by using a Tenderly fork
// return logs.filter((log) => log.blockNumber >= filter.fromBlock && log.blockNumber <= filter.toBlock);
102 changes: 10 additions & 92 deletions lib/hooks/ethereum/events/useEvents.tsx
@@ -1,96 +1,14 @@
import { ERC721_ABI } from 'lib/abis';
import { addressToTopic, isNullish, sortTokenEventsChronologically } from 'lib/utils';
import {
generatePatchedAllowanceEvents,
parseApprovalForAllLog,
parseApprovalLog,
parsePermit2Log,
parseTransferLog,
} from 'lib/utils/events';
import { useMemo } from 'react';
import { type Address, getAbiItem, toEventSelector } from 'viem';
import { useLogsFullBlockRange } from '../useLogsFullBlockRange';
import { useOpenSeaProxyAddress } from '../useOpenSeaProxyAddress';
import { usePermit2Events } from './usePermit2Events';
import { useQuery } from '@tanstack/react-query';
import { getTokenEvents } from 'lib/chains/events';
import { isNullish } from 'lib/utils';
import type { Address } from 'viem';

export const useEvents = (address: Address, chainId: number) => {
const { openSeaProxyAddress, isLoading: isOpenSeaProxyAddressLoading } = useOpenSeaProxyAddress(address);
const { data, isLoading, error } = useQuery({
queryKey: ['events', address, chainId],
queryFn: () => getTokenEvents(chainId, address),
enabled: !isNullish(address) && !isNullish(chainId),
});

const getErc721EventSelector = (eventName: 'Transfer' | 'Approval' | 'ApprovalForAll') => {
return toEventSelector(getAbiItem({ abi: ERC721_ABI, name: eventName }));
};

const addressTopic = address ? addressToTopic(address) : undefined;
const transferToFilter = addressTopic && { topics: [getErc721EventSelector('Transfer'), null, addressTopic] };
const transferFromFilter = addressTopic && { topics: [getErc721EventSelector('Transfer'), addressTopic] };
const approvalFilter = addressTopic && { topics: [getErc721EventSelector('Approval'), addressTopic] };
const approvalForAllFilter = addressTopic && { topics: [getErc721EventSelector('ApprovalForAll'), addressTopic] };

const {
data: transferTo,
isLoading: isTransferToLoading,
error: transferToError,
} = useLogsFullBlockRange('Transfer (to)', chainId, transferToFilter);

const {
data: transferFrom,
isLoading: isTransferFromLoading,
error: transferFromError,
} = useLogsFullBlockRange('Transfer (from)', chainId, transferFromFilter);

const {
data: approval,
isLoading: isApprovalLoading,
error: approvalError,
} = useLogsFullBlockRange('Approval', chainId, approvalFilter);

const {
data: approvalForAllUnpatched,
isLoading: isApprovalForAllLoading,
error: approvalForAllError,
} = useLogsFullBlockRange('ApprovalForAll', chainId, approvalForAllFilter);

const {
events: permit2Approval,
isLoading: isPermit2ApprovalLoading,
error: permit2ApprovalError,
} = usePermit2Events(address, chainId);

// Manually patch the ApprovalForAll events
const approvalForAll = useMemo(() => {
if (!transferFrom || !transferTo || !approval || !approvalForAllUnpatched) return undefined;
return [
...approvalForAllUnpatched,
...generatePatchedAllowanceEvents(address, openSeaProxyAddress ?? undefined, [
...approval,
...approvalForAllUnpatched,
...transferFrom,
...transferTo,
]),
];
}, [transferFrom, transferTo, approval, approvalForAllUnpatched, openSeaProxyAddress, address]);

const isEventsLoading = isTransferFromLoading || isTransferToLoading || isApprovalLoading || isApprovalForAllLoading;
const isLoading = isOpenSeaProxyAddressLoading || isEventsLoading || isPermit2ApprovalLoading;
const eventsError = transferFromError || transferToError || approvalError || approvalForAllError;
const error = eventsError || permit2ApprovalError;

const events = useMemo(() => {
if (!transferFrom || !transferTo || !approval || !approvalForAll || !permit2Approval) return undefined;
if (error || isLoading) return undefined;

const parsedEvents = [
// We put ApprovalForAll first to ensure that incorrect ERC721 contracts like CryptoStrikers are handled correctly
...approvalForAll.map((log) => parseApprovalForAllLog(log, chainId)),
...approval.map((log) => parseApprovalLog(log, chainId)),
...permit2Approval.map((log) => parsePermit2Log(log, chainId)),
...transferFrom.map((log) => parseTransferLog(log, chainId, address)),
...transferTo.map((log) => parseTransferLog(log, chainId, address)),
];

// We sort the events in reverse chronological order to ensure that the most recent events are processed first
return sortTokenEventsChronologically(parsedEvents.filter((event) => !isNullish(event))).reverse();
}, [transferFrom, transferTo, approval, approvalForAll, permit2Approval, error, isLoading, address, chainId]);

return { events, isLoading, error };
return { events: data, isLoading, error };
};
5 changes: 2 additions & 3 deletions lib/hooks/ethereum/useBlockNumber.tsx
@@ -1,13 +1,12 @@
import { useQuery } from '@tanstack/react-query';
import { createViemPublicClientForChain } from 'lib/utils/chains';
import { MINUTE } from 'lib/utils/time';
import { getBlockNumber } from 'wagmi/actions';
import { wagmiConfig } from './EthereumProvider';

// We add this custom useBlockNumber instead of using wagmi's so that we can easily invalidate the query when needed
export const useBlockNumber = (chainId: number) => {
return useQuery<number, Error>({
queryKey: ['blockNumber', chainId],
queryFn: async () => getBlockNumber(wagmiConfig, { chainId }).then(Number),
queryFn: async () => createViemPublicClientForChain(chainId).getBlockNumber().then(Number),
// Don't refresh the block number too often to avoid refreshing events too often, to avoid backend API rate limiting
gcTime: 1 * MINUTE,
staleTime: 1 * MINUTE,
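
The custom useBlockNumber exists so the block number query can be invalidated on demand; with TanStack Query that would look roughly like the sketch below (the component wrapper is illustrative; only the queryKey comes from this file):

  import { useQueryClient } from '@tanstack/react-query';

  // Sketch: force a refetch of the block number (and of anything derived from it)
  const RefreshBlockNumberButton = ({ chainId }: { chainId: number }) => {
    const queryClient = useQueryClient();
    const refresh = () => queryClient.invalidateQueries({ queryKey: ['blockNumber', chainId] });
    return <button onClick={refresh}>Refresh</button>;
  };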
7 changes: 1 addition & 6 deletions lib/hooks/ethereum/useLogs.tsx
@@ -3,15 +3,14 @@ import eventsDB from 'lib/databases/events';
import { getLogsProvider } from 'lib/providers';
import { isNullish } from 'lib/utils';
import type { Filter, Log } from 'lib/utils/events';
import { useEffect } from 'react';
import { useApiSession } from '../useApiSession';

export const useLogs = (name: string, chainId: number, filter?: Filter) => {
const { isLoggedIn, loggingIn, error: loginError } = useApiSession();

const result = useQuery<Log[], Error>({
queryKey: ['logs', filter, chainId, isLoggedIn],
queryFn: async () => eventsDB.getLogs(getLogsProvider(chainId), filter!, chainId),
queryFn: async () => eventsDB.getLogs(getLogsProvider(chainId), filter!, chainId, name),
refetchOnWindowFocus: false,
// The same filter should always return the same logs
staleTime: Number.POSITIVE_INFINITY,
@@ -23,10 +22,6 @@ export const useLogs = (name: string, chainId: number, filter?: Filter) => {
!isNullish(filter?.topics),
});

useEffect(() => {
if (result.data) console.log(`${name} events`, result.data);
}, [result.data, name]);

const error = loginError ? new Error('Failed to create API session') : result.error;

return { ...result, isLoading: result.isLoading || loggingIn, error };
10 changes: 4 additions & 6 deletions lib/hooks/ethereum/useMarketplaces.tsx
@@ -13,9 +13,7 @@ import { mapAsync } from 'lib/utils/promises';
import { MINUTE } from 'lib/utils/time';
import { useLayoutEffect, useState } from 'react';
import { type Address, type Hash, type WalletClient, getAbiItem, toEventSelector } from 'viem';
import { getBlockNumber } from 'wagmi/actions';
import { useAddressAllowances, useAddressPageContext } from '../page-context/AddressPageContext';
import { wagmiConfig } from './EthereumProvider';

export const useMarketplaces = () => {
const [marketplaces, setMarketplaces] = useState<Marketplace[]>([]);
@@ -115,7 +113,7 @@ export const useMarketplaces = () => {

const blockNumber = await queryClient.ensureQueryData({
queryKey: ['blockNumber', selectedChainId],
queryFn: async () => getBlockNumber(wagmiConfig, { chainId: selectedChainId }).then(Number),
queryFn: async () => createViemPublicClientForChain(selectedChainId).getBlockNumber().then(Number),
// Don't refresh the block number too often to avoid refreshing events too often, to avoid backend API rate limiting
gcTime: 1 * MINUTE,
staleTime: 1 * MINUTE,
@@ -135,13 +133,13 @@
staleTime: Number.POSITIVE_INFINITY,
});

const lastCancelled = logs?.sort(logSorterChronological)?.at(-1);
const timestamp = lastCancelled ? await blocksDB.getLogTimestamp(publicClient, lastCancelled) : undefined;
const lastCancelledLog = logs?.sort(logSorterChronological)?.at(-1);
const lastCancelled = lastCancelledLog ? await blocksDB.getTimeLog(publicClient, lastCancelledLog) : undefined;

return {
...marketplace,
chainId: selectedChainId,
lastCancelled: lastCancelled ? { ...lastCancelled, timestamp } : undefined,
lastCancelled,
allowances: allowances!.filter(
(allowance) => allowance.payload?.spender === marketplace.approvalFilterAddress,
),
8 changes: 2 additions & 6 deletions lib/hooks/useApiSession.tsx
@@ -1,5 +1,5 @@
import { useQuery } from '@tanstack/react-query';
import ky from 'lib/ky';
import { apiLogin } from 'lib/utils';
import { HOUR } from 'lib/utils/time';

export const useApiSession = () => {
@@ -9,11 +9,7 @@ export const useApiSession = () => {
error,
} = useQuery({
queryKey: ['login'],
queryFn: () =>
ky
.post('/api/login')
.json<any>()
.then((res) => !!res?.ok),
queryFn: apiLogin,
staleTime: 12 * HOUR,
refetchOnWindowFocus: true,
refetchOnReconnect: true,
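
The inline login call is replaced by an apiLogin helper from lib/utils whose body is not shown in this diff; judging from the removed queryFn, it presumably amounts to something like:

  // Assumed shape of apiLogin, reconstructed from the removed inline code;
  // the actual helper in lib/utils may differ.
  import ky from 'lib/ky';

  export const apiLogin = async (): Promise<boolean> => {
    const res = await ky.post('/api/login').json<any>();
    return !!res?.ok;
  };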