feat: keys-api interface #177

Merged · 4 commits · Jun 28, 2023
Changes from 2 commits
6 changes: 4 additions & 2 deletions src/common/alertmanager/alerts/CriticalMissedAttestations.ts
@@ -20,8 +20,10 @@ export class CriticalMissedAttestations extends Alert {
     const nosStats = await this.storage.getUserNodeOperatorsStats(epoch);
     const missedAttValidatorsCount = await this.storage.getValidatorCountWithMissedAttestationsLastNEpoch(epoch);
     for (const noStats of nosStats.filter((o) => o.active_ongoing > this.config.get('CRITICAL_ALERTS_MIN_VAL_COUNT'))) {
-      const operator = this.operators.find((o) => +noStats.val_nos_id == o.index);
-      const missedAtt = missedAttValidatorsCount.find((a) => a.val_nos_id != null && +a.val_nos_id == operator.index);
+      const operator = this.operators.find((o) => +noStats.val_nos_module_id == o.module && +noStats.val_nos_id == o.index);
+      const missedAtt = missedAttValidatorsCount.find(
+        (a) => a.val_nos_id != null && +a.val_nos_module_id == operator.module && +a.val_nos_id == operator.index,
+      );
       if (!missedAtt) continue;
       if (missedAtt.amount > noStats.active_ongoing * VALIDATORS_WITH_MISSED_ATTESTATION_COUNT_THRESHOLD) {
         result[operator.name] = { ongoing: noStats.active_ongoing, missedAtt: missedAtt.amount };
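The same lookup change repeats across the alert classes below: an operator is now identified by the pair (staking module id, operator index) rather than by index alone, since operator indexes are only unique within a single staking module. A minimal standalone sketch of that matching, with hypothetical types and names that are not taken from this PR:

```typescript
// Hypothetical, simplified types for illustration only.
interface RegistryOperator {
  module: number; // staking module id
  index: number; // operator index within that module
  name: string;
}

interface StorageRow {
  val_nos_module_id: string;
  val_nos_id: string;
}

// Operator indexes are only unique within a single staking module,
// so a lookup has to match both the module id and the operator index.
function findOperator(operators: RegistryOperator[], row: StorageRow): RegistryOperator | undefined {
  return operators.find((o) => Number(row.val_nos_module_id) === o.module && Number(row.val_nos_id) === o.index);
}

// Usage: findOperator(registryOperators, { val_nos_module_id: '1', val_nos_id: '7' });
```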
6 changes: 4 additions & 2 deletions src/common/alertmanager/alerts/CriticalMissedProposes.ts
@@ -20,8 +20,10 @@ export class CriticalMissedProposes extends Alert {
     const nosStats = await this.storage.getUserNodeOperatorsStats(epoch);
     const proposes = await this.storage.getUserNodeOperatorsProposesStats(epoch); // ~12h range
     for (const noStats of nosStats.filter((o) => o.active_ongoing > this.config.get('CRITICAL_ALERTS_MIN_VAL_COUNT'))) {
-      const operator = this.operators.find((o) => +noStats.val_nos_id == o.index);
-      const proposeStats = proposes.find((a) => a.val_nos_id != null && +a.val_nos_id == operator.index);
+      const operator = this.operators.find((o) => +noStats.val_nos_module_id == o.module && +noStats.val_nos_id == o.index);
+      const proposeStats = proposes.find(
+        (a) => a.val_nos_id != null && +a.val_nos_module_id == operator.module && +a.val_nos_id == operator.index,
+      );
       if (!proposeStats) continue;
       if (proposeStats.missed > proposeStats.all * VALIDATORS_WITH_MISSED_PROPOSALS_COUNT_THRESHOLD) {
         result[operator.name] = { all: proposeStats.all, missed: proposeStats.missed };
4 changes: 2 additions & 2 deletions src/common/alertmanager/alerts/CriticalNegativeDelta.ts
@@ -20,8 +20,8 @@ export class CriticalNegativeDelta extends Alert {
     const nosStats = await this.storage.getUserNodeOperatorsStats(epoch);
     const negativeValidatorsCount = await this.storage.getValidatorsCountWithNegativeDelta(epoch);
     for (const noStats of nosStats.filter((o) => o.active_ongoing > this.config.get('CRITICAL_ALERTS_MIN_VAL_COUNT'))) {
-      const operator = this.operators.find((o) => +noStats.val_nos_id == o.index);
-      const negDelta = negativeValidatorsCount.find((a) => +a.val_nos_id == operator.index);
+      const operator = this.operators.find((o) => +noStats.val_nos_module_id == o.module && +noStats.val_nos_id == o.index);
+      const negDelta = negativeValidatorsCount.find((a) => +a.val_nos_module_id == operator.module && +a.val_nos_id == operator.index);
       if (!negDelta) continue;
       if (negDelta.amount > noStats.active_ongoing * VALIDATORS_WITH_NEGATIVE_DELTA_COUNT_THRESHOLD) {
         result[operator.name] = { ongoing: noStats.active_ongoing, negDelta: negDelta.amount };
4 changes: 2 additions & 2 deletions src/common/alertmanager/alerts/CriticalSlashing.ts
@@ -17,8 +17,8 @@ export class CriticalSlashing extends Alert {
     const currOperators = await this.storage.getUserNodeOperatorsStats(epoch);
     const prevOperators = await this.storage.getUserNodeOperatorsStats(epoch - 1); // compare with previous epoch
     for (const currOperator of currOperators) {
-      const operator = this.operators.find((o) => +currOperator.val_nos_id == o.index);
-      const prevOperator = prevOperators.find((a) => a.val_nos_id == currOperator.val_nos_id);
+      const operator = this.operators.find((o) => +currOperator.val_nos_module_id == o.module && +currOperator.val_nos_id == o.index);
+      const prevOperator = prevOperators.find((a) => +a.val_nos_module_id == operator.module && +a.val_nos_id == operator.index);
       // if count of slashed validators increased, we should alert about it
       const prevSlashed = prevOperator ? prevOperator.slashed : 0;
       if (currOperator.slashed > prevSlashed) {
20 changes: 20 additions & 0 deletions src/common/config/env.validation.ts
@@ -28,6 +28,7 @@ export enum Network {
 export enum ValidatorRegistrySource {
   Lido = 'lido',
   File = 'file',
+  KeysAPI = 'keysapi',
 }
 
 const toBoolean = (value: any): boolean => {
@@ -180,6 +181,25 @@ export class EnvironmentVariables {
   @IsString()
   public VALIDATOR_REGISTRY_LIDO_SOURCE_SQLITE_CACHE_PATH = './docker/validators/lido_mainnet.db';
 
+  @IsArray()
+  @ArrayMinSize(1)
+  @Transform(({ value }) => value.split(','))
+  @ValidateIf((vars) => vars.VALIDATOR_REGISTRY_SOURCE == ValidatorRegistrySource.KeysAPI && vars.NODE_ENV != Environment.test)
+  public VALIDATOR_REGISTRY_KEYSAPI_SOURCE_URLS = [];
+
+  @IsInt()
+  @Transform(({ value }) => parseInt(value, 10), { toClassOnly: true })
+  public VALIDATOR_REGISTRY_KEYSAPI_SOURCE_RETRY_DELAY_MS = 500;
+
+  @IsNumber()
+  @Min(5000)
+  @Transform(({ value }) => parseInt(value, 10), { toClassOnly: true })
+  public VALIDATOR_REGISTRY_KEYSAPI_SOURCE_RESPONSE_TIMEOUT = 30000;
+
+  @IsNumber()
+  @Transform(({ value }) => parseInt(value, 10), { toClassOnly: true })
+  public VALIDATOR_REGISTRY_KEYSAPI_SOURCE_MAX_RETRIES = 2;
+
   /**
    * Use a file with list of validators that are stuck and should be excluded from the monitoring metrics
    */
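These four variables configure the new Keys API registry source, which is active when VALIDATOR_REGISTRY_SOURCE is set to keysapi. A rough sketch of how the settings could drive a retrying fetch against the configured hosts; the helper, its structure, and the use of global fetch and AbortSignal.timeout (Node 18+) are assumptions for illustration, not code from this PR:

```typescript
// Hypothetical consumer of the new settings -- not code from this PR.
interface KeysApiSourceConfig {
  urls: string[]; // VALIDATOR_REGISTRY_KEYSAPI_SOURCE_URLS (comma-separated in env)
  retryDelayMs: number; // VALIDATOR_REGISTRY_KEYSAPI_SOURCE_RETRY_DELAY_MS
  responseTimeoutMs: number; // VALIDATOR_REGISTRY_KEYSAPI_SOURCE_RESPONSE_TIMEOUT
  maxRetries: number; // VALIDATOR_REGISTRY_KEYSAPI_SOURCE_MAX_RETRIES
}

// One initial try plus up to maxRetries retries, rotating through the
// configured hosts; each request is aborted after responseTimeoutMs.
async function fetchFromKeysApi(cfg: KeysApiSourceConfig, path: string): Promise<unknown> {
  let lastError: unknown;
  for (let attempt = 0; attempt <= cfg.maxRetries; attempt++) {
    const url = cfg.urls[attempt % cfg.urls.length] + path;
    try {
      const res = await fetch(url, { signal: AbortSignal.timeout(cfg.responseTimeoutMs) });
      if (!res.ok) throw new Error(`Keys API responded with ${res.status}`);
      return await res.json();
    } catch (err) {
      lastError = err;
      if (attempt < cfg.maxRetries) await new Promise((resolve) => setTimeout(resolve, cfg.retryDelayMs));
    }
  }
  throw lastError;
}
```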
2 changes: 2 additions & 0 deletions src/common/prometheus/prometheus.constants.ts
@@ -9,6 +9,8 @@ export const METRIC_OUTGOING_EL_REQUESTS_DURATION_SECONDS = `outgoing_el_request
 export const METRIC_OUTGOING_EL_REQUESTS_COUNT = `outgoing_el_requests_count`;
 export const METRIC_OUTGOING_CL_REQUESTS_DURATION_SECONDS = `outgoing_cl_requests_duration_seconds`;
 export const METRIC_OUTGOING_CL_REQUESTS_COUNT = `outgoing_cl_requests_count`;
+export const METRIC_OUTGOING_KEYSAPI_REQUESTS_DURATION_SECONDS = `outgoing_keysapi_requests_duration_seconds`;
+export const METRIC_OUTGOING_KEYSAPI_REQUESTS_COUNT = `outgoing_keysapi_requests_count`;
 export const METRIC_TASK_DURATION_SECONDS = `task_duration_seconds`;
 export const METRIC_TASK_RESULT_COUNT = `task_result_count`;
 export const METRIC_DATA_ACTUALITY = `data_actuality`;
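The two new constants mirror the existing EL/CL request metric names. A minimal prom-client sketch of how such metrics might be registered and used; the help strings, label names, buckets, and import path are assumptions, not taken from this PR:

```typescript
import { Counter, Histogram } from 'prom-client';
import {
  METRIC_OUTGOING_KEYSAPI_REQUESTS_COUNT,
  METRIC_OUTGOING_KEYSAPI_REQUESTS_DURATION_SECONDS,
} from './prometheus.constants';

// Counter for every outgoing Keys API request, labeled by endpoint, host, and result.
const keysApiRequestsCount = new Counter({
  name: METRIC_OUTGOING_KEYSAPI_REQUESTS_COUNT,
  help: 'Outgoing Keys API requests count',
  labelNames: ['name', 'target', 'status'],
});

// Histogram for request latency in seconds.
const keysApiRequestsDuration = new Histogram({
  name: METRIC_OUTGOING_KEYSAPI_REQUESTS_DURATION_SECONDS,
  help: 'Outgoing Keys API requests duration in seconds',
  labelNames: ['name', 'target'],
  buckets: [0.1, 0.5, 1, 2, 5, 10],
});

// Usage around a request:
// const stop = keysApiRequestsDuration.startTimer({ name: 'operators', target: host });
// ... perform the request ...
// stop();
// keysApiRequestsCount.inc({ name: 'operators', target: host, status: 200 });
```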