diff --git a/chart/templates/global-cm.yml b/chart/templates/global-cm.yml index 55be56bfd..a1ad2734a 100644 --- a/chart/templates/global-cm.yml +++ b/chart/templates/global-cm.yml @@ -5,6 +5,16 @@ metadata: labels: {{- include "relaynet-internet-gateway.labels" . | nindent 4 }} data: + GATEWAY_VERSION: {{ .Chart.Version }} + + LOG_LEVEL: {{ .Values.logging.level | quote }} + {{- if .Values.logging.target }} + LOG_TARGET: {{ .Values.logging.target }} + {{- end }} + {{- if .Values.logging.envName }} + LOG_ENV_NAME: {{ .Values.logging.envName }} + {{- end }} + NATS_SERVER_URL: {{ .Values.nats.serverUrl }} NATS_CLUSTER_ID: {{ .Values.nats.clusterId }} diff --git a/chart/templates/pohttp-deploy.yml b/chart/templates/pohttp-deploy.yml index 072e69e23..b26d0a0eb 100644 --- a/chart/templates/pohttp-deploy.yml +++ b/chart/templates/pohttp-deploy.yml @@ -41,8 +41,6 @@ spec: - name: REQUEST_ID_HEADER value: {{ .Values.proxyRequestIdHeader | quote }} {{- end }} - - name: LOG_LEVEL - value: {{ .Values.logLevel | quote }} envFrom: - configMapRef: name: {{ include "relaynet-internet-gateway.fullname" . }} diff --git a/chart/templates/poweb-deploy.yml b/chart/templates/poweb-deploy.yml index b680cfb79..21c179bfd 100644 --- a/chart/templates/poweb-deploy.yml +++ b/chart/templates/poweb-deploy.yml @@ -42,8 +42,6 @@ spec: - name: REQUEST_ID_HEADER value: {{ .Values.proxyRequestIdHeader | quote }} {{- end }} - - name: LOG_LEVEL - value: {{ .Values.logLevel | quote }} envFrom: - configMapRef: name: {{ include "relaynet-internet-gateway.fullname" . }} diff --git a/chart/values.dev.yml b/chart/values.dev.yml index 069fa0498..dcecf8d99 100644 --- a/chart/values.dev.yml +++ b/chart/values.dev.yml @@ -8,7 +8,8 @@ fullnameOverride: public-gateway tags: dev: true -logLevel: debug +logging: + level: debug ingress: enableTls: false diff --git a/chart/values.schema.json b/chart/values.schema.json index 7a04e127f..3ec6ff0c4 100644 --- a/chart/values.schema.json +++ b/chart/values.schema.json @@ -2,7 +2,7 @@ "$schema": "http://json-schema.org/schema#", "type": "object", "required": [ - "logLevel", + "logging", "publicAddress", "pdcQueue", "mongo", @@ -10,9 +10,22 @@ "vault" ], "properties": { - "logLevel": { - "type": "string", - "enum": ["debug", "info", "warn", "error"] + "logging": { + "type": "object", + "required": ["level"], + "properties": { + "level": { + "type": "string", + "enum": ["debug", "info", "warn", "error", "fatal"] + }, + "target": { + "type": "string", + "enum": ["gcp"] + }, + "envName": { + "type": "string" + } + } }, "ingress": { "type": "object", diff --git a/chart/values.yaml b/chart/values.yaml index 61224439e..af4b170f3 100644 --- a/chart/values.yaml +++ b/chart/values.yaml @@ -5,7 +5,8 @@ tags: dev: false -logLevel: info +logging: + level: info image: repository: ghcr.io/relaycorp/relaynet-internet-gateway diff --git a/docs/install.md b/docs/install.md index 408d006d6..21fd7a0b0 100644 --- a/docs/install.md +++ b/docs/install.md @@ -61,6 +61,9 @@ Check out [`relaycorp/cloud-gateway`](https://github.com/relaycorp/cloud-gateway | `ingress.annotations` | object | `{}` | Annotations for the ingress | | `ingress.enableTls` | boolean | `true` | Whether the ingress should use TLS. You still have to configure TLS through your cloud provider; see [GKE documentation](https://cloud.google.com/kubernetes-engine/docs/concepts/ingress), for example. | | `objectStore.backend` | string | | The type of object store used. 
Any value supported by [`@relaycorp/object-storage`](https://github.com/relaycorp/object-storage-js) (e.g., `s3`). | +| `logging.level` | string | `info` | The [log level](./instrumentation.md). | +| `logging.target` | string | | Any target supported by [@relaycorp/pino-cloud](https://www.npmjs.com/package/@relaycorp/pino-cloud); e.g., `gcp`. | +| `logging.envName` | string | `relaynet-internet-gateway` | A unique name for this instance of the gateway. Used by the `gcp` target as the _service name_ when pushing errors to Google Error Reporting, for example. | ### Component-specific options diff --git a/docs/instrumentation.md b/docs/instrumentation.md index b1f68dfed..014f48ab8 100644 --- a/docs/instrumentation.md +++ b/docs/instrumentation.md @@ -4,7 +4,7 @@ permalink: /instrumentation --- # Instrumentation -We use [pino](https://getpino.io/) to provide structured logs, which could in turn be consumed to provide [instrumentation](https://john-millikin.com/sre-school/instrumentation). +We use [pino](https://getpino.io/) with [`@relaycorp/pino-cloud`](https://www.npmjs.com/package/@relaycorp/pino-cloud) to provide structured logs, which could in turn be consumed to provide [instrumentation](https://john-millikin.com/sre-school/instrumentation). ## Common logging attributes @@ -22,4 +22,4 @@ We use log levels as follows: - `info`: Events for any outcome observed outside the gateway, or any unusual interaction with a backing service. - `warning`: Something has gone wrong but it's being handled gracefully. Triage can start on the next working day. - `error`: Something has gone wrong and triage must start within a few minutes. Wake up an SRE if necessary. -- `critical`: Not used. +- `fatal`: Not used. diff --git a/package-lock.json b/package-lock.json index ee0d53328..7109f74af 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1992,6 +1992,14 @@ "aws-sdk": "^2.831.0" } }, + "@relaycorp/pino-cloud": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/@relaycorp/pino-cloud/-/pino-cloud-1.0.2.tgz", + "integrity": "sha512-YlYSmdQ6lBJLaOZ7UQ9X5y4T2wM24VsAMjUaVnv4+yTYvrm630NiV/w0Gnuzc0Fwd2nwDgNu6NeFf//bdYRLeg==", + "requires": { + "pino": "^6.11.0" + } + }, "@relaycorp/relaynet-core": { "version": "1.42.3", "resolved": "https://registry.npmjs.org/@relaycorp/relaynet-core/-/relaynet-core-1.42.3.tgz", diff --git a/package.json b/package.json index 4b82ceaf6..98ed2dd6f 100644 --- a/package.json +++ b/package.json @@ -17,7 +17,7 @@ "test:ci:unit": "run-s build test:ci:unit:jest", "test:ci:unit:jest": "jest --config jest.config.ci.js --coverage", "cov": "run-s build test:unit && opn coverage/lcov-report/index.html", - "clean": "trash build test" + "clean": "trash build test coverage" }, "engines": { "node": ">=12" @@ -70,6 +70,7 @@ "@relaycorp/cogrpc": "^1.3.1", "@relaycorp/keystore-vault": "^1.2.1", "@relaycorp/object-storage": "^1.3.2", + "@relaycorp/pino-cloud": "^1.0.2", "@relaycorp/relaynet-core": "^1.42.3", "@relaycorp/relaynet-pohttp": "^1.6.0", "@typegoose/typegoose": "^7.4.8", diff --git a/src/_test_utils.ts b/src/_test_utils.ts index a3ae60af5..4e3bac56b 100644 --- a/src/_test_utils.ts +++ b/src/_test_utils.ts @@ -72,20 +72,6 @@ export function mockSpy( return spy; } -/** - * @deprecated Use `makeMockLogging()` instead. 
- */ -export function mockPino(): pino.Logger { - const mockPinoLogger = { - debug: mockSpy(jest.fn()), - error: mockSpy(jest.fn()), - info: mockSpy(jest.fn()), - warn: mockSpy(jest.fn()), - }; - jest.mock('pino', () => jest.fn().mockImplementation(() => mockPinoLogger)); - return mockPinoLogger as any; -} - // tslint:disable-next-line:readonly-array export type MockLogSet = object[]; diff --git a/src/bin/pohttp-server.ts b/src/bin/pohttp-server.ts index 488dd38d2..5793d5d6d 100755 --- a/src/bin/pohttp-server.ts +++ b/src/bin/pohttp-server.ts @@ -1,7 +1,7 @@ // tslint:disable-next-line:no-var-requires require('make-promises-safe'); -import { runFastify } from '../services/fastifyUtils'; import { makeServer } from '../services/pohttp/server'; +import { runFastify } from '../utilities/fastify'; makeServer().then(runFastify); diff --git a/src/bin/poweb-server.ts b/src/bin/poweb-server.ts index 6306bcdaa..c27b6aeac 100755 --- a/src/bin/poweb-server.ts +++ b/src/bin/poweb-server.ts @@ -1,7 +1,7 @@ // tslint:disable-next-line:no-var-requires require('make-promises-safe'); -import { runFastify } from '../services/fastifyUtils'; import { makeServer } from '../services/poweb/server'; +import { runFastify } from '../utilities/fastify'; makeServer().then(runFastify); diff --git a/src/services/_test_utils.ts b/src/services/_test_utils.ts index 7154a346d..c3c8a6e32 100644 --- a/src/services/_test_utils.ts +++ b/src/services/_test_utils.ts @@ -15,7 +15,7 @@ import { Connection } from 'mongoose'; import * as stan from 'node-nats-streaming'; import { PdaChain } from '../_test_utils'; -import { HTTP_METHODS } from './fastifyUtils'; +import { HTTP_METHODS } from '../utilities/fastify'; export const TOMORROW = new Date(); TOMORROW.setDate(TOMORROW.getDate() + 1); diff --git a/src/services/cogrpc/server.spec.ts b/src/services/cogrpc/server.spec.ts index e775c8495..96d7b23c9 100644 --- a/src/services/cogrpc/server.spec.ts +++ b/src/services/cogrpc/server.spec.ts @@ -5,7 +5,8 @@ import { Logger } from 'pino'; import selfsigned from 'selfsigned'; import { makeMockLogging, mockSpy, partialPinoLog } from '../../_test_utils'; -import { configureMockEnvVars } from '../_test_utils'; +import * as logging from '../../utilities/logging'; +import { configureMockEnvVars, getMockContext } from '../_test_utils'; import { MAX_RAMF_MESSAGE_SIZE } from '../constants'; import { runServer } from './server'; import * as cogrpcService from './service'; @@ -42,6 +43,9 @@ const BASE_ENV_VARS = { }; const mockEnvVars = configureMockEnvVars(BASE_ENV_VARS); +const mockLogger = makeMockLogging().logger; +const mockMakeLogger = mockSpy(jest.spyOn(logging, 'makeLogger'), () => mockLogger); + describe('runServer', () => { test.each([ 'GATEWAY_KEY_ID', @@ -126,6 +130,16 @@ describe('runServer', () => { expect(mockServer.addService).toBeCalledWith(CargoRelayService, serviceImplementation); }); + test('Logger should be configured if custom logger is absent', async () => { + await runServer(); + + expect(mockMakeLogger).toBeCalledWith(); + const logger = getMockContext(mockMakeLogger).results[0].value; + expect(makeServiceImplementationSpy).toBeCalledWith( + expect.objectContaining({ baseLogger: logger }), + ); + }); + test('Health check service should be added', async () => { await runServer(); diff --git a/src/services/cogrpc/server.ts b/src/services/cogrpc/server.ts index 563ea83d9..f8c4dd1b4 100644 --- a/src/services/cogrpc/server.ts +++ b/src/services/cogrpc/server.ts @@ -2,9 +2,10 @@ import { CargoRelayService } from '@relaycorp/cogrpc'; 
import { get as getEnvVar } from 'env-var'; import { KeyCertPair, Server, ServerCredentials } from 'grpc'; import grpcHealthCheck from 'grpc-health-check'; -import pino, { Logger } from 'pino'; +import { Logger } from 'pino'; import * as selfsigned from 'selfsigned'; +import { makeLogger } from '../../utilities/logging'; import { MAX_RAMF_MESSAGE_SIZE } from '../constants'; import { makeServiceImplementation } from './service'; @@ -33,7 +34,7 @@ export async function runServer(logger?: Logger): Promise { 'grpc.max_receive_message_length': MAX_RECEIVED_MESSAGE_LENGTH, }); - const baseLogger = logger ?? pino(); + const baseLogger = logger ?? makeLogger(); const serviceImplementation = await makeServiceImplementation({ baseLogger, gatewayKeyIdBase64, diff --git a/src/services/crcQueueWorker.spec.ts b/src/services/crcQueueWorker.spec.ts index ca7088359..8f62d341d 100644 --- a/src/services/crcQueueWorker.spec.ts +++ b/src/services/crcQueueWorker.spec.ts @@ -18,11 +18,12 @@ import { import { Connection } from 'mongoose'; import * as stan from 'node-nats-streaming'; -import { mockPino, mockSpy, PdaChain } from '../_test_utils'; +import { makeMockLogging, MockLogging, mockSpy, partialPinoLog, PdaChain } from '../_test_utils'; import * as privateKeyStore from '../backingServices/keyStores'; import * as mongo from '../backingServices/mongo'; import { NatsStreamingClient } from '../backingServices/natsStreaming'; import * as objectStorage from '../backingServices/objectStorage'; +import * as logging from '../utilities/logging'; import { castMock, configureMockEnvVars, @@ -30,12 +31,10 @@ import { getMockInstance, mockStanMessage, } from './_test_utils'; +import { processIncomingCrcCargo } from './crcQueueWorker'; import * as mongoPublicKeyStore from './MongoPublicKeyStore'; import { ParcelStore } from './parcelStore'; -const mockLogger = mockPino(); -import { processIncomingCrcCargo } from './crcQueueWorker'; - //region Stan-related fixtures const STUB_WORKER_NAME = 'worker-name'; @@ -88,6 +87,7 @@ const mockStoreParcelFromPeerGateway = mockSpy( //endregion const BASE_ENV_VARS = { + GATEWAY_VERSION: '1', OBJECT_STORE_BUCKET, }; configureMockEnvVars(BASE_ENV_VARS); @@ -130,7 +130,19 @@ beforeAll(async () => { PARCEL_SERIALIZED = await PARCEL.serialize(CERT_CHAIN.pdaGranteePrivateKey); }); +let mockLogging: MockLogging; +beforeAll(() => { + mockLogging = makeMockLogging(); +}); +const mockMakeLogger = mockSpy(jest.spyOn(logging, 'makeLogger'), () => mockLogging.logger); + describe('Queue subscription', () => { + test('Logger should be configured', async () => { + await processIncomingCrcCargo(STUB_WORKER_NAME); + + expect(mockMakeLogger).toBeCalledWith(); + }); + test('Worker should subscribe to channel "crc-cargo"', async () => { await processIncomingCrcCargo(STUB_WORKER_NAME); @@ -193,14 +205,13 @@ test('Cargo with invalid payload should be logged and ignored', async () => { await processIncomingCrcCargo(STUB_WORKER_NAME); - expect(mockLogger.info).toBeCalledWith( - { + expect(mockLogging.logs).toContainEqual( + partialPinoLog('info', 'Cargo payload is invalid', { cargoId: cargo.id, err: expect.objectContaining({ message: expect.stringMatching(/Could not deserialize/) }), peerGatewayAddress: await cargo.senderCertificate.calculateSubjectPrivateAddress(), worker: STUB_WORKER_NAME, - }, - `Cargo payload is invalid`, + }), ); expect(stanMessage.ack).toBeCalledTimes(1); @@ -266,18 +277,17 @@ describe('Parcel processing', () => { await CERT_CHAIN.privateGatewayCert.calculateSubjectPrivateAddress(), 
MOCK_MONGOOSE_CONNECTION, mockNatsClient, - mockLogger, + mockLogging.logger, ); - expect(mockLogger.debug).toBeCalledWith( - { + expect(mockLogging.logs).toContainEqual( + partialPinoLog('debug', 'Parcel was stored', { cargoId: cargo.id, parcelId: PARCEL.id, parcelObjectKey: `parcels/${PARCEL.id}`, parcelSenderAddress: await PARCEL.senderCertificate.calculateSubjectPrivateAddress(), peerGatewayAddress: await CERT_CHAIN.privateGatewayCert.calculateSubjectPrivateAddress(), worker: STUB_WORKER_NAME, - }, - 'Parcel was stored', + }), ); }); @@ -291,16 +301,15 @@ describe('Parcel processing', () => { await processIncomingCrcCargo(STUB_WORKER_NAME); - expect(mockLogger.debug).toBeCalledWith( - { + expect(mockLogging.logs).toContainEqual( + partialPinoLog('debug', 'Ignoring previously processed parcel', { cargoId: cargo.id, parcelId: PARCEL.id, parcelObjectKey: null, parcelSenderAddress: await PARCEL.senderCertificate.calculateSubjectPrivateAddress(), peerGatewayAddress: await CERT_CHAIN.privateGatewayCert.calculateSubjectPrivateAddress(), worker: STUB_WORKER_NAME, - }, - 'Ignoring previously processed parcel', + }), ); }); @@ -313,14 +322,13 @@ describe('Parcel processing', () => { await processIncomingCrcCargo(STUB_WORKER_NAME); - expect(mockLogger.info).toBeCalledWith( - { + expect(mockLogging.logs).toContainEqual( + partialPinoLog('info', 'Parcel is invalid', { cargoId: cargo.id, - err: expect.any(InvalidMessageError), + err: expect.objectContaining({ type: InvalidMessageError.name }), peerGatewayAddress: await CERT_CHAIN.privateGatewayCert.calculateSubjectPrivateAddress(), worker: STUB_WORKER_NAME, - }, - 'Parcel is invalid', + }), ); }); @@ -394,14 +402,13 @@ test('Cargo containing invalid messages should be logged and ignored', async () await processIncomingCrcCargo(STUB_WORKER_NAME); const cargoSenderAddress = await CERT_CHAIN.privateGatewayCert.calculateSubjectPrivateAddress(); - expect(mockLogger.info).toBeCalledWith( - { + expect(mockLogging.logs).toContainEqual( + partialPinoLog('info', 'Cargo contains an invalid message', { cargoId: (await Cargo.deserialize(stubCargo1Serialized)).id, - error: expect.any(InvalidMessageError), + err: expect.objectContaining({ type: InvalidMessageError.name }), peerGatewayAddress: cargoSenderAddress, worker: STUB_WORKER_NAME, - }, - `Cargo contains an invalid message`, + }), ); }); diff --git a/src/services/crcQueueWorker.ts b/src/services/crcQueueWorker.ts index a896e2afa..2b1ddf7ee 100644 --- a/src/services/crcQueueWorker.ts +++ b/src/services/crcQueueWorker.ts @@ -12,18 +12,19 @@ import { get as getEnvVar } from 'env-var'; import pipe from 'it-pipe'; import { Connection } from 'mongoose'; import * as stan from 'node-nats-streaming'; -import pino from 'pino'; +import { Logger } from 'pino'; import { initVaultKeyStore } from '../backingServices/keyStores'; import { createMongooseConnectionFromEnv } from '../backingServices/mongo'; import { NatsStreamingClient } from '../backingServices/natsStreaming'; import { initObjectStoreFromEnv } from '../backingServices/objectStorage'; +import { makeLogger } from '../utilities/logging'; import { MongoPublicKeyStore } from './MongoPublicKeyStore'; import { ParcelStore } from './parcelStore'; -const LOGGER = pino(); - export async function processIncomingCrcCargo(workerName: string): Promise { + const logger = makeLogger(); + const natsStreamingClient = NatsStreamingClient.initFromEnv(workerName); const mongooseConnection = await createMongooseConnectionFromEnv(); @@ -45,7 +46,7 @@ export async function 
processIncomingCrcCargo(workerName: string): Promise // Vault is down or returned an unexpected response throw err; } - LOGGER.info( + logger.info( { cargoId: cargo.id, err, peerGatewayAddress, worker: workerName }, 'Cargo payload is invalid', ); @@ -57,9 +58,9 @@ export async function processIncomingCrcCargo(workerName: string): Promise let item: Parcel | ParcelCollectionAck; try { item = await CargoMessageSet.deserializeItem(itemSerialized); - } catch (error) { - LOGGER.info( - { cargoId: cargo.id, error, peerGatewayAddress, worker: workerName }, + } catch (err) { + logger.info( + { cargoId: cargo.id, err, peerGatewayAddress, worker: workerName }, 'Cargo contains an invalid message', ); continue; @@ -74,6 +75,7 @@ export async function processIncomingCrcCargo(workerName: string): Promise mongooseConnection, natsStreamingClient, workerName, + logger, ); } else { await parcelStore.deleteGatewayBoundParcel( @@ -109,6 +111,7 @@ async function processParcel( mongooseConnection: Connection, natsStreamingClient: NatsStreamingClient, workerName: string, + logger: Logger, ): Promise { let parcelObjectKey: string | null; try { @@ -118,18 +121,18 @@ async function processParcel( peerGatewayAddress, mongooseConnection, natsStreamingClient, - LOGGER, + logger, ); } catch (err) { if (err instanceof InvalidMessageError) { - LOGGER.info({ cargoId, err, peerGatewayAddress, worker: workerName }, 'Parcel is invalid'); + logger.info({ cargoId, err, peerGatewayAddress, worker: workerName }, 'Parcel is invalid'); return; } throw err; } - LOGGER.debug( + logger.debug( { cargoId, parcelId: parcel.id, diff --git a/src/services/internetBoundParcelsQueueWorker.spec.ts b/src/services/internetBoundParcelsQueueWorker.spec.ts index 7f5a77a99..b66f49b78 100644 --- a/src/services/internetBoundParcelsQueueWorker.spec.ts +++ b/src/services/internetBoundParcelsQueueWorker.spec.ts @@ -1,14 +1,19 @@ import * as pohttp from '@relaycorp/relaynet-pohttp'; import { EnvVarError } from 'env-var'; -import { arrayToAsyncIterable, mockPino, mockSpy } from '../_test_utils'; +import { + arrayToAsyncIterable, + makeMockLogging, + MockLogging, + mockSpy, + partialPinoLog, +} from '../_test_utils'; import { NatsStreamingClient } from '../backingServices/natsStreaming'; import * as objectStorage from '../backingServices/objectStorage'; +import * as logging from '../utilities/logging'; import { configureMockEnvVars, mockStanMessage, TOMORROW } from './_test_utils'; -import { ParcelStore, QueuedInternetBoundParcelMessage } from './parcelStore'; - -const MOCK_PINO = mockPino(); import { processInternetBoundParcels } from './internetBoundParcelsQueueWorker'; +import { ParcelStore, QueuedInternetBoundParcelMessage } from './parcelStore'; const OWN_POHTTP_ADDRESS = 'https://gateway.endpoint/'; @@ -49,6 +54,12 @@ const MOCK_DELETE_INTERNET_PARCEL = mockSpy( async () => undefined, ); +let mockLogging: MockLogging; +beforeAll(() => { + mockLogging = makeMockLogging(); +}); +const mockMakeLogger = mockSpy(jest.spyOn(logging, 'makeLogger'), () => mockLogging.logger); + describe('processInternetBoundParcels', () => { test.each(Object.keys(ENV_VARS))('Environment variable %s should be present', async (envVar) => { MOCK_ENV_VARS({ ...ENV_VARS, [envVar]: undefined }); @@ -58,6 +69,14 @@ describe('processInternetBoundParcels', () => { ).rejects.toBeInstanceOf(EnvVarError); }); + test('Logger should be configured', async () => { + MOCK_NATS_CLIENT.makeQueueConsumer.mockReturnValue(arrayToAsyncIterable([])); + + await 
processInternetBoundParcels(WORKER_NAME, OWN_POHTTP_ADDRESS); + + expect(mockMakeLogger).toBeCalledWith(); + }); + test('Expired parcels should be skipped and deleted from store', async () => { const aSecondAgo = new Date(); aSecondAgo.setSeconds(aSecondAgo.getSeconds() - 1); @@ -130,9 +149,10 @@ describe('processInternetBoundParcels', () => { expect(message.ack).toBeCalledTimes(1); expect(MOCK_DELETE_INTERNET_PARCEL).toBeCalledWith(QUEUE_MESSAGE_DATA.parcelObjectKey); - expect(MOCK_PINO.debug).toBeCalledWith( - { parcelObjectKey: QUEUE_MESSAGE_DATA.parcelObjectKey }, - 'Parcel was successfully delivered', + expect(mockLogging.logs).toContainEqual( + partialPinoLog('debug', 'Parcel was successfully delivered', { + parcelObjectKey: QUEUE_MESSAGE_DATA.parcelObjectKey, + }), ); }); @@ -146,9 +166,11 @@ describe('processInternetBoundParcels', () => { expect(message.ack).toBeCalledTimes(1); expect(MOCK_DELETE_INTERNET_PARCEL).toBeCalledWith(QUEUE_MESSAGE_DATA.parcelObjectKey); - expect(MOCK_PINO.info).toBeCalledWith( - { err, parcelObjectKey: QUEUE_MESSAGE_DATA.parcelObjectKey }, - 'Parcel was rejected as invalid', + expect(mockLogging.logs).toContainEqual( + partialPinoLog('info', 'Parcel was rejected as invalid', { + err: expect.objectContaining({ type: err.name }), + parcelObjectKey: QUEUE_MESSAGE_DATA.parcelObjectKey, + }), ); }); @@ -160,9 +182,11 @@ describe('processInternetBoundParcels', () => { await processInternetBoundParcels(WORKER_NAME, OWN_POHTTP_ADDRESS); - expect(MOCK_PINO.warn).toBeCalledWith( - { err, parcelObjectKey: QUEUE_MESSAGE_DATA.parcelObjectKey }, - 'Failed to deliver parcel', + expect(mockLogging.logs).toContainEqual( + partialPinoLog('warn', 'Failed to deliver parcel', { + err: expect.objectContaining({ type: err.name }), + parcelObjectKey: QUEUE_MESSAGE_DATA.parcelObjectKey, + }), ); expect(message.ack).not.toBeCalled(); expect(MOCK_DELETE_INTERNET_PARCEL).not.toBeCalled(); diff --git a/src/services/internetBoundParcelsQueueWorker.ts b/src/services/internetBoundParcelsQueueWorker.ts index 7b71f34ee..8c85f72cf 100644 --- a/src/services/internetBoundParcelsQueueWorker.ts +++ b/src/services/internetBoundParcelsQueueWorker.ts @@ -2,10 +2,10 @@ import { deliverParcel, PoHTTPInvalidParcelError } from '@relaycorp/relaynet-poh import { get as getEnvVar } from 'env-var'; import pipe from 'it-pipe'; import * as stan from 'node-nats-streaming'; -import pino from 'pino'; import { NatsStreamingClient } from '../backingServices/natsStreaming'; import { initObjectStoreFromEnv } from '../backingServices/objectStorage'; +import { makeLogger } from '../utilities/logging'; import { ParcelStore, QueuedInternetBoundParcelMessage } from './parcelStore'; interface ActiveParcelData { @@ -15,12 +15,12 @@ interface ActiveParcelData { readonly ack: () => void; } -const LOGGER = pino(); - export async function processInternetBoundParcels( workerName: string, ownPohttpAddress: string, ): Promise { + const logger = makeLogger(); + const parcelStoreBucket = getEnvVar('OBJECT_STORE_BUCKET').required().asString(); const parcelStore = new ParcelStore(initObjectStoreFromEnv(), parcelStoreBucket); @@ -53,7 +53,6 @@ export async function processInternetBoundParcels( parcelData.parcelObjectKey, ); - // tslint:disable-next-line:no-let let wasParcelDelivered = true; try { await deliverParcel(parcelData.parcelRecipientAddress, parcelSerialized, { @@ -62,12 +61,12 @@ export async function processInternetBoundParcels( } catch (err) { wasParcelDelivered = false; if (err instanceof PoHTTPInvalidParcelError) { - 
LOGGER.info( + logger.info( { err, parcelObjectKey: parcelData.parcelObjectKey }, 'Parcel was rejected as invalid', ); } else { - LOGGER.warn( + logger.warn( { err, parcelObjectKey: parcelData.parcelObjectKey }, 'Failed to deliver parcel', ); @@ -76,7 +75,7 @@ export async function processInternetBoundParcels( } if (wasParcelDelivered) { - LOGGER.debug( + logger.debug( { parcelObjectKey: parcelData.parcelObjectKey }, 'Parcel was successfully delivered', ); diff --git a/src/services/pohttp/routes.spec.ts b/src/services/pohttp/routes.spec.ts index 420af579f..1cd0eb189 100644 --- a/src/services/pohttp/routes.spec.ts +++ b/src/services/pohttp/routes.spec.ts @@ -64,6 +64,7 @@ jest.spyOn(ParcelStore, 'initFromEnv').mockReturnValue(mockParcelStore); describe('receiveParcel', () => { configureMockEnvVars({ ...MONGO_ENV_VARS, + GATEWAY_VERSION: '1.0.2', NATS_CLUSTER_ID: STUB_NATS_CLUSTER_ID, NATS_SERVER_URL: STUB_NATS_SERVER_URL, }); diff --git a/src/services/pohttp/routes.ts b/src/services/pohttp/routes.ts index 8c45d6337..62772c7c1 100644 --- a/src/services/pohttp/routes.ts +++ b/src/services/pohttp/routes.ts @@ -4,7 +4,7 @@ import { get as getEnvVar } from 'env-var'; import { FastifyInstance, FastifyReply } from 'fastify'; import { NatsStreamingClient } from '../../backingServices/natsStreaming'; -import { registerDisallowedMethods } from '../fastifyUtils'; +import { registerDisallowedMethods } from '../../utilities/fastify'; import { ParcelStore } from '../parcelStore'; export default async function registerRoutes( diff --git a/src/services/pohttp/server.spec.ts b/src/services/pohttp/server.spec.ts index ad930c1e5..5804a6e06 100644 --- a/src/services/pohttp/server.spec.ts +++ b/src/services/pohttp/server.spec.ts @@ -1,5 +1,5 @@ import { mockSpy } from '../../_test_utils'; -import * as fastifyUtils from '../fastifyUtils'; +import * as fastifyUtils from '../../utilities/fastify'; import { makeServer } from './server'; const mockFastifyInstance = {}; @@ -12,7 +12,6 @@ describe('makeServer', () => { test('Routes should be loaded', async () => { await makeServer(); - expect(mockConfigureFastify).toBeCalledTimes(1); expect(mockConfigureFastify).toBeCalledWith([require('./routes').default]); }); diff --git a/src/services/pohttp/server.ts b/src/services/pohttp/server.ts index 7e19892bd..7b4fdd139 100644 --- a/src/services/pohttp/server.ts +++ b/src/services/pohttp/server.ts @@ -1,6 +1,6 @@ import { FastifyInstance } from 'fastify'; -import { configureFastify } from '../fastifyUtils'; +import { configureFastify } from '../../utilities/fastify'; import routes from './routes'; /** diff --git a/src/services/poweb/_test_utils.ts b/src/services/poweb/_test_utils.ts index 04010c872..339482846 100644 --- a/src/services/poweb/_test_utils.ts +++ b/src/services/poweb/_test_utils.ts @@ -57,6 +57,7 @@ export function setUpCommonFixtures(): () => FixtureSet { mockEnvVars({ ...MONGO_ENV_VARS, GATEWAY_KEY_ID: gatewayCertificate.getSerialNumber().toString('base64'), + GATEWAY_VERSION: '1.0.2', }); }); diff --git a/src/services/poweb/healthcheck.ts b/src/services/poweb/healthcheck.ts index 64bfe1b6e..4a8894ffd 100644 --- a/src/services/poweb/healthcheck.ts +++ b/src/services/poweb/healthcheck.ts @@ -1,6 +1,6 @@ import { FastifyInstance } from 'fastify'; -import { registerDisallowedMethods } from '../fastifyUtils'; +import { registerDisallowedMethods } from '../../utilities/fastify'; export default async function registerRoutes( fastify: FastifyInstance, diff --git a/src/services/poweb/parcelDelivery.ts 
b/src/services/poweb/parcelDelivery.ts index afe2394a8..8f3db256e 100644 --- a/src/services/poweb/parcelDelivery.ts +++ b/src/services/poweb/parcelDelivery.ts @@ -9,8 +9,8 @@ import { FastifyInstance, FastifyLoggerInstance, FastifyReply } from 'fastify'; import { Connection } from 'mongoose'; import { NatsStreamingClient } from '../../backingServices/natsStreaming'; +import { registerDisallowedMethods } from '../../utilities/fastify'; import { retrieveOwnCertificates } from '../certs'; -import { registerDisallowedMethods } from '../fastifyUtils'; import { ParcelStore } from '../parcelStore'; import { CONTENT_TYPES } from './contentTypes'; import RouteOptions from './RouteOptions'; diff --git a/src/services/poweb/preRegistration.ts b/src/services/poweb/preRegistration.ts index 223ab521e..6858c7fd5 100644 --- a/src/services/poweb/preRegistration.ts +++ b/src/services/poweb/preRegistration.ts @@ -2,7 +2,7 @@ import { PrivateNodeRegistrationAuthorization } from '@relaycorp/relaynet-core'; import bufferToArray from 'buffer-to-arraybuffer'; import { FastifyInstance, FastifyReply } from 'fastify'; -import { registerDisallowedMethods } from '../fastifyUtils'; +import { registerDisallowedMethods } from '../../utilities/fastify'; import { CONTENT_TYPES } from './contentTypes'; import RouteOptions from './RouteOptions'; diff --git a/src/services/poweb/registration.ts b/src/services/poweb/registration.ts index a3cab3bf6..a7435ab55 100644 --- a/src/services/poweb/registration.ts +++ b/src/services/poweb/registration.ts @@ -9,8 +9,8 @@ import { import bufferToArray from 'buffer-to-arraybuffer'; import { FastifyInstance, FastifyReply } from 'fastify'; +import { registerDisallowedMethods } from '../../utilities/fastify'; import { sha256 } from '../../utils'; -import { registerDisallowedMethods } from '../fastifyUtils'; import { CONTENT_TYPES } from './contentTypes'; import RouteOptions from './RouteOptions'; diff --git a/src/services/poweb/server.spec.ts b/src/services/poweb/server.spec.ts index 43b03bd48..40b8bf3a1 100644 --- a/src/services/poweb/server.spec.ts +++ b/src/services/poweb/server.spec.ts @@ -1,7 +1,5 @@ -// tslint:disable:no-let - import { mockSpy } from '../../_test_utils'; -import * as fastifyUtils from '../fastifyUtils'; +import * as fastifyUtils from '../../utilities/fastify'; import { setUpCommonFixtures } from './_test_utils'; import RouteOptions from './RouteOptions'; import { makeServer } from './server'; diff --git a/src/services/poweb/server.ts b/src/services/poweb/server.ts index 5d4a4315d..0d5b324a4 100644 --- a/src/services/poweb/server.ts +++ b/src/services/poweb/server.ts @@ -3,7 +3,7 @@ import { get as getEnvVar } from 'env-var'; import { FastifyInstance, FastifyPluginCallback } from 'fastify'; import { initVaultKeyStore } from '../../backingServices/keyStores'; -import { configureFastify, FastifyLogger } from '../fastifyUtils'; +import { configureFastify, FastifyLogger } from '../../utilities/fastify'; import healthcheck from './healthcheck'; import parcelCollection from './parcelCollection'; import parcelDelivery from './parcelDelivery'; diff --git a/src/services/fastifyUtils.spec.ts b/src/utilities/fastify.spec.ts similarity index 79% rename from src/services/fastifyUtils.spec.ts rename to src/utilities/fastify.spec.ts index f748bc36b..add4ca9d8 100644 --- a/src/services/fastifyUtils.spec.ts +++ b/src/utilities/fastify.spec.ts @@ -3,9 +3,10 @@ import { fastify, FastifyInstance, FastifyPluginCallback } from 'fastify'; import pino from 'pino'; import { mockSpy, 
MONGO_ENV_VARS } from '../_test_utils'; -import { configureMockEnvVars, getMockContext, getMockInstance } from './_test_utils'; -import { MAX_RAMF_MESSAGE_SIZE } from './constants'; -import { configureFastify, runFastify } from './fastifyUtils'; +import { configureMockEnvVars, getMockContext, getMockInstance } from '../services/_test_utils'; +import { MAX_RAMF_MESSAGE_SIZE } from '../services/constants'; +import { configureFastify, runFastify } from './fastify'; +import * as logging from './logging'; const mockFastify: FastifyInstance = { listen: mockSpy(jest.fn()), @@ -22,43 +23,20 @@ afterAll(() => { const mockEnvVars = configureMockEnvVars(MONGO_ENV_VARS); +const mockMakeLogger = mockSpy(jest.spyOn(logging, 'makeLogger')); + const dummyRoutes: FastifyPluginCallback = () => null; describe('configureFastify', () => { test('Logger should be enabled by default', () => { configureFastify([dummyRoutes]); - const fastifyCallArgs = getMockContext(fastify).calls[0]; - expect(fastifyCallArgs[0]).toHaveProperty('logger', true); - }); - - test('Log level in LOG_LEVEL env var should be honoured if present', () => { - const loglevel = 'debug'; - mockEnvVars({ ...MONGO_ENV_VARS, LOG_LEVEL: loglevel }); - - configureFastify([dummyRoutes]); - - const fastifyCallArgs = getMockContext(fastify).calls[0]; - expect(fastifyCallArgs[0]).toHaveProperty('logger', { level: loglevel }); - }); - - test('Log level in LOG_LEVEL env var should be lower-cased if present', () => { - mockEnvVars({ ...MONGO_ENV_VARS, LOG_LEVEL: 'DEBUG' }); - - configureFastify([dummyRoutes]); - - const fastifyCallArgs = getMockContext(fastify).calls[0]; - expect(fastifyCallArgs[0]).toHaveProperty('logger', { level: 'debug' }); - }); - - test('LOG_LEVEL env var should be ignored if a custom logger is used', () => { - configureFastify([dummyRoutes], undefined, false); - - const fastifyCallArgs = getMockContext(fastify).calls[0]; - expect(fastifyCallArgs[0]).toHaveProperty('logger', false); + expect(mockMakeLogger).toBeCalledWith(); + const logger = getMockContext(mockMakeLogger).results[0].value; + expect(fastify).toBeCalledWith(expect.objectContaining({ logger })); }); - test('Custom logger should be supported', () => { + test('Custom logger should be honoured', () => { const customLogger = pino(); configureFastify([dummyRoutes], undefined, customLogger); diff --git a/src/services/fastifyUtils.ts b/src/utilities/fastify.ts similarity index 83% rename from src/services/fastifyUtils.ts rename to src/utilities/fastify.ts index b627a9aee..af57cc110 100644 --- a/src/services/fastifyUtils.ts +++ b/src/utilities/fastify.ts @@ -10,13 +10,14 @@ import { import { Logger } from 'pino'; import { getMongooseConnectionArgsFromEnv } from '../backingServices/mongo'; -import { MAX_RAMF_MESSAGE_SIZE } from './constants'; +import { MAX_RAMF_MESSAGE_SIZE } from '../services/constants'; +import { makeLogger } from './logging'; const DEFAULT_REQUEST_ID_HEADER = 'X-Request-Id'; const SERVER_PORT = 8080; const SERVER_HOST = '0.0.0.0'; -export type FastifyLogger = boolean | FastifyLoggerOptions | Logger; +export type FastifyLogger = FastifyLoggerOptions | Logger; export const HTTP_METHODS: readonly HTTPMethods[] = [ 'POST', @@ -55,11 +56,11 @@ export function registerDisallowedMethods( export async function configureFastify( routes: ReadonlyArray>, routeOptions?: RouteOptions, - logger: FastifyLogger = true, + logger?: FastifyLogger, ): Promise { const server = fastify({ bodyLimit: MAX_RAMF_MESSAGE_SIZE, - logger: getFinalLogger(logger), + logger: logger ?? 
makeLogger(), requestIdHeader: getEnvVar('REQUEST_ID_HEADER') .default(DEFAULT_REQUEST_ID_HEADER) .asString() @@ -80,14 +81,6 @@ export async function configureFastify { await fastifyInstance.listen({ host: SERVER_HOST, port: SERVER_PORT }); } diff --git a/src/utilities/logging.spec.ts b/src/utilities/logging.spec.ts new file mode 100644 index 000000000..39aaabd48 --- /dev/null +++ b/src/utilities/logging.spec.ts @@ -0,0 +1,100 @@ +import { getPinoOptions } from '@relaycorp/pino-cloud'; +import { EnvVarError } from 'env-var'; +import pino from 'pino'; + +import { configureMockEnvVars, getMockInstance } from '../services/_test_utils'; +import { makeLogger } from './logging'; + +const REQUIRED_ENV_VARS = { + GATEWAY_VERSION: '1.0.1', +}; +const mockEnvVars = configureMockEnvVars(REQUIRED_ENV_VARS); + +jest.mock('@relaycorp/pino-cloud', () => ({ + getPinoOptions: jest.fn().mockReturnValue({}), +})); + +describe('makeLogger', () => { + test('Log level should be info if LOG_LEVEL env var is absent', () => { + mockEnvVars(REQUIRED_ENV_VARS); + + const logger = makeLogger(); + + expect(logger).toHaveProperty('level', 'info'); + }); + + test('Log level in LOG_LEVEL env var should be honoured if present', () => { + const loglevel = 'debug'; + mockEnvVars({ ...REQUIRED_ENV_VARS, LOG_LEVEL: loglevel }); + + const logger = makeLogger(); + + expect(logger).toHaveProperty('level', loglevel); + }); + + test('Log level in LOG_LEVEL env var should be lower-cased if present', () => { + const loglevel = 'DEBUG'; + mockEnvVars({ ...REQUIRED_ENV_VARS, LOG_LEVEL: loglevel }); + + const logger = makeLogger(); + + expect(logger).toHaveProperty('level', loglevel.toLowerCase()); + }); + + test('GATEWAY_VERSION env var should be required', () => { + mockEnvVars({ ...REQUIRED_ENV_VARS, GATEWAY_VERSION: undefined }); + + expect(() => makeLogger()).toThrowWithMessage(EnvVarError, /GATEWAY_VERSION/); + }); + + test('Cloud logging options should be used', () => { + const messageKey = 'foo'; + getMockInstance(getPinoOptions).mockReturnValue({ messageKey }); + const logger = makeLogger(); + + expect(logger[pino.symbols.messageKeySym as any]).toEqual(messageKey); + }); + + test('App name should be set to LOG_ENV_NAME if present', () => { + const envName = 'env-name'; + mockEnvVars({ ...REQUIRED_ENV_VARS, LOG_ENV_NAME: envName }); + makeLogger(); + + expect(getPinoOptions).toBeCalledWith(undefined, expect.objectContaining({ name: envName })); + }); + + test('App name should be "relaynet-internet-gateway" if LOG_ENV_NAME if absent', () => { + makeLogger(); + + expect(getPinoOptions).toBeCalledWith( + undefined, + expect.objectContaining({ name: 'relaynet-internet-gateway' }), + ); + }); + + test('GATEWAY_VERSION should be passed to cloud logging config', () => { + makeLogger(); + + expect(getPinoOptions).toBeCalledWith( + undefined, + expect.objectContaining({ + version: REQUIRED_ENV_VARS.GATEWAY_VERSION, + }), + ); + }); + + test('LOG_TARGET env var should be honoured if present', () => { + const loggingTarget = 'the-logging-target'; + mockEnvVars({ ...REQUIRED_ENV_VARS, LOG_TARGET: loggingTarget }); + + makeLogger(); + + expect(getPinoOptions).toBeCalledWith(loggingTarget, expect.anything()); + }); + + test('Logging target should be unset if LOG_TARGET env var is absent', () => { + makeLogger(); + + expect(getPinoOptions).toBeCalledWith(undefined, expect.anything()); + }); +}); diff --git a/src/utilities/logging.ts b/src/utilities/logging.ts new file mode 100644 index 000000000..b663cc4c3 --- /dev/null +++ 
b/src/utilities/logging.ts @@ -0,0 +1,16 @@ +import { getPinoOptions, LoggingTarget } from '@relaycorp/pino-cloud'; +import { get as getEnvVar } from 'env-var'; +import pino, { Level, Logger } from 'pino'; + +const DEFAULT_APP_NAME = 'relaynet-internet-gateway'; + +export function makeLogger(): Logger { + const logTarget = getEnvVar('LOG_TARGET').asString(); + const gatewayVersion = getEnvVar('GATEWAY_VERSION').required().asString(); + const logEnvName = getEnvVar('LOG_ENV_NAME').default(DEFAULT_APP_NAME).asString(); + const appContext = { name: logEnvName, version: gatewayVersion }; + const cloudPinoOptions = getPinoOptions(logTarget as LoggingTarget, appContext); + + const logLevel = getEnvVar('LOG_LEVEL').default('info').asString().toLowerCase() as Level; + return pino({ ...cloudPinoOptions, level: logLevel }); +}
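
Note (illustrative, not part of the diff): with these changes, logging is configured through the new `logging` block in the chart values rather than the old top-level `logLevel`. The global ConfigMap renders it as the LOG_LEVEL, LOG_TARGET and LOG_ENV_NAME environment variables (plus GATEWAY_VERSION from the chart version), which the deployments already consume through their `envFrom` ConfigMap reference. A minimal values override exercising all three fields could look like the sketch below, assuming `gcp` remains the only supported target and treating `my-gateway` as a placeholder environment name:

    logging:
      level: debug          # one of: debug, info, warn, error, fatal (per values.schema.json)
      target: gcp           # optional; structures logs via @relaycorp/pino-cloud
      envName: my-gateway   # optional; defaults to "relaynet-internet-gateway"

At runtime, makeLogger() in src/utilities/logging.ts reads those variables and builds the pino instance now shared by the Fastify servers and the queue workers.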