From 9b89a90d1fae34343a4818f55782b6eca5830848 Mon Sep 17 00:00:00 2001
From: Roman Perera
Date: Thu, 20 Feb 2025 07:08:48 +0100
Subject: [PATCH 1/9] added v0.50.1 Cosmos SDK endpoints

---
 .env                                   | 28 +++++------
 alerter/src/api_wrappers/cosmos.py     | 58 +++++++++++++++++++++-
 alerter/src/monitors/cosmos.py         |  3 +-
 alerter/src/monitors/network/cosmos.py | 57 ++++++++++++++++++++-
 alerter/src/monitors/node/cosmos.py    | 68 +++++++++++++++++++++++++-
 docker-compose.yml                     |  2 +-
 6 files changed, 197 insertions(+), 19 deletions(-)

diff --git a/.env b/.env
index 6f6060b1..265f3afb 100644
--- a/.env
+++ b/.env
@@ -8,7 +8,7 @@
 # make sure that it obeys the subnet rules inside the docker-compose.yml file
 
 # UI IP configuration
-UI_ACCESS_IP=X.X.X.X
+UI_ACCESS_IP=1.1.1.1
 
 # Development configuration
 DEV_MODE=false
@@ -18,56 +18,56 @@
 DB_NAME=panicdb
 DB_PORT=27017
 
-DB_IP_REPLICA_1=172.18.0.2
+DB_IP_REPLICA_1=172.19.0.2
 DB_IP_REPLICA_1_TEST=172.19.0.2
 
-DB_IP_REPLICA_2=172.18.0.3
+DB_IP_REPLICA_2=172.19.0.3
 DB_IP_REPLICA_2_TEST=172.19.0.3
 
-DB_IP_REPLICA_3=172.18.0.4
+DB_IP_REPLICA_3=172.19.0.4
 DB_IP_REPLICA_3_TEST=172.19.0.4
 
-DB_IP_REPLICA_STARTUP=172.18.0.5
+DB_IP_REPLICA_STARTUP=172.19.0.5
 DB_IP_REPLICA_STARTUP_TEST=172.19.0.5
 
 # Alerter configuration
-ALERTER_IP=172.18.0.7
+ALERTER_IP=172.19.0.7
 UNIQUE_ALERTER_IDENTIFIER=panic_alerter
 
 # Redis configuration
-REDIS_IP=172.18.0.8
+REDIS_IP=172.19.0.8
 REDIS_IP_TEST=172.19.0.8
 REDIS_PORT=6379
 REDIS_DB=10
 REDIS_DB_TEST=11
 
 # RabbitMQ configuration
-RABBIT_IP=172.18.0.9
+RABBIT_IP=172.19.0.9
 RABBIT_IP_TEST=172.19.0.9
 RABBIT_PORT=5672
 
 # Health Checker configuration
-HEALTH_CHECKER_IP=172.18.0.10
+HEALTH_CHECKER_IP=172.19.0.10
 
 # Tests configuration
 TESTS_IP=172.19.0.11
 
 # UI configuration
-UI_DASHBOARD_IP=172.18.0.12
+UI_DASHBOARD_IP=172.19.0.12
 UI_DASHBOARD_PORT=3333
 
 # API configuration
-API_IP=172.18.0.13
+API_IP=172.19.0.13
 API_IP_TEST=172.19.0.13
 API_PORT=9000
 API_PORT_TEST=9001
 
 # Substrate API configuration
-SUBSTRATE_API_IP=172.18.0.14
+SUBSTRATE_API_IP=172.19.0.14
 SUBSTRATE_API_PORT=8080
 
 # Migration configuration
-MIGRATION_IP=172.18.0.15
+MIGRATION_IP=172.19.0.15
 
 # Logs configuration - Log files with {} are Python template strings, where {}
 # is replaced with text that makes the log file name specific to the process
@@ -122,4 +122,4 @@ ENABLE_LOG_ALERTS=True
 
 # Twilio Preferences
 TWIML=
-TWIML_IS_URL=false
\ No newline at end of file
+TWIML_IS_URL=false
diff --git a/alerter/src/api_wrappers/cosmos.py b/alerter/src/api_wrappers/cosmos.py
index ce30cd05..c3cf63c2 100644
--- a/alerter/src/api_wrappers/cosmos.py
+++ b/alerter/src/api_wrappers/cosmos.py
@@ -34,7 +34,7 @@ def get_syncing(self, cosmos_rest_url: str) -> Dict:
         :param cosmos_rest_url: The Cosmos REST url of the data source
-        :return: Retrieves data from the cosmos_rest_url/syncing endpoint
+        :return: Retrieves data from the cosmos_rest_url/cosmos/base/tendermint/v1beta1/syncing endpoint
         """
-        endpoint = cosmos_rest_url + '/syncing'
+        endpoint = cosmos_rest_url + '/cosmos/base/tendermint/v1beta1/syncing'
         return get_cosmos_json(endpoint=endpoint, logger=self.logger,
                                verify=self.verify, timeout=self.timeout)
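For reference, the syncing flag can be fetched directly from the new route. A minimal sketch using plain requests, assuming a hypothetical node REST server on localhost:1317; the response shape, {'syncing': <bool>}, is the same as the legacy /syncing route returned:

    import requests

    REST_URL = "http://localhost:1317"  # hypothetical node REST address

    # The bare '/syncing' route of older SDKs is replaced by the
    # namespaced Tendermint service route on newer releases.
    resp = requests.get(
        REST_URL + "/cosmos/base/tendermint/v1beta1/syncing", timeout=10)
    resp.raise_for_status()
    print(resp.json())  # e.g. {'syncing': False}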
@@ -87,6 +87,34 @@ def get_staking_validators_v0_42_6(
         return get_cosmos_json(endpoint=endpoint, logger=self.logger,
                                params=params, verify=self.verify,
                                timeout=self.timeout)
+
+    def get_staking_validators_v0_50_1(
+            self, cosmos_rest_url: str, validator_address: str = None,
+            params: Dict = None) -> Dict:
+        """
+        This function retrieves data from the
+        cosmos_rest_url/cosmos/staking/v1beta1/validators and
+        cosmos_rest_url/cosmos/staking/v1beta1/validators/{validatorAddr}
+        endpoints, depending on the provided function parameters. Note that
+        this function is only compatible with v0.50.1 of the Cosmos SDK;
+        unexpected behaviour may occur with other versions.
+        :param cosmos_rest_url: The Cosmos REST url of the data source
+        :param params: Parameters that need to be added to the endpoint
+        :param validator_address: The address of the validator you want to query
+        :return: Retrieves data from the
+        :        cosmos_rest_url/cosmos/staking/v1beta1/validators or
+        :        cosmos_rest_url/cosmos/staking/v1beta1/validators/{
+        :        validatorAddr} endpoints
+        """
+        cosmos_fn = (
+            '/cosmos/staking/v1beta1/validators' if validator_address is None
+            else '/cosmos/staking/v1beta1/validators/{}'.format(
+                validator_address)
+        )
+        endpoint = cosmos_rest_url + cosmos_fn
+        return get_cosmos_json(endpoint=endpoint, logger=self.logger,
+                               params=params, verify=self.verify,
+                               timeout=self.timeout)
 
     def get_proposals_v0_39_2(
             self, cosmos_rest_url: str, proposal_id: int = None,
@@ -137,6 +165,34 @@ def get_proposals_v0_42_6(
         return get_cosmos_json(endpoint=endpoint, logger=self.logger,
                                params=params, verify=self.verify,
                                timeout=self.timeout)
+
+    def get_proposals_v0_50_1(
+            self, cosmos_rest_url: str, proposal_id: int = None,
+            params: Dict = None) -> Dict:
+        """
+        This function retrieves data from the
+        cosmos_rest_url/cosmos/gov/v1/proposals and
+        cosmos_rest_url/cosmos/gov/v1/proposals/{proposalId}
+        endpoints, depending on the provided function parameters. Note that
+        this function is only compatible with v0.50.1 of the Cosmos SDK;
+        unexpected behaviour may occur with other versions.
+        :param cosmos_rest_url: The Cosmos REST url of the data source
+        :param params: Parameters that need to be added to the endpoint
+        :param proposal_id: The ID of the proposal you want to query
+        :return: Retrieves data from the
+        :        cosmos_rest_url/cosmos/gov/v1/proposals or
+        :        cosmos_rest_url/cosmos/gov/v1/proposals/{
+        :        proposalId} endpoints
+        """
+        cosmos_fn = (
+            '/cosmos/gov/v1/proposals' if proposal_id is None
+            else '/cosmos/gov/v1/proposals/{}'.format(
+                proposal_id)
+        )
+        endpoint = cosmos_rest_url + cosmos_fn
+        return get_cosmos_json(endpoint=endpoint, logger=self.logger,
+                               params=params, verify=self.verify,
+                               timeout=self.timeout)
 
     def execute_with_checks(self, function, args: List[Any], node_name: str,
                             sdk_version: str) -> Any:
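A minimal sketch of what the new wrapper hits on the wire, again assuming a hypothetical node REST server on localhost:1317. Note that governance queries are deliberately pointed at /cosmos/gov/v1 rather than /cosmos/gov/v1beta1, matching the paths the code above constructs:

    import requests

    REST_URL = "http://localhost:1317"  # hypothetical node REST address

    # /cosmos/gov/v1 is the current gov query path on newer SDKs; this is
    # the same route get_proposals_v0_50_1 builds when proposal_id is None.
    params = {"pagination.limit": "50"}
    page = requests.get(REST_URL + "/cosmos/gov/v1/proposals",
                        params=params, timeout=10).json()
    for proposal in page.get("proposals", []):
        print(proposal["id"], proposal["status"])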
diff --git a/alerter/src/monitors/cosmos.py b/alerter/src/monitors/cosmos.py
index f7742fd7..0d36ed8f 100644
--- a/alerter/src/monitors/cosmos.py
+++ b/alerter/src/monitors/cosmos.py
@@ -25,6 +25,7 @@
 _REST_VERSION_COSMOS_SDK_0_39_2 = 'v0.39.2'
 _REST_VERSION_COSMOS_SDK_0_42_6 = 'v0.42.6'
+_REST_VERSION_COSMOS_SDK_0_50_1 = 'v0.50.1'
 _VERSION_INCOMPATIBILITY_EXCEPTIONS = [
     IncorrectJSONRetrievedException, CosmosSDKVersionIncompatibleException,
     TendermintRPCIncompatibleException
 ]
@@ -48,7 +49,7 @@ def __init__(self, monitor_name: str, data_sources: List[CosmosNodeConfig],
         # This variable stores the latest REST version used to retrieve the
-        # data. By default, it is set to v0.42.6 of the Cosmos SDK.
-        self._last_rest_retrieval_version = _REST_VERSION_COSMOS_SDK_0_42_6
+        # data. By default, it is set to v0.50.1 of the Cosmos SDK.
+        self._last_rest_retrieval_version = _REST_VERSION_COSMOS_SDK_0_50_1
 
     @property
     def data_sources(self) -> List[CosmosNodeConfig]:
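The default above only seeds the first attempt; as the retrieval logic further down notes, the monitor first retries the version that worked last and otherwise falls back to the other supported versions. A standalone sketch of that ordering idea, with hypothetical names rather than the monitor's actual helpers:

    # Hypothetical sketch: each entry in `retrievals` maps a version string
    # to a callable returning (data, error). Try the last-known-good
    # version first, then the remaining supported versions.
    def retrieve_with_fallback(retrievals, last_version):
        ordering = [last_version] + [
            v for v in retrievals if v != last_version]
        for version in ordering:
            data, error = retrievals[version]()
            if error is None:
                return data, version  # becomes the new "last" version
        return None, last_version  # every supported version failed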
diff --git a/alerter/src/monitors/network/cosmos.py b/alerter/src/monitors/network/cosmos.py
index c8ceed40..302a50b5 100644
--- a/alerter/src/monitors/network/cosmos.py
+++ b/alerter/src/monitors/network/cosmos.py
@@ -8,7 +8,7 @@
 from src.configs.nodes.cosmos import CosmosNodeConfig
 from src.message_broker.rabbitmq import RabbitMQApi
 from src.monitors.cosmos import (
-    CosmosMonitor, _REST_VERSION_COSMOS_SDK_0_42_6,
+    _REST_VERSION_COSMOS_SDK_0_50_1, CosmosMonitor, _REST_VERSION_COSMOS_SDK_0_42_6,
     _REST_VERSION_COSMOS_SDK_0_39_2, _VERSION_INCOMPATIBILITY_EXCEPTIONS)
 from src.utils.constants.cosmos import (
     PROPOSAL_STATUS_UNSPECIFIED, PROPOSAL_STATUS_DEPOSIT_PERIOD,
@@ -197,6 +197,47 @@ def retrieval_process() -> Dict:
         return self._execute_cosmos_rest_retrieval_with_exceptions(
             retrieval_process, source_name, source_url,
             _REST_VERSION_COSMOS_SDK_0_42_6)
+
+    def _get_cosmos_rest_v0_50_1_indirect_data(
+            self, source: CosmosNodeConfig) -> Dict:
+        """
+        This function retrieves network specific metrics. To retrieve this
+        data we use version v0.50.1 of the Cosmos SDK for the REST server.
+        :param source: The chosen data source
+        :return: A dict containing all indirect metrics
+        :raises: CosmosSDKVersionIncompatibleException if the Cosmos SDK version
+        of the source is not compatible with v0.50.1
+        : CosmosRestServerApiCallException if an error occurs during an
+        API call
+        : DataReadingException if data cannot be read from the source
+        : CannotConnectWithDataSourceException if we cannot connect with
+        the data source
+        : InvalidUrlException if the URL of the data source does not have
+        a valid schema
+        : IncorrectJSONRetrievedException if the structure of the data
+        returned by the endpoints is not as expected. This could be
+        due to either a Tendermint or a Cosmos SDK update
+        """
+        source_url = source.cosmos_rest_url
+        source_name = source.node_name
+
+        def retrieval_process() -> Dict:
+            paginated_data = self._get_rest_data_with_pagination_keys(
+                self.cosmos_rest_server_api.get_proposals_v0_50_1,
+                [source_url, None], {}, source_name,
+                _REST_VERSION_COSMOS_SDK_0_50_1)
+
+            parsed_proposals = {'proposals': []}
+            for page in paginated_data:
+                for proposal in page['proposals']:
+                    parsed_proposals['proposals'].append(
+                        self._parse_proposal(proposal))
+
+            return parsed_proposals
+
+        return self._execute_cosmos_rest_retrieval_with_exceptions(
+            retrieval_process, source_name, source_url,
+            _REST_VERSION_COSMOS_SDK_0_50_1)
 
     def _get_cosmos_rest_indirect_data(self, source: CosmosNodeConfig,
                                        sdk_version: str) -> Dict:
@@ -216,6 +257,9 @@ def _get_cosmos_rest_indirect_data(self, source: CosmosNodeConfig,
             return self._get_cosmos_rest_v0_39_2_indirect_data(source)
         elif sdk_version == _REST_VERSION_COSMOS_SDK_0_42_6:
             return self._get_cosmos_rest_v0_42_6_indirect_data(source)
+        elif sdk_version == _REST_VERSION_COSMOS_SDK_0_50_1:
+            return self._get_cosmos_rest_v0_50_1_indirect_data(source)
+
         return {
             'proposals': None
@@ -269,6 +313,16 @@ def _get_cosmos_rest_v0_42_6_data(self) -> (
         """
         return self._get_cosmos_rest_version_data(
             _REST_VERSION_COSMOS_SDK_0_42_6)
+
+    def _get_cosmos_rest_v0_50_1_data(self) -> (
+            Dict, bool, Optional[Exception]):
+        """
+        This function calls self._get_cosmos_rest_version_data with
+        _REST_VERSION_COSMOS_SDK_0_50_1
+        :return: The return of self._get_cosmos_rest_version_data
+        """
+        return self._get_cosmos_rest_version_data(
+            _REST_VERSION_COSMOS_SDK_0_50_1)
 
     def _get_cosmos_rest_data(self) -> (Dict, bool, Optional[Exception]):
         """
@@ -283,6 +337,7 @@ def _get_cosmos_rest_data(self) -> (Dict, bool, Optional[Exception]):
         supported_retrievals = {
             _REST_VERSION_COSMOS_SDK_0_39_2: self._get_cosmos_rest_v0_39_2_data,
             _REST_VERSION_COSMOS_SDK_0_42_6: self._get_cosmos_rest_v0_42_6_data,
+            _REST_VERSION_COSMOS_SDK_0_50_1: self._get_cosmos_rest_v0_50_1_data
         }
 
     # First check whether REST data can be obtained using the last REST
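get_proposals_v0_50_1 returns one page at a time, so the monitor walks pages via _get_rest_data_with_pagination_keys. A hypothetical standalone sketch of that cursor walk, assuming a node REST server on localhost:1317: Cosmos REST responses carry a pagination.next_key cursor that is passed back until it comes back empty.

    import requests

    REST_URL = "http://localhost:1317"  # hypothetical node REST address

    # Hypothetical standalone version of the page walk; the monitor's
    # real helper also routes calls through execute_with_checks.
    def fetch_all_proposal_pages():
        pages, next_key = [], None
        while True:
            params = {"pagination.key": next_key} if next_key else {}
            page = requests.get(REST_URL + "/cosmos/gov/v1/proposals",
                                params=params, timeout=10).json()
            pages.append(page)
            next_key = page.get("pagination", {}).get("next_key")
            if not next_key:
                return pages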
diff --git a/alerter/src/monitors/node/cosmos.py b/alerter/src/monitors/node/cosmos.py
index b74aa49f..8a2703d8 100644
--- a/alerter/src/monitors/node/cosmos.py
+++ b/alerter/src/monitors/node/cosmos.py
@@ -14,7 +14,7 @@
 from src.configs.nodes.cosmos import CosmosNodeConfig
 from src.message_broker.rabbitmq import RabbitMQApi
 from src.monitors.cosmos import (
-    CosmosMonitor, _REST_VERSION_COSMOS_SDK_0_42_6,
+    _REST_VERSION_COSMOS_SDK_0_50_1, CosmosMonitor, _REST_VERSION_COSMOS_SDK_0_42_6,
     _REST_VERSION_COSMOS_SDK_0_39_2, _VERSION_INCOMPATIBILITY_EXCEPTIONS)
 from src.utils.constants.cosmos import (
     BOND_STATUS_BONDED, BOND_STATUS_UNBONDED, BOND_STATUS_UNBONDING,
@@ -249,6 +249,58 @@ def retrieval_process() -> Dict:
         return self._execute_cosmos_rest_retrieval_with_exceptions(
             retrieval_process, source_name, source_url,
             _REST_VERSION_COSMOS_SDK_0_42_6)
+
+    def _get_cosmos_rest_v0_50_1_indirect_data_validator(
+            self, source: CosmosNodeConfig) -> Dict:
+        """
+        This function retrieves node specific metrics using a different node
+        as data source. We do not use the node directly since the node may be
+        offline or syncing, in which case its data may be unreliable. Note
+        that as a last resort the manager may supply the node itself as data
+        source. To retrieve this data we use version v0.50.1 of the Cosmos
+        SDK for the REST server.
+        NOTE: In this function we are assuming that the node being
+        monitored is a validator.
+        :param source: The chosen data source
+        :return: A dict containing all indirect metrics
+        :raises: CosmosSDKVersionIncompatibleException if the Cosmos SDK version
+        of the source is not compatible with v0.50.1
+        : CosmosRestServerApiCallException if an error occurs during an
+        API call
+        : DataReadingException if data cannot be read from the source
+        : CannotConnectWithDataSourceException if we cannot connect with
+        the data source
+        : InvalidUrlException if the URL of the data source does not have
+        a valid schema
+        : IncorrectJSONRetrievedException if the structure of the data
+        returned by the endpoints is not as expected. This could be
+        due to either a Tendermint or a Cosmos SDK update
+        """
+        operator_address = self.node_config.operator_address
+        source_url = source.cosmos_rest_url
+        source_name = source.node_name
+
+        def retrieval_process() -> Dict:
+            staking_validators = \
+                self.cosmos_rest_server_api.execute_with_checks(
+                    self.cosmos_rest_server_api.get_staking_validators_v0_50_1,
+                    [source_url, operator_address, {}], source_name,
+                    _REST_VERSION_COSMOS_SDK_0_50_1)
+            bond_status = self._parse_validator_status(
+                staking_validators['validator']['status'])
+            return {
+                'bond_status': bond_status,
+
+                # The 'jailed' field is normally exposed in
+                # cosmos/staking/v1beta1/validators for v0.50.1 of the
+                # Cosmos SDK. If we encounter nodes on this version which
+                # do not expose it we might need to use
+                # /cosmos/slashing/v1beta1/signing_infos instead
+                'jailed': staking_validators['validator']['jailed'],
+            }
+
+        return self._execute_cosmos_rest_retrieval_with_exceptions(
+            retrieval_process, source_name, source_url,
+            _REST_VERSION_COSMOS_SDK_0_50_1)
 
     def _get_cosmos_rest_indirect_data(self, source: CosmosNodeConfig,
                                        sdk_version: str) -> Dict:
@@ -276,6 +328,9 @@ def _get_cosmos_rest_indirect_data(self, source: CosmosNodeConfig,
         elif sdk_version == _REST_VERSION_COSMOS_SDK_0_42_6:
             return self._get_cosmos_rest_v0_42_6_indirect_data_validator(
                 source)
+        elif sdk_version == _REST_VERSION_COSMOS_SDK_0_50_1:
+            return self._get_cosmos_rest_v0_50_1_indirect_data_validator(
+                source)
         else:
             return {
                 'bond_status': None,
@@ -344,6 +399,16 @@ def _get_cosmos_rest_v0_42_6_data(self) -> (
         """
         return self._get_cosmos_rest_version_data(
             _REST_VERSION_COSMOS_SDK_0_42_6)
+
+    def _get_cosmos_rest_v0_50_1_data(self) -> (
+            Dict, bool, Optional[Exception]):
+        """
+        This function calls self._get_cosmos_rest_version_data with
+        _REST_VERSION_COSMOS_SDK_0_50_1
+        :return: The return of self._get_cosmos_rest_version_data
+        """
+        return self._get_cosmos_rest_version_data(
+            _REST_VERSION_COSMOS_SDK_0_50_1)
 
     def _get_cosmos_rest_data(self) -> (Dict, bool, Optional[Exception]):
         """
@@ -358,6 +423,7 @@ def _get_cosmos_rest_data(self) -> (Dict, bool, Optional[Exception]):
         supported_retrievals = {
             _REST_VERSION_COSMOS_SDK_0_39_2: self._get_cosmos_rest_v0_39_2_data,
             _REST_VERSION_COSMOS_SDK_0_42_6: self._get_cosmos_rest_v0_42_6_data,
+            _REST_VERSION_COSMOS_SDK_0_50_1: self._get_cosmos_rest_v0_50_1_data,
         }
 
     # First check whether REST data can be obtained using the last REST
diff --git a/docker-compose.yml b/docker-compose.yml
index e3cbac4f..7c6d7da1 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -320,7 +320,7 @@ networks:
     ipam:
       driver: default
       config:
-        - subnet: 172.18.0.0/24
+        - subnet: 172.19.0.0/24
 
 volumes:
   db-data:

From 427304b92e62f63046877b1de074efad75cc7f76 Mon Sep 17 00:00:00 2001
From: Roman Perera
Date: Mon,
24 Feb 2025 18:14:14 +0100 Subject: [PATCH 2/9] fixed cosmos REST test / removed substrate-api (ONLY) container / .env set to DEV_MODE --- .env | 42 +- api/src/server.ts | 3061 ++++++++++++++++++----------------- docker-compose-tests.yml | 2 +- docker-compose.yml | 19 +- web-installer/src/server.js | 5 +- 5 files changed, 1617 insertions(+), 1512 deletions(-) diff --git a/.env b/.env index 265f3afb..58126986 100644 --- a/.env +++ b/.env @@ -11,63 +11,63 @@ UI_ACCESS_IP=1.1.1.1 # Development configuration -DEV_MODE=false +DEV_MODE=true # Mongo configuration DB_NAME=panicdb DB_PORT=27017 -DB_IP_REPLICA_1=172.19.0.2 -DB_IP_REPLICA_1_TEST=172.19.0.2 +DB_IP_REPLICA_1=172.18.0.2 +DB_IP_REPLICA_1_TEST=172.18.0.2 -DB_IP_REPLICA_2=172.19.0.3 -DB_IP_REPLICA_2_TEST=172.19.0.3 +DB_IP_REPLICA_2=172.18.0.3 +DB_IP_REPLICA_2_TEST=172.18.0.3 -DB_IP_REPLICA_3=172.19.0.4 -DB_IP_REPLICA_3_TEST=172.19.0.4 +DB_IP_REPLICA_3=172.18.0.4 +DB_IP_REPLICA_3_TEST=172.18.0.4 -DB_IP_REPLICA_STARTUP=172.19.0.5 -DB_IP_REPLICA_STARTUP_TEST=172.19.0.5 +DB_IP_REPLICA_STARTUP=172.18.0.5 +DB_IP_REPLICA_STARTUP_TEST=172.18.0.5 # Alerter configuration -ALERTER_IP=172.19.0.7 +ALERTER_IP=172.18.0.7 UNIQUE_ALERTER_IDENTIFIER=panic_alerter # Redis configuration -REDIS_IP=172.19.0.8 -REDIS_IP_TEST=172.19.0.8 +REDIS_IP=172.18.0.8 +REDIS_IP_TEST=172.18.0.8 REDIS_PORT=6379 REDIS_DB=10 REDIS_DB_TEST=11 # RabbitMQ configuration -RABBIT_IP=172.19.0.9 -RABBIT_IP_TEST=172.19.0.9 +RABBIT_IP=172.18.0.9 +RABBIT_IP_TEST=172.18.0.9 RABBIT_PORT=5672 # Health Checker configuration -HEALTH_CHECKER_IP=172.19.0.10 +HEALTH_CHECKER_IP=172.18.0.10 # Tests configuration -TESTS_IP=172.19.0.11 +TESTS_IP=172.18.0.11 # UI configuration -UI_DASHBOARD_IP=172.19.0.12 +UI_DASHBOARD_IP=172.18.0.12 UI_DASHBOARD_PORT=3333 # API configuration -API_IP=172.19.0.13 -API_IP_TEST=172.19.0.13 +API_IP=172.18.0.13 +API_IP_TEST=172.18.0.13 API_PORT=9000 API_PORT_TEST=9001 # Substrate API configuration -SUBSTRATE_API_IP=172.19.0.14 +SUBSTRATE_API_IP=172.18.0.14 SUBSTRATE_API_PORT=8080 # Migration configuration -MIGRATION_IP=172.19.0.15 +MIGRATION_IP=172.18.0.15 # Logs configuration - Log files with {} are Python template strings, where {} # is replaced with text that makes the log file name specific to the process diff --git a/api/src/server.ts b/api/src/server.ts index 498f7ae8..19ab8ced 100644 --- a/api/src/server.ts +++ b/api/src/server.ts @@ -1,180 +1,197 @@ import * as dotenv from "dotenv"; -import {readFile} from "./server/files"; +import { readFile } from "./server/files"; import path from "path"; import https from "https"; import { - AlertKeysDockerHubRepo, - AlertKeysGitHubRepo, - AlertKeysNode, - AlertKeysSystem, - AlertsOverviewAlertData, - AlertsOverviewInput, - AlertsOverviewResult, - GitHubKeys, - HttpsOptions, - isAlertsOverviewInputValid, - isRedisMetricsInputValid, - MetricsResult, - MonitorablesInfoResult, - RedisHashes, - RedisKeys, - RedisMetricsInput, - SystemKeys + AlertKeysDockerHubRepo, + AlertKeysGitHubRepo, + AlertKeysNode, + AlertKeysSystem, + AlertsOverviewAlertData, + AlertsOverviewInput, + AlertsOverviewResult, + GitHubKeys, + HttpsOptions, + isAlertsOverviewInputValid, + isRedisMetricsInputValid, + MetricsResult, + MonitorablesInfoResult, + RedisHashes, + RedisKeys, + RedisMetricsInput, + SystemKeys, } from "./server/types"; import { - CouldNotRetrieveDataFromMongo, - CouldNotRetrieveDataFromRedis, - EnvVariablesNotAvailable, - InvalidBaseChains, - InvalidEndpoint, - InvalidJsonSchema, - InvalidParameterValue, - InvalidValueRetrievedFromRedis, - 
MissingKeysInBody, - MongoClientNotInitialised, - RedisClientNotInitialised -} from './constant/errors' + CouldNotRetrieveDataFromMongo, + CouldNotRetrieveDataFromRedis, + EnvVariablesNotAvailable, + InvalidBaseChains, + InvalidEndpoint, + InvalidJsonSchema, + InvalidParameterValue, + InvalidValueRetrievedFromRedis, + MissingKeysInBody, + MongoClientNotInitialised, + RedisClientNotInitialised, +} from "./constant/errors"; import { - allElementsInList, - allElementsInListHaveTypeString, - errorJson, - fulfillWithTimeLimit, - getElementsNotInList, - missingValues, - resultJson, - toBool, - verifyNodeExporterPing, - verifyPrometheusPing, + allElementsInList, + allElementsInListHaveTypeString, + errorJson, + fulfillWithTimeLimit, + getElementsNotInList, + missingValues, + resultJson, + toBool, + verifyNodeExporterPing, + verifyPrometheusPing, } from "./server/utils"; import express from "express"; import cors from "cors"; import cookieParser from "cookie-parser"; import { - addPostfixToKeys, - addPrefixToKeys, - alertKeysChainSourced, - alertKeysChainSourcedWithUniqueIdentifier, - alertKeysClContractPrefix, - alertKeysClNodePrefix, - alertKeysCosmosNodePrefix, - alertKeysDockerHubPrefix, - alertKeysEvmNodePrefix, - alertKeysGitHubPrefix, - alertKeysSubstrateNodePrefix, - alertKeysSystemPrefix, - getAlertKeysDockerHubRepo, - getAlertKeysGitHubRepo, - getAlertKeysNode, - getAlertKeysSystem, - getGitHubKeys, - getRedisHashes, - getSystemKeys, - RedisInterface -} from "./server/redis" -import {MongoInterface, MonitorablesCollection} from "./server/mongo"; -import {MongoClientOptions} from "mongodb"; -import {GenericRoute} from "./v1/route/GenericRoute"; -import {baseChains, PingStatus, Severities, Status, testAlertMessage, Timeout} from "./constant/server"; -import {TimeoutError} from "./constant/server.feedback"; -import {MongoConnect} from "./v1/service/MongoConnect"; -import {event} from '@pagerduty/pdjs'; -import {ConfigRoute} from "./v1/route/ConfigRoute"; -import {InstallationRoute} from "./v1/route/InstallationRoute"; -import {BaseChainRoute} from "./v1/route/BaseChainRoute"; -import {ChannelRoute} from "./v1/route/ChannelRoute"; -import {GenericModel} from "./v1/entity/model/GenericModel"; -import {SeverityAlertSubconfigModel} from "./v1/entity/model/SeverityAlertSubconfigSchema"; -import {BaseChainModel} from "./v1/entity/model/BaseChainModel"; -import {Model} from "mongoose"; -import {MongooseUtil} from "./util/MongooseUtil"; -import {ThresholdAlertSubconfigModel} from "./v1/entity/model/ThresholdAlertSubconfigSchema"; -import {TimeWindowAlertSubconfigModel} from "./v1/entity/model/TimeWindowAlertSubconfigSchema"; - -const axios = require('axios'); -const opsgenie = require('opsgenie-sdk'); -const twilio = require('twilio'); -const {WebClient} = require('@slack/web-api'); -const Web3 = require('web3'); -const nodemailer = require('nodemailer'); -const swaggerUi = require('swagger-ui-express'); -const swaggerDocument = require('./swagger.json'); + addPostfixToKeys, + addPrefixToKeys, + alertKeysChainSourced, + alertKeysChainSourcedWithUniqueIdentifier, + alertKeysClContractPrefix, + alertKeysClNodePrefix, + alertKeysCosmosNodePrefix, + alertKeysDockerHubPrefix, + alertKeysEvmNodePrefix, + alertKeysGitHubPrefix, + alertKeysSubstrateNodePrefix, + alertKeysSystemPrefix, + getAlertKeysDockerHubRepo, + getAlertKeysGitHubRepo, + getAlertKeysNode, + getAlertKeysSystem, + getGitHubKeys, + getRedisHashes, + getSystemKeys, + RedisInterface, +} from "./server/redis"; +import { MongoInterface, 
MonitorablesCollection } from "./server/mongo"; +import { MongoClientOptions } from "mongodb"; +import { GenericRoute } from "./v1/route/GenericRoute"; +import { + baseChains, + PingStatus, + Severities, + Status, + testAlertMessage, + Timeout, +} from "./constant/server"; +import { TimeoutError } from "./constant/server.feedback"; +import { MongoConnect } from "./v1/service/MongoConnect"; +import { event } from "@pagerduty/pdjs"; +import { ConfigRoute } from "./v1/route/ConfigRoute"; +import { InstallationRoute } from "./v1/route/InstallationRoute"; +import { BaseChainRoute } from "./v1/route/BaseChainRoute"; +import { ChannelRoute } from "./v1/route/ChannelRoute"; +import { GenericModel } from "./v1/entity/model/GenericModel"; +import { SeverityAlertSubconfigModel } from "./v1/entity/model/SeverityAlertSubconfigSchema"; +import { BaseChainModel } from "./v1/entity/model/BaseChainModel"; +import { Model } from "mongoose"; +import { MongooseUtil } from "./util/MongooseUtil"; +import { ThresholdAlertSubconfigModel } from "./v1/entity/model/ThresholdAlertSubconfigSchema"; +import { TimeWindowAlertSubconfigModel } from "./v1/entity/model/TimeWindowAlertSubconfigSchema"; + +const axios = require("axios"); +const opsgenie = require("opsgenie-sdk"); +const twilio = require("twilio"); +const { WebClient } = require("@slack/web-api"); +const Web3 = require("web3"); +const nodemailer = require("nodemailer"); +const swaggerUi = require("swagger-ui-express"); +const swaggerDocument = require("./swagger.json"); // Use the environmental variables from the .env file dotenv.config(); // Import certificate files -const httpsKey: Buffer = readFile(path.join(__dirname, '../../', 'certificates', - 'key.pem')); -const httpsCert: Buffer = readFile(path.join(__dirname, '../../', - 'certificates', 'cert.pem')); +const httpsKey: Buffer = readFile( + path.join(__dirname, "../../", "certificates", "key.pem") +); +const httpsCert: Buffer = readFile( + path.join(__dirname, "../../", "certificates", "cert.pem") +); const httpsOptions: HttpsOptions = { - key: httpsKey, - cert: httpsCert, + key: httpsKey, + cert: httpsCert, }; // Server configuration const app = express(); -app.disable('x-powered-by'); +app.disable("x-powered-by"); app.use(express.json()); -app.use(express.static(path.join(__dirname, '../', 'build'))); +app.use(express.static(path.join(__dirname, "../", "build"))); app.use(cookieParser()); -app.use((err: any, req: express.Request, res: express.Response, - next: express.NextFunction) => { +app.use( + ( + err: any, + req: express.Request, + res: express.Response, + next: express.NextFunction + ) => { // This check makes sure this is a JSON parsing issue, but it might be // coming from any middleware, not just body-parser. 
- if (err instanceof SyntaxError && 'body' in err) { - console.error(err); - return res.sendStatus(Status.ERROR); // Bad request + if (err instanceof SyntaxError && "body" in err) { + console.error(err); + return res.sendStatus(Status.ERROR); // Bad request } next(); -}); + } +); //timeout -app.use((req: express.Request, res: express.Response, - next: express.NextFunction): void => { - +app.use( + ( + req: express.Request, + res: express.Response, + next: express.NextFunction + ): void => { let timeout = Timeout.MAX; if (req.query && req.query.timeout) { - timeout = parseInt(req.query.timeout as string) * 1000; - if (timeout < Timeout.MIN) { - timeout = Timeout.MIN; - } + timeout = parseInt(req.query.timeout as string) * 1000; + if (timeout < Timeout.MIN) { + timeout = Timeout.MIN; + } } res.setTimeout(timeout, () => { - const error = new TimeoutError(); - next(error); + const error = new TimeoutError(); + next(error); }); next(); -}); + } +); -app.use('/api-docs', swaggerUi.serve, swaggerUi.setup(swaggerDocument)); +app.use("/api-docs", swaggerUi.serve, swaggerUi.setup(swaggerDocument)); const allowedOrigins: string[] = []; const is_dev_mode = process.env.DEV_MODE && toBool(process.env.DEV_MODE); if (!process.env.UI_ACCESS_IP && !is_dev_mode) { - console.error('UI_ACCESS_IP environmental variable not specified,' + - ' stopping API.'); - process.exit(1); + console.error( + "UI_ACCESS_IP environmental variable not specified," + " stopping API." + ); + process.exit(1); } -const UI_PORT: string = process.env.UI_DASHBOARD_PORT ? - process.env.UI_DASHBOARD_PORT : '3333'; +const UI_PORT: string = process.env.UI_DASHBOARD_PORT + ? process.env.UI_DASHBOARD_PORT + : "3333"; -allowedOrigins.push( - `https://${process.env.UI_ACCESS_IP}:${UI_PORT}`); +allowedOrigins.push(`https://${process.env.UI_ACCESS_IP}:${UI_PORT}`); if (is_dev_mode) { - console.log('NOTE - Accepting connections from UI Dev Server.') - allowedOrigins.push( - `http://localhost:${UI_PORT}`); - allowedOrigins.push( - `https://localhost:${UI_PORT}`); - + console.log("NOTE - Accepting connections from UI Dev Server."); + allowedOrigins.push(`http://localhost:${UI_PORT}`); + allowedOrigins.push(`https://localhost:${UI_PORT}`); } -app.use(cors({origin: allowedOrigins})); +app.use(cors({ origin: allowedOrigins })); // Connect with Redis const redisHost = process.env.REDIS_IP || "localhost"; @@ -187,7 +204,7 @@ redisInterface.connect(); // Check the redis connection every 3 seconds. If the connection was dropped, // re-connect. const redisInterval = setInterval(() => { - redisInterface.connect(); + redisInterface.connect(); }, 3000); // Connect with Mongo @@ -195,18 +212,18 @@ const mongoHost = process.env.DB_IP || "localhost"; const mongoPort = parseInt(process.env.DB_PORT || "27017"); const mongoDB = process.env.DB_NAME || "panicdb"; const mongoOptions: MongoClientOptions = { - useNewUrlParser: true, - useUnifiedTopology: true, - socketTimeoutMS: 10000, - connectTimeoutMS: 10000, - serverSelectionTimeoutMS: 5000, + useNewUrlParser: true, + useUnifiedTopology: true, + socketTimeoutMS: 10000, + connectTimeoutMS: 10000, + serverSelectionTimeoutMS: 5000, }; const mongoInterface = new MongoInterface(mongoOptions, mongoHost, mongoPort); // Check the mongo connection every 3 seconds. If the connection was dropped, // re-connect. 
const mongoInterval = setInterval(async () => { - await mongoInterface.connect(); + await mongoInterface.connect(); }, 3000); MongoConnect.start().then(); @@ -215,15 +232,15 @@ MongoConnect.start().then(); // Please note that `SeverityAlertSubconfigModel` shares the `generics` collection with `GenericModel`. // For this reason in the logic below, `severity_alerts` is dependent on `generics`. const modelsAndFiles: Array<[Model, string]> = [ - [GenericModel, 'generics'], - [SeverityAlertSubconfigModel, 'severity_alerts'], - [ThresholdAlertSubconfigModel, 'threshold_alert'], - [TimeWindowAlertSubconfigModel, 'time_window_alert'], - [BaseChainModel, 'base_chains'] + [GenericModel, "generics"], + [SeverityAlertSubconfigModel, "severity_alerts"], + [ThresholdAlertSubconfigModel, "threshold_alert"], + [TimeWindowAlertSubconfigModel, "time_window_alert"], + [BaseChainModel, "base_chains"], ]; modelsAndFiles.forEach(async ([model, file]) => { - await MongooseUtil.populateModel(model, file); + await MongooseUtil.populateModel(model, file); }); // Routes @@ -237,1474 +254,1576 @@ new ChannelRoute(app); // This endpoint expects a list of base chains (cosmos, substrate, chainlink or // general) inside the body structure. -app.post('/server/mongo/monitorablesInfo', - async (req: express.Request, res: express.Response) => { - console.log('Received POST request for %s %s', req.url, req.body); - const baseChainsInput = req.body['baseChains']; - - // Check if some required keys are missing in the body object, if yes - // notify the client. - const missingKeysList: string[] = missingValues({ - baseChains: baseChainsInput - }); - if (missingKeysList.length !== 0) { - const err = new MissingKeysInBody(...missingKeysList); - res.status(err.code).send(errorJson(err.message)); - return; - } - // Check if the passed base chains are valid - if (Array.isArray(baseChainsInput)) { - if (!allElementsInList(baseChainsInput, baseChains)) { - const invalidBaseChains: string[] = getElementsNotInList( - baseChainsInput, baseChains); - const err = new InvalidBaseChains(...invalidBaseChains); - res.status(err.code).send(errorJson(err.message)); - return; - } - } else { - const invalidBaseChains: any[] = getElementsNotInList( - [baseChainsInput], baseChains); - const err = new InvalidBaseChains(...invalidBaseChains); - res.status(err.code).send(errorJson(err.message)); - return; - } - let result: MonitorablesInfoResult = resultJson({}); +app.post( + "/server/mongo/monitorablesInfo", + async (req: express.Request, res: express.Response) => { + console.log("Received POST request for %s %s", req.url, req.body); + const baseChainsInput = req.body["baseChains"]; - for (const [, baseChain] of Object.entries(baseChainsInput)) { - result.result[baseChain] = {}; - } + // Check if some required keys are missing in the body object, if yes + // notify the client. 
+ const missingKeysList: string[] = missingValues({ + baseChains: baseChainsInput, + }); + if (missingKeysList.length !== 0) { + const err = new MissingKeysInBody(...missingKeysList); + res.status(err.code).send(errorJson(err.message)); + return; + } + // Check if the passed base chains are valid + if (Array.isArray(baseChainsInput)) { + if (!allElementsInList(baseChainsInput, baseChains)) { + const invalidBaseChains: string[] = getElementsNotInList( + baseChainsInput, + baseChains + ); + const err = new InvalidBaseChains(...invalidBaseChains); + res.status(err.code).send(errorJson(err.message)); + return; + } + } else { + const invalidBaseChains: any[] = getElementsNotInList( + [baseChainsInput], + baseChains + ); + const err = new InvalidBaseChains(...invalidBaseChains); + res.status(err.code).send(errorJson(err.message)); + return; + } + let result: MonitorablesInfoResult = resultJson({}); + + for (const [, baseChain] of Object.entries(baseChainsInput)) { + result.result[baseChain] = {}; + } - if (mongoInterface.client) { - try { - const db = mongoInterface.client.db(mongoDB); - if (baseChainsInput.length > 0) { - const collection = db.collection(MonitorablesCollection); - const query = {_id: {$in: baseChainsInput}}; - const docs = await collection.find(query).toArray(); - for (const doc of docs) { - const baseChainData: any = result.result[doc._id]; - delete doc._id; - for (const parentID in doc) { - const chain = doc[parentID]; - const chainName = chain.chain_name; - baseChainData[chainName] = { - parent_id: parentID, - monitored: {} - }; - delete chain.chain_name; - for (const sourceType in chain) { - const monitored = baseChainData[chainName].monitored; - const chainSource = chain[sourceType]; - monitored[sourceType] = []; - for (const sourceID in chain[sourceType]) { - monitored[sourceType].push({ - [sourceID]: chainSource[sourceID].name - }); - } - } - } - } + if (mongoInterface.client) { + try { + const db = mongoInterface.client.db(mongoDB); + if (baseChainsInput.length > 0) { + const collection = db.collection(MonitorablesCollection); + const query = { _id: { $in: baseChainsInput } }; + const docs = await collection.find(query).toArray(); + for (const doc of docs) { + const baseChainData: any = result.result[doc._id]; + delete doc._id; + for (const parentID in doc) { + const chain = doc[parentID]; + const chainName = chain.chain_name; + baseChainData[chainName] = { + parent_id: parentID, + monitored: {}, + }; + delete chain.chain_name; + for (const sourceType in chain) { + const monitored = baseChainData[chainName].monitored; + const chainSource = chain[sourceType]; + monitored[sourceType] = []; + for (const sourceID in chain[sourceType]) { + monitored[sourceType].push({ + [sourceID]: chainSource[sourceID].name, + }); } - res.status(Status.SUCCESS).send(result); - return; - } catch (err) { - console.error(err); - const retrievalErr = new CouldNotRetrieveDataFromMongo(); - res.status(retrievalErr.code).send(errorJson( - retrievalErr.message)); - return; + } } - } else { - // This is done just for the sake of completion, as it is very - // unlikely to occur. 
- const err = new MongoClientNotInitialised(); - res.status(err.code).send(errorJson(err.message)); - return; + } } + res.status(Status.SUCCESS).send(result); + return; + } catch (err) { + console.error(err); + const retrievalErr = new CouldNotRetrieveDataFromMongo(); + res.status(retrievalErr.code).send(errorJson(retrievalErr.message)); + return; + } + } else { + // This is done just for the sake of completion, as it is very + // unlikely to occur. + const err = new MongoClientNotInitialised(); + res.status(err.code).send(errorJson(err.message)); + return; + } + } +); + +app.post( + "/server/mongo/alerts", + async (req: express.Request, res: express.Response) => { + console.log("Received POST request for %s", req.url); + const { + chains, + severities, + sources, + minTimestamp, + maxTimestamp, + noOfAlerts, + } = req.body; + + // Check that all parameters have been sent + const missingKeysList: string[] = missingValues({ + chains, + severities, + sources, + minTimestamp, + maxTimestamp, + noOfAlerts, }); + if (missingKeysList.length !== 0) { + const err = new MissingKeysInBody(...missingKeysList); + res.status(err.code).send(errorJson(err.message)); + return; + } -app.post('/server/mongo/alerts', - async (req: express.Request, res: express.Response) => { - console.log('Received POST request for %s', req.url); - const { - chains, - severities, - sources, - minTimestamp, - maxTimestamp, - noOfAlerts - } = req.body; - - // Check that all parameters have been sent - const missingKeysList: string[] = missingValues({ - chains, - severities, - sources, - minTimestamp, - maxTimestamp, - noOfAlerts - }); - if (missingKeysList.length !== 0) { - const err = new MissingKeysInBody(...missingKeysList); - res.status(err.code).send(errorJson(err.message)); - return; - } + // --------------------- Input Validation ------------------- - // --------------------- Input Validation ------------------- + const arrayBasedStringParams = { chains, severities, sources }; + for (const [param, value] of Object.entries(arrayBasedStringParams)) { + if (!Array.isArray(value) || !allElementsInListHaveTypeString(value)) { + const err = new InvalidParameterValue(`req.body.${param}`); + res.status(err.code).send(errorJson(err.message)); + return; + } + } - const arrayBasedStringParams = {chains, severities, sources}; - for (const [param, value] of Object.entries(arrayBasedStringParams)) { - if (!Array.isArray(value) || - !allElementsInListHaveTypeString(value)) { - const err = new InvalidParameterValue( - `req.body.${param}`); - res.status(err.code).send(errorJson(err.message)); - return; - } - } + for (const severity of severities) { + if (!(severity in Severities)) { + const err = new InvalidParameterValue("req.body.severities"); + res.status(err.code).send(errorJson(err.message)); + return; + } + } - for (const severity of severities) { - if (!(severity in Severities)) { - const err = new InvalidParameterValue( - 'req.body.severities'); - res.status(err.code).send(errorJson(err.message)); - return; - } - } + const positiveFloats = { minTimestamp, maxTimestamp }; + for (const [param, value] of Object.entries(positiveFloats)) { + const parsedFloat = parseFloat(value); + if (isNaN(parsedFloat) || parsedFloat < 0) { + const err = new InvalidParameterValue(`req.body.${param}`); + res.status(err.code).send(errorJson(err.message)); + return; + } + } + const parsedMinTimestamp = parseFloat(minTimestamp); + const parsedMaxTimestamp = parseFloat(maxTimestamp); + + const parsedNoOfAlerts = parseInt(noOfAlerts); + if 
(isNaN(parsedNoOfAlerts) || parsedNoOfAlerts <= 0) { + const err = new InvalidParameterValue("req.body.noOfAlerts"); + res.status(err.code).send(errorJson(err.message)); + return; + } - const positiveFloats = {minTimestamp, maxTimestamp}; - for (const [param, value] of Object.entries(positiveFloats)) { - const parsedFloat = parseFloat(value); - if (isNaN(parsedFloat) || parsedFloat < 0) { - const err = new InvalidParameterValue( - `req.body.${param}`); - res.status(err.code).send(errorJson(err.message)); - return; - } - } - const parsedMinTimestamp = parseFloat(minTimestamp); - const parsedMaxTimestamp = parseFloat(maxTimestamp); - - const parsedNoOfAlerts = parseInt(noOfAlerts); - if (isNaN(parsedNoOfAlerts) || parsedNoOfAlerts <= 0) { - const err = new InvalidParameterValue( - 'req.body.noOfAlerts'); - res.status(err.code).send(errorJson(err.message)); - return; + let result = resultJson({ alerts: [] }); + try { + if (mongoInterface.client) { + const db = mongoInterface.client.db(mongoDB); + if (chains.length > 0) { + let queryList: any = []; + for (let i = 1; i < chains.length; i++) { + queryList.push({ $unionWith: chains[i] }); + } + queryList.push( + { $match: { doc_type: "alert" } }, + { $unwind: "$alerts" }, + { + $match: { + "alerts.severity": { $in: severities }, + "alerts.origin": { $in: sources }, + "alerts.timestamp": { + $gte: parsedMinTimestamp, + $lte: parsedMaxTimestamp, + }, + }, + }, + { $sort: { "alerts.timestamp": -1, _id: 1 } }, + { $limit: parsedNoOfAlerts }, + { $group: { _id: null, alerts: { $push: "$alerts" } } }, + { $project: { _id: 0, alerts: "$alerts" } } + ); + const collection = db.collection(chains[0]); + const docs = await collection.aggregate(queryList).toArray(); + for (const doc of docs) { + result.result.alerts = result.result.alerts.concat(doc.alerts); + } } + res.status(Status.SUCCESS).send(result); + return; + } else { + // This is done just for the sake of completion, as it is very + // unlikely to occur. + const err = new MongoClientNotInitialised(); + res.status(err.code).send(errorJson(err.message)); + return; + } + } catch (err) { + console.error(err); + const retrievalErr = new CouldNotRetrieveDataFromMongo(); + res.status(retrievalErr.code).send(errorJson(retrievalErr.message)); + return; + } + } +); - let result = resultJson({alerts: []}); - try { - if (mongoInterface.client) { - const db = mongoInterface.client.db(mongoDB); - if (chains.length > 0) { - let queryList: any = []; - for (let i = 1; i < chains.length; i++) { - queryList.push({$unionWith: chains[i]}) - } - queryList.push( - {$match: {doc_type: "alert"}}, - {$unwind: "$alerts"}, - { - $match: { - "alerts.severity": {$in: severities}, - "alerts.origin": {$in: sources}, - "alerts.timestamp": { - "$gte": parsedMinTimestamp, - "$lte": parsedMaxTimestamp - } - } - }, - {$sort: {"alerts.timestamp": -1, _id: 1}}, - {$limit: parsedNoOfAlerts}, - {$group: {_id: null, alerts: {$push: "$alerts"}}}, - {$project: {_id: 0, alerts: "$alerts"}}, - ); - const collection = db.collection(chains[0]); - const docs = await collection.aggregate(queryList) - .toArray(); - for (const doc of docs) { - result.result.alerts = result.result.alerts.concat( - doc.alerts) - } - } - res.status(Status.SUCCESS).send(result); - return; - } else { - // This is done just for the sake of completion, as it is very - // unlikely to occur. 
- const err = new MongoClientNotInitialised(); - res.status(err.code).send(errorJson(err.message)); - return; - } - } catch (err) { - console.error(err); - const retrievalErr = new CouldNotRetrieveDataFromMongo(); - res.status(retrievalErr.code).send(errorJson( - retrievalErr.message)); - return; - } +app.post( + "/server/mongo/metrics", + async (req: express.Request, res: express.Response) => { + console.log("Received POST request for %s", req.url); + const { + chains, + systems, + minTimestamp, + maxTimestamp, + noOfMetricsPerSource, + } = req.body; + + // Check that all parameters have been sent + const missingKeysList: string[] = missingValues({ + chains, + systems, + minTimestamp, + maxTimestamp, + noOfMetricsPerSource, }); + if (missingKeysList.length !== 0) { + const err = new MissingKeysInBody(...missingKeysList); + res.status(err.code).send(errorJson(err.message)); + return; + } -app.post('/server/mongo/metrics', - async (req: express.Request, res: express.Response) => { - console.log('Received POST request for %s', req.url); - const { - chains, - systems, - minTimestamp, - maxTimestamp, - noOfMetricsPerSource - } = req.body; - - // Check that all parameters have been sent - const missingKeysList: string[] = missingValues({ - chains, - systems, - minTimestamp, - maxTimestamp, - noOfMetricsPerSource - }); - if (missingKeysList.length !== 0) { - const err = new MissingKeysInBody(...missingKeysList); - res.status(err.code).send(errorJson(err.message)); - return; - } + // --------------------- Input Validation ------------------- - // --------------------- Input Validation ------------------- + const arrayBasedStringParams = { chains, systems }; + for (const [param, value] of Object.entries(arrayBasedStringParams)) { + if (!Array.isArray(value) || !allElementsInListHaveTypeString(value)) { + const err = new InvalidParameterValue(`req.body.${param}`); + res.status(err.code).send(errorJson(err.message)); + return; + } + } - const arrayBasedStringParams = {chains, systems}; - for (const [param, value] of Object.entries(arrayBasedStringParams)) { - if (!Array.isArray(value) || - !allElementsInListHaveTypeString(value)) { - const err = new InvalidParameterValue( - `req.body.${param}`); - res.status(err.code).send(errorJson(err.message)); - return; - } - } + const positiveFloats = { minTimestamp, maxTimestamp }; + for (const [param, value] of Object.entries(positiveFloats)) { + const parsedFloat = parseFloat(value); + if (isNaN(parsedFloat) || parsedFloat < 0) { + const err = new InvalidParameterValue(`req.body.${param}`); + res.status(err.code).send(errorJson(err.message)); + return; + } + } + const parsedMinTimestamp = parseFloat(minTimestamp); + const parsedMaxTimestamp = parseFloat(maxTimestamp); + + const parsedNoOfMetricsPerSource = parseInt(noOfMetricsPerSource); + if (isNaN(parsedNoOfMetricsPerSource) || parsedNoOfMetricsPerSource <= 0) { + const err = new InvalidParameterValue("req.body.noOfMetricsPerSource"); + res.status(err.code).send(errorJson(err.message)); + return; + } - const positiveFloats = {minTimestamp, maxTimestamp}; - for (const [param, value] of Object.entries(positiveFloats)) { - const parsedFloat = parseFloat(value); - if (isNaN(parsedFloat) || parsedFloat < 0) { - const err = new InvalidParameterValue( - `req.body.${param}`); - res.status(err.code).send(errorJson(err.message)); - return; - } - } - const parsedMinTimestamp = parseFloat(minTimestamp); - const parsedMaxTimestamp = parseFloat(maxTimestamp); - - const parsedNoOfMetricsPerSource = 
parseInt(noOfMetricsPerSource); - if (isNaN(parsedNoOfMetricsPerSource) || parsedNoOfMetricsPerSource <= - 0) { - const err = new InvalidParameterValue( - 'req.body.noOfMetricsPerSource'); - res.status(err.code).send(errorJson(err.message)); - return; - } + // In the future, we need to retrieve node metrics, hence adding a + // nodes field. Also, a node ID can be present in both the systems and + // the nodes fields. + + let result = resultJson({ metrics: {} }); + try { + if (mongoInterface.client) { + const db = mongoInterface.client.db(mongoDB); + if (chains.length > 0) { + var queryPromise = new Promise((resolve, _) => { + systems.forEach( + async (source: string, i: number): Promise => { + let queryList: any = []; + for (let i = 1; i < chains.length; i++) { + queryList.push({ $unionWith: chains[i] }); + } - // In the future, we need to retrieve node metrics, hence adding a - // nodes field. Also, a node ID can be present in both the systems and - // the nodes fields. - - let result = resultJson({metrics: {}}); - try { - if (mongoInterface.client) { - const db = mongoInterface.client.db(mongoDB); - if (chains.length > 0) { - var queryPromise = new Promise((resolve, _) => { - systems.forEach(async (source: string, - i: number): Promise => { - let queryList: any = []; - for (let i = 1; i < chains.length; i++) { - queryList.push({$unionWith: chains[i]}) - } - - if (!(source in result.result.metrics)) { - result.result.metrics[source] = [] - } - - const originSource = "$".concat(source); - const timestampSource = source.concat(".timestamp"); - queryList.push( - {$match: {doc_type: "system"}}, - {$unwind: originSource}, - { - $match: { - [timestampSource]: { - "$gte": parsedMinTimestamp, - "$lte": parsedMaxTimestamp - } - } - }, - {$sort: {"timestamp": -1, _id: 1}}, - {$limit: parsedNoOfMetricsPerSource}, - ); - const collection = db.collection(chains[0]); - const docs = await collection.aggregate(queryList) - .toArray(); - for (const doc of docs) { - result.result.metrics[source] = - result.result.metrics[source].concat( - doc[source]) - } - if (i === systems.length - 1) resolve(); - }); - }); - - queryPromise.then(() => { - res.status(Status.SUCCESS).send(result); - return; - }); - } else { - res.status(Status.SUCCESS).send(result); - return; + if (!(source in result.result.metrics)) { + result.result.metrics[source] = []; } - } else { - // This is done just for the sake of completion, as it is very - // unlikely to occur. 
- const err = new MongoClientNotInitialised(); - res.status(err.code).send(errorJson(err.message)); - return; - } - } catch (err) { - console.error(err); - const retrievalErr = new CouldNotRetrieveDataFromMongo(); - res.status(retrievalErr.code).send(errorJson( - retrievalErr.message)); + + const originSource = "$".concat(source); + const timestampSource = source.concat(".timestamp"); + queryList.push( + { $match: { doc_type: "system" } }, + { $unwind: originSource }, + { + $match: { + [timestampSource]: { + $gte: parsedMinTimestamp, + $lte: parsedMaxTimestamp, + }, + }, + }, + { $sort: { timestamp: -1, _id: 1 } }, + { $limit: parsedNoOfMetricsPerSource } + ); + const collection = db.collection(chains[0]); + const docs = await collection.aggregate(queryList).toArray(); + for (const doc of docs) { + result.result.metrics[source] = result.result.metrics[ + source + ].concat(doc[source]); + } + if (i === systems.length - 1) resolve(); + } + ); + }); + + queryPromise.then(() => { + res.status(Status.SUCCESS).send(result); return; + }); + } else { + res.status(Status.SUCCESS).send(result); + return; } - }); + } else { + // This is done just for the sake of completion, as it is very + // unlikely to occur. + const err = new MongoClientNotInitialised(); + res.status(err.code).send(errorJson(err.message)); + return; + } + } catch (err) { + console.error(err); + const retrievalErr = new CouldNotRetrieveDataFromMongo(); + res.status(retrievalErr.code).send(errorJson(retrievalErr.message)); + return; + } + } +); // ---------------------------------------- Redis Endpoints // This endpoint expects a list of parent ids inside the body structure. -app.post('/server/redis/alertsOverview', - async (req: express.Request, res: express.Response) => { - console.log('Received POST request for %s', req.url); - const parentIds: AlertsOverviewInput = req.body.parentIds; - - // Check if some required keys are missing in the body object, if yes - // notify the client. - const missingKeysList: string[] = missingValues({parentIds}); - if (missingKeysList.length !== 0) { - const err = new MissingKeysInBody(...missingKeysList); - res.status(err.code).send(errorJson(err.message)); - return; - } +app.post( + "/server/redis/alertsOverview", + async (req: express.Request, res: express.Response) => { + console.log("Received POST request for %s", req.url); + const parentIds: AlertsOverviewInput = req.body.parentIds; + + // Check if some required keys are missing in the body object, if yes + // notify the client. + const missingKeysList: string[] = missingValues({ parentIds }); + if (missingKeysList.length !== 0) { + const err = new MissingKeysInBody(...missingKeysList); + res.status(err.code).send(errorJson(err.message)); + return; + } + + // Check if the passed dict is valid + if (!isAlertsOverviewInputValid(parentIds)) { + const err = new InvalidJsonSchema("req.body.parentIds"); + res.status(err.code).send(errorJson(err.message)); + return; + } - // Check if the passed dict is valid - if (!isAlertsOverviewInputValid(parentIds)) { - const err = new InvalidJsonSchema("req.body.parentIds"); - res.status(err.code).send(errorJson(err.message)); + // Construct the redis hashes and keys. The keys are only used to check + // whether they are available within redis since we get all keys. 
+ const redisHashes: RedisHashes = getRedisHashes(); + const redisHashesNamespace: RedisKeys = addPrefixToKeys( + redisHashes, + `${uniqueAlerterIdentifier}:` + ); + const redisHashesPostfix: RedisKeys = addPostfixToKeys( + redisHashesNamespace, + "_" + ); + const alertKeysSystem: AlertKeysSystem = getAlertKeysSystem(); + const alertKeysSystemPostfix: RedisKeys = addPostfixToKeys( + alertKeysSystem, + "_" + ); + const alertKeysNode: AlertKeysNode = getAlertKeysNode(); + const alertKeysNodePostfix: RedisKeys = addPostfixToKeys( + alertKeysNode, + "_" + ); + const alertKeysGitHubRepo: AlertKeysGitHubRepo = getAlertKeysGitHubRepo(); + const alertKeysGitHubRepoPostfix: RedisKeys = addPostfixToKeys( + alertKeysGitHubRepo, + "_" + ); + const alertKeysDockerHubRepo: AlertKeysDockerHubRepo = + getAlertKeysDockerHubRepo(); + const alertKeysDockerHubRepoPostfix: RedisKeys = addPostfixToKeys( + alertKeysDockerHubRepo, + "_" + ); + + let alertsData: AlertsOverviewAlertData[] = []; + let result: AlertsOverviewResult = resultJson({}); + + if (redisInterface.client) { + let encounteredError = false; + + // Using multi() means that all commands are only performed once + // exec() is called, and this is done atomically. + const redisMulti = redisInterface.client.multi(); + + for (const [parentId, sourcesObject] of Object.entries(parentIds)) { + const parentHash: string = `${redisHashesPostfix.parent}${parentId}`; + result.result[parentId] = { + info: 0, + critical: 0, + warning: 0, + error: 0, + problems: {}, + releases: {}, + tags: {}, + }; + + redisMulti.hgetall(parentHash, (err: Error | null, values: any) => { + if (err) { + console.error(err); + // Skip resolve if an error was already encountered + // since call is already resolved. + if (!encounteredError) { + encounteredError = true; + const retrievalErr = new CouldNotRetrieveDataFromRedis(err); + res + .status(retrievalErr.code) + .send(errorJson(retrievalErr.message)); + } return; - } + } + + if (values === null) { + values = {}; + } + + // Update info count with keys not found in retrieved keys. + sourcesObject.systems.forEach((systemId) => { + const constructedKeys: RedisKeys = addPostfixToKeys( + alertKeysSystemPostfix, + systemId + ); + Object.values(constructedKeys).forEach((key) => { + if (!(key in values)) { + result.result[parentId].info++; + } + }); + }); + sourcesObject.nodes.forEach((nodeId) => { + const constructedKeys: RedisKeys = addPostfixToKeys( + alertKeysNodePostfix, + nodeId + ); + Object.values(constructedKeys).forEach((key) => { + if (!(key in values)) { + result.result[parentId].info++; + } + }); + }); + sourcesObject.github_repos.forEach((repoId) => { + const constructedKeys: RedisKeys = addPostfixToKeys( + alertKeysGitHubRepoPostfix, + repoId + ); + Object.values(constructedKeys).forEach((key) => { + if (!(key in values)) { + result.result[parentId].info++; + } + }); + }); + sourcesObject.dockerhub_repos.forEach((repoId) => { + const constructedKeys: RedisKeys = addPostfixToKeys( + alertKeysDockerHubRepoPostfix, + repoId + ); + Object.values(constructedKeys).forEach((key) => { + if (!(key in values)) { + result.result[parentId].info++; + } + }); + }); + + for (const [key, value] of Object.entries(values)) { + // Skip checks if encountered error since + // call is already resolved. 
+ if (encounteredError) { + break; + } + let found = false; + + sourcesObject.systems.forEach((systemId) => { + if ( + !found && + key.includes(systemId) && + key.includes(alertKeysSystemPrefix) + ) { + found = true; + alertsData.push({ + parentId: parentId, + monitorableId: systemId, + key: key, + value: value, + }); + } + }); + if (!found) { + sourcesObject.nodes.forEach((nodeId) => { + if ( + !found && + key.includes(nodeId) && + (key.includes(alertKeysClNodePrefix) || + key.includes(alertKeysEvmNodePrefix) || + key.includes(alertKeysCosmosNodePrefix) || + key.includes(alertKeysSubstrateNodePrefix) || + key.includes(alertKeysClContractPrefix)) + ) { + found = true; + alertsData.push({ + parentId: parentId, + monitorableId: nodeId, + key: key, + value: value, + }); + } + }); + } + if (!found) { + sourcesObject.github_repos.forEach((repoId) => { + if ( + !found && + key.includes(repoId) && + key.includes(alertKeysGitHubPrefix) + ) { + found = true; + alertsData.push({ + parentId: parentId, + monitorableId: repoId, + key: key, + value: value, + }); + } + }); + } + if (!found) { + sourcesObject.dockerhub_repos.forEach((repoId) => { + if ( + !found && + key.includes(repoId) && + key.includes(alertKeysDockerHubPrefix) + ) { + found = true; + alertsData.push({ + parentId: parentId, + monitorableId: repoId, + key: key, + value: value, + }); + } + }); + } - // Construct the redis hashes and keys. The keys are only used to check - // whether they are available within redis since we get all keys. - const redisHashes: RedisHashes = getRedisHashes(); - const redisHashesNamespace: RedisKeys = addPrefixToKeys( - redisHashes, `${uniqueAlerterIdentifier}:`); - const redisHashesPostfix: RedisKeys = addPostfixToKeys( - redisHashesNamespace, '_'); - const alertKeysSystem: AlertKeysSystem = getAlertKeysSystem(); - const alertKeysSystemPostfix: RedisKeys = addPostfixToKeys( - alertKeysSystem, '_'); - const alertKeysNode: AlertKeysNode = getAlertKeysNode(); - const alertKeysNodePostfix: RedisKeys = addPostfixToKeys( - alertKeysNode, '_'); - const alertKeysGitHubRepo: AlertKeysGitHubRepo = - getAlertKeysGitHubRepo(); - const alertKeysGitHubRepoPostfix: RedisKeys = - addPostfixToKeys(alertKeysGitHubRepo, '_'); - const alertKeysDockerHubRepo: AlertKeysDockerHubRepo = - getAlertKeysDockerHubRepo(); - const alertKeysDockerHubRepoPostfix: RedisKeys = - addPostfixToKeys(alertKeysDockerHubRepo, '_'); - - let alertsData: AlertsOverviewAlertData[] = [] - let result: AlertsOverviewResult = resultJson({}); - - if (redisInterface.client) { - let encounteredError = false; - - // Using multi() means that all commands are only performed once - // exec() is called, and this is done atomically. - const redisMulti = redisInterface.client.multi(); - - for (const [parentId, sourcesObject] of Object.entries(parentIds)) { - const parentHash: string = - `${redisHashesPostfix.parent}${parentId}` - result.result[parentId] = { - "info": 0, - "critical": 0, - "warning": 0, - "error": 0, - "problems": {}, - "releases": {}, - "tags": {}, - }; - - redisMulti.hgetall(parentHash, (err: Error | null, values: any) => { - if (err) { - console.error(err); - // Skip resolve if an error was already encountered - // since call is already resolved. 
- if (!encounteredError) { - encounteredError = true; - const retrievalErr = - new CouldNotRetrieveDataFromRedis(err); - res.status(retrievalErr.code).send(errorJson( - retrievalErr.message)); - } - return; - } - - if (values === null) { - values = {}; - } - - // Update info count with keys not found in retrieved keys. - sourcesObject.systems.forEach((systemId) => { - const constructedKeys: RedisKeys = addPostfixToKeys( - alertKeysSystemPostfix, systemId); - Object.values(constructedKeys).forEach((key) => { - if (!(key in values)) { - result.result[parentId].info++; - } - }); - }); - sourcesObject.nodes.forEach((nodeId) => { - const constructedKeys: RedisKeys = addPostfixToKeys( - alertKeysNodePostfix, nodeId); - Object.values(constructedKeys).forEach((key) => { - if (!(key in values)) { - result.result[parentId].info++; - } - }); - }); - sourcesObject.github_repos.forEach((repoId) => { - const constructedKeys: RedisKeys = addPostfixToKeys( - alertKeysGitHubRepoPostfix, repoId); - Object.values(constructedKeys).forEach((key) => { - if (!(key in values)) { - result.result[parentId].info++; - } - }); - }); - sourcesObject.dockerhub_repos.forEach((repoId) => { - const constructedKeys: RedisKeys = addPostfixToKeys( - alertKeysDockerHubRepoPostfix, repoId); - Object.values(constructedKeys).forEach((key) => { - if (!(key in values)) { - result.result[parentId].info++; - } - }); - }); - - for (const [key, value] of Object.entries(values)) { - // Skip checks if encountered error since - // call is already resolved. - if (encounteredError) { - break; - } - let found = false; - - sourcesObject.systems.forEach((systemId) => { - if (!found && key.includes(systemId) && - key.includes(alertKeysSystemPrefix)) { - found = true; - alertsData.push({ - parentId: parentId, - monitorableId: systemId, - key: key, - value: value - }) - } - }); - if (!found) { - sourcesObject.nodes.forEach((nodeId) => { - if (!found && key.includes(nodeId) && - (key.includes(alertKeysClNodePrefix) - || key.includes(alertKeysEvmNodePrefix) - || (key.includes( - alertKeysCosmosNodePrefix)) - || (key.includes( - alertKeysSubstrateNodePrefix)) - || (key.includes( - alertKeysClContractPrefix))) - ) { - found = true; - alertsData.push({ - parentId: parentId, - monitorableId: nodeId, - key: key, - value: value - }) - } - }); - } - if (!found) { - sourcesObject.github_repos.forEach((repoId) => { - if (!found && key.includes(repoId) && - key.includes(alertKeysGitHubPrefix)) { - found = true; - alertsData.push({ - parentId: parentId, - monitorableId: repoId, - key: key, - value: value - }) - } - }); - } - if (!found) { - sourcesObject.dockerhub_repos.forEach((repoId) => { - if (!found && key.includes(repoId) && - key.includes(alertKeysDockerHubPrefix)) { - found = true; - alertsData.push({ - parentId: parentId, - monitorableId: repoId, - key: key, - value: value - }) - } - }); - } - - if (!found && sourcesObject.include_chain_sourced_alerts) { - if (alertKeysChainSourced.includes(key) || - alertKeysChainSourcedWithUniqueIdentifier.some( - alertKey => key.includes(alertKey))) { - alertsData.push({ - parentId: parentId, - monitorableId: parentId, - key: key, - value: value - }) - } - } - } + if (!found && sourcesObject.include_chain_sourced_alerts) { + if ( + alertKeysChainSourced.includes(key) || + alertKeysChainSourcedWithUniqueIdentifier.some((alertKey) => + key.includes(alertKey) + ) + ) { + alertsData.push({ + parentId: parentId, + monitorableId: parentId, + key: key, + value: value, }); + } } + } + }); + } - redisMulti.exec((err: Error 
| null, _: any) => { - if (err) { - console.error(err); - // Skip resolve if an error was already encountered - // since call is already resolved. - if (!encounteredError) { - encounteredError = true; - const retrievalErr = - new CouldNotRetrieveDataFromRedis(err); - res.status(retrievalErr.code).send(errorJson( - retrievalErr.message)); - } - return + redisMulti.exec((err: Error | null, _: any) => { + if (err) { + console.error(err); + // Skip resolve if an error was already encountered + // since call is already resolved. + if (!encounteredError) { + encounteredError = true; + const retrievalErr = new CouldNotRetrieveDataFromRedis(err); + res.status(retrievalErr.code).send(errorJson(retrievalErr.message)); + } + return; + } + // Skip resolve if encountered error since call is already + // resolved. + if (!encounteredError) { + const currentTimestamp = Math.floor(Date.now() / 1000); + alertsData.forEach( + (data: { + parentId: string; + monitorableId: string; + key: string; + value: string; + }) => { + // Skip checks if encountered error since + // call is already resolved. + if (encounteredError) { + return; + } + let value: any = null; + try { + value = JSON.parse(data.value); + } catch (err) { + // This is done just for the sake of + // completion, as it is very unlikely + // to occur. + const invalidValueErr = new InvalidValueRetrievedFromRedis( + data.value + ); + res + .status(invalidValueErr.code) + .send(errorJson(invalidValueErr.message)); + encounteredError = true; + return; + } + if ( + value && + value.constructor === Object && + "message" in value && + "severity" in value && + "expiry" in value + ) { + // Add array of problems if not + // initialised yet and there is indeed + // problems. + if ( + value.severity !== Severities.INFO && + !result.result[data.parentId].problems[data.monitorableId] + ) { + result.result[data.parentId].problems[data.monitorableId] = + []; } - // Skip resolve if encountered error since call is already - // resolved. - if (!encounteredError) { - const currentTimestamp = Math.floor(Date.now() / 1000); - alertsData.forEach((data: { - parentId: string, - monitorableId: string, - key: string, - value: string - }) => { - // Skip checks if encountered error since - // call is already resolved. - if (encounteredError) { - return; - } - let value: any = null; - try { - value = JSON.parse(data.value); - } catch (err) { - // This is done just for the sake of - // completion, as it is very unlikely - // to occur. - const invalidValueErr = - new InvalidValueRetrievedFromRedis(data.value); - res.status(invalidValueErr.code).send(errorJson( - invalidValueErr.message)); - encounteredError = true; - return; - } - if (value && value.constructor === Object && - "message" in value && "severity" in - value && "expiry" in value) { - // Add array of problems if not - // initialised yet and there is indeed - // problems. 
- if (value.severity !== Severities.INFO && - !result.result[data.parentId].problems[ - data.monitorableId]) { - result.result[data.parentId].problems[ - data.monitorableId] = [] - } - // If the alerter has detected a new - // release add it to the list of - // releases - const newReleaseKey: string = - addPostfixToKeys(alertKeysGitHubRepoPostfix, - data.monitorableId).github_release; - if (data.key === newReleaseKey) { - result.result[data.parentId].releases[ - data.monitorableId] = value - } - // If the alerter has detected a tag - // change, add it to the list of tags - const dockerHubTagsKeys = - addPostfixToKeys(alertKeysDockerHubRepoPostfix, - data.monitorableId); - const changedTagsKeys = [ - dockerHubTagsKeys.dockerhub_new_tag, - dockerHubTagsKeys.dockerhub_updated_tag, - dockerHubTagsKeys.dockerhub_deleted_tag - ]; - if (changedTagsKeys.includes(data.key)) { - if (!(data.monitorableId in - result.result[data.parentId].tags)) { - result.result[data.parentId].tags[ - data.monitorableId] = { - new: {}, - updated: {}, - deleted: {} - } - } - switch (data.key) { - case changedTagsKeys[0]: - result.result[data.parentId].tags[ - data.monitorableId] - ['new'] = value; - break; - case changedTagsKeys[1]: - result.result[data.parentId].tags[ - data.monitorableId] - ['updated'] = value; - break; - case changedTagsKeys[2]: - result.result[data.parentId].tags[ - data.monitorableId] - ['deleted'] = value; - break; - } - } - if (value.expiry && currentTimestamp >= - value.expiry) { - result.result[data.parentId].info++; - } else { - // Increase the counter and save the - // problems. - if (value.severity === Severities.INFO) { - result.result[data.parentId].info++; - } else if (value.severity === - Severities.CRITICAL) { - result.result[data.parentId].critical++; - result.result[data.parentId].problems[ - data.monitorableId].push(value) - } else if (value.severity === - Severities.WARNING) { - result.result[data.parentId].warning++; - result.result[data.parentId].problems[ - data.monitorableId].push(value) - } else if ( - value.severity === Severities.ERROR) { - result.result[data.parentId].error++; - result.result[data.parentId].problems[ - data.monitorableId].push(value) - } - } - } else { - // This is done just for the sake of - // completion, as it is very unlikely - // to occur. - const err = - new InvalidValueRetrievedFromRedis( - value); - res.status(err.code) - .send(errorJson(err.message)); - encounteredError = true; - return; - } - }) - - // Skip resolve if encountered error since call is already - // resolved. 
- if (!encounteredError) { - res.status(Status.SUCCESS).send(result); - } + // If the alerter has detected a new + // release add it to the list of + // releases + const newReleaseKey: string = addPostfixToKeys( + alertKeysGitHubRepoPostfix, + data.monitorableId + ).github_release; + if (data.key === newReleaseKey) { + result.result[data.parentId].releases[data.monitorableId] = + value; + } + // If the alerter has detected a tag + // change, add it to the list of tags + const dockerHubTagsKeys = addPostfixToKeys( + alertKeysDockerHubRepoPostfix, + data.monitorableId + ); + const changedTagsKeys = [ + dockerHubTagsKeys.dockerhub_new_tag, + dockerHubTagsKeys.dockerhub_updated_tag, + dockerHubTagsKeys.dockerhub_deleted_tag, + ]; + if (changedTagsKeys.includes(data.key)) { + if ( + !(data.monitorableId in result.result[data.parentId].tags) + ) { + result.result[data.parentId].tags[data.monitorableId] = { + new: {}, + updated: {}, + deleted: {}, + }; + } + switch (data.key) { + case changedTagsKeys[0]: + result.result[data.parentId].tags[data.monitorableId][ + "new" + ] = value; + break; + case changedTagsKeys[1]: + result.result[data.parentId].tags[data.monitorableId][ + "updated" + ] = value; + break; + case changedTagsKeys[2]: + result.result[data.parentId].tags[data.monitorableId][ + "deleted" + ] = value; + break; + } + } + if (value.expiry && currentTimestamp >= value.expiry) { + result.result[data.parentId].info++; + } else { + // Increase the counter and save the + // problems. + if (value.severity === Severities.INFO) { + result.result[data.parentId].info++; + } else if (value.severity === Severities.CRITICAL) { + result.result[data.parentId].critical++; + result.result[data.parentId].problems[ + data.monitorableId + ].push(value); + } else if (value.severity === Severities.WARNING) { + result.result[data.parentId].warning++; + result.result[data.parentId].problems[ + data.monitorableId + ].push(value); + } else if (value.severity === Severities.ERROR) { + result.result[data.parentId].error++; + result.result[data.parentId].problems[ + data.monitorableId + ].push(value); + } } + } else { + // This is done just for the sake of + // completion, as it is very unlikely + // to occur. + const err = new InvalidValueRetrievedFromRedis(value); + res.status(err.code).send(errorJson(err.message)); + encounteredError = true; return; - }); - } else { - // This is done just for the sake of completion, as it is very - // unlikely to occur. - const err = new RedisClientNotInitialised(); - res.status(err.code).send(errorJson(err.message)); - return; + } + } + ); + + // Skip resolve if encountered error since call is already + // resolved. + if (!encounteredError) { + res.status(Status.SUCCESS).send(result); + } } - }); + return; + }); + } else { + // This is done just for the sake of completion, as it is very + // unlikely to occur. + const err = new RedisClientNotInitialised(); + res.status(err.code).send(errorJson(err.message)); + return; + } + } +); // This endpoint returns metrics and their values, for the requested sources // and their chains -app.post('/server/redis/metrics', - async (req: express.Request, res: express.Response) => { - console.log('Received POST request for %s', req.url); - const parentIds: RedisMetricsInput = req.body.parentIds; - - // Check if some required keys are missing in the body object, if yes - // notify the client. 
- const missingKeysList: string[] = missingValues({parentIds}); - if (missingKeysList.length !== 0) { - const err = new MissingKeysInBody(...missingKeysList); - res.status(err.code).send(errorJson(err.message)); - return; - } +app.post( + "/server/redis/metrics", + async (req: express.Request, res: express.Response) => { + console.log("Received POST request for %s", req.url); + const parentIds: RedisMetricsInput = req.body.parentIds; - // Check if the passed dict is valid - if (!isRedisMetricsInputValid(parentIds)) { - const err = new InvalidJsonSchema("req.body.parentIds"); - res.status(err.code).send(errorJson(err.message)); - return; - } + // Check if some required keys are missing in the body object, if yes + // notify the client. + const missingKeysList: string[] = missingValues({ parentIds }); + if (missingKeysList.length !== 0) { + const err = new MissingKeysInBody(...missingKeysList); + res.status(err.code).send(errorJson(err.message)); + return; + } - // Construct the redis keys inside a JSON object indexed by parent hash - // and the system/repo id. We also need a way to map the hash to the - // parent id. - const parentHashKeys: { - [key: string]: { [key: string]: string[] } - } = {}; - const parentHashId: { [key: string]: string } = {}; - - const redisHashes: RedisHashes = getRedisHashes(); - const redisHashesNamespace: RedisKeys = addPrefixToKeys( - redisHashes, `${uniqueAlerterIdentifier}:`); - const redisHashesPostfix: RedisKeys = addPostfixToKeys( - redisHashesNamespace, '_'); - - const metricKeysSystem: SystemKeys = getSystemKeys(); - const metricKeysSystemPostfix: RedisKeys = addPostfixToKeys( - metricKeysSystem, '_'); - - const metricKeysGitHub: GitHubKeys = getGitHubKeys(); - const metricKeysGitHubPostfix: RedisKeys = addPostfixToKeys( - metricKeysGitHub, '_'); - - for (const [parentId, sourcesObject] of Object.entries(parentIds)) { - const parentHash: string = redisHashesPostfix.parent + parentId; - parentHashKeys[parentHash] = {}; - parentHashId[parentHash] = parentId; - sourcesObject.systems.forEach((systemId) => { - const constructedKeys: RedisKeys = addPostfixToKeys( - metricKeysSystemPostfix, systemId); - parentHashKeys[parentHash][systemId] = Object.values( - constructedKeys) - }); - sourcesObject.repos.forEach((repoId) => { - const constructedKeys: RedisKeys = addPostfixToKeys( - metricKeysGitHubPostfix, repoId); - parentHashKeys[parentHash][repoId] = Object.values( - constructedKeys) - }); - } + // Check if the passed dict is valid + if (!isRedisMetricsInputValid(parentIds)) { + const err = new InvalidJsonSchema("req.body.parentIds"); + res.status(err.code).send(errorJson(err.message)); + return; + } - let result: MetricsResult = resultJson({}); - if (redisInterface.client) { - // Using multi() means that all commands are only performed once - // exec() is called, and this is done atomically. - const redisMulti = redisInterface.client.multi(); - let encounteredError = false; - for (const [parentHash, monitorableKeysObject] of Object.entries( - parentHashKeys)) { - const parentId: string = parentHashId[parentHash]; - result.result[parentId] = { - "system": {}, - "github": {}, - }; - for (const [monitorableId, keysList] of - Object.entries(monitorableKeysObject)) { - // Skip checks if encountered error since call - // is already resolved. 
- if (encounteredError) { - break; - } - redisMulti.hmget(parentHash, keysList, - (err: Error | null, values: any) => { - if (err) { - console.error(err); - // Skip resolve if an error was already - // encountered since call is already resolved. - if (!encounteredError) { - encounteredError = true; - const retrievalErr = - new CouldNotRetrieveDataFromRedis(err); - res.status(retrievalErr.code).send( - errorJson(retrievalErr.message)); - } - return; - } - keysList.forEach( - (key: string, i: number): void => { - // Skip checks if encountered error since - // call is already resolved. - if (encounteredError) { - return; - } - // Must be stringified JSON.parse does not - // parse `None` - let value: any = null; - try { - value = JSON.parse(JSON.stringify( - values[i])); - } catch (err) { - // This is done just for the sake of - // completion, as it is very unlikely - // to occur. - const invalidValueErr = - new InvalidValueRetrievedFromRedis( - values[i]); - res.status(invalidValueErr.code) - .send(errorJson( - invalidValueErr.message)); - encounteredError = true; - return; - } - if (parentIds[parentId].systems.includes( - monitorableId)) { - if (!(monitorableId in - result.result[parentId].system)) { - result.result[parentId] - .system[monitorableId] = - {} - } - result.result[parentId] - .system[monitorableId][key.replace( - '_' + monitorableId, - '')] = value; - } else if (parentIds[parentId].repos - .includes(monitorableId)) { - if (!(monitorableId in - result.result[parentId].github)) { - result.result[parentId] - .github[monitorableId] = - {} - } - result.result[parentId] - .github[monitorableId][key.replace( - '_' + monitorableId, - '')] = value; - } - - // In the future, we need to retrieve node - // metrics, where a node ID can be present - // in both the systems and the nodes - // fields. - }); - }) - } - } - redisMulti.exec((err: Error | null, _: any) => { - if (err) { - console.error(err); - // Skip resolve if an error was already encountered - // since call is already resolved. - if (!encounteredError) { - encounteredError = true; - const retrievalErr = - new CouldNotRetrieveDataFromRedis(err); - res.status(retrievalErr.code).send(errorJson( - retrievalErr.message)); - } - return - } - // Skip resolve if encountered error since call is already - // resolved. + // Construct the redis keys inside a JSON object indexed by parent hash + // and the system/repo id. We also need a way to map the hash to the + // parent id. 
+ const parentHashKeys: { + [key: string]: { [key: string]: string[] }; + } = {}; + const parentHashId: { [key: string]: string } = {}; + + const redisHashes: RedisHashes = getRedisHashes(); + const redisHashesNamespace: RedisKeys = addPrefixToKeys( + redisHashes, + `${uniqueAlerterIdentifier}:` + ); + const redisHashesPostfix: RedisKeys = addPostfixToKeys( + redisHashesNamespace, + "_" + ); + + const metricKeysSystem: SystemKeys = getSystemKeys(); + const metricKeysSystemPostfix: RedisKeys = addPostfixToKeys( + metricKeysSystem, + "_" + ); + + const metricKeysGitHub: GitHubKeys = getGitHubKeys(); + const metricKeysGitHubPostfix: RedisKeys = addPostfixToKeys( + metricKeysGitHub, + "_" + ); + + for (const [parentId, sourcesObject] of Object.entries(parentIds)) { + const parentHash: string = redisHashesPostfix.parent + parentId; + parentHashKeys[parentHash] = {}; + parentHashId[parentHash] = parentId; + sourcesObject.systems.forEach((systemId) => { + const constructedKeys: RedisKeys = addPostfixToKeys( + metricKeysSystemPostfix, + systemId + ); + parentHashKeys[parentHash][systemId] = Object.values(constructedKeys); + }); + sourcesObject.repos.forEach((repoId) => { + const constructedKeys: RedisKeys = addPostfixToKeys( + metricKeysGitHubPostfix, + repoId + ); + parentHashKeys[parentHash][repoId] = Object.values(constructedKeys); + }); + } + + let result: MetricsResult = resultJson({}); + if (redisInterface.client) { + // Using multi() means that all commands are only performed once + // exec() is called, and this is done atomically. + const redisMulti = redisInterface.client.multi(); + let encounteredError = false; + for (const [parentHash, monitorableKeysObject] of Object.entries( + parentHashKeys + )) { + const parentId: string = parentHashId[parentHash]; + result.result[parentId] = { + system: {}, + github: {}, + }; + for (const [monitorableId, keysList] of Object.entries( + monitorableKeysObject + )) { + // Skip checks if encountered error since call + // is already resolved. + if (encounteredError) { + break; + } + redisMulti.hmget( + parentHash, + keysList, + (err: Error | null, values: any) => { + if (err) { + console.error(err); + // Skip resolve if an error was already + // encountered since call is already resolved. if (!encounteredError) { - res.status(Status.SUCCESS).send(result); + encounteredError = true; + const retrievalErr = new CouldNotRetrieveDataFromRedis(err); + res + .status(retrievalErr.code) + .send(errorJson(retrievalErr.message)); } return; - }); - } else { - // This is done just for the sake of completion, as it is very - // unlikely to occur. - const err = new RedisClientNotInitialised(); - res.status(err.code).send(errorJson(err.message)); - return; + } + keysList.forEach((key: string, i: number): void => { + // Skip checks if encountered error since + // call is already resolved. + if (encounteredError) { + return; + } + // Must be stringified JSON.parse does not + // parse `None` + let value: any = null; + try { + value = JSON.parse(JSON.stringify(values[i])); + } catch (err) { + // This is done just for the sake of + // completion, as it is very unlikely + // to occur. 
+ const invalidValueErr = new InvalidValueRetrievedFromRedis( + values[i] + ); + res + .status(invalidValueErr.code) + .send(errorJson(invalidValueErr.message)); + encounteredError = true; + return; + } + if (parentIds[parentId].systems.includes(monitorableId)) { + if (!(monitorableId in result.result[parentId].system)) { + result.result[parentId].system[monitorableId] = < + SystemKeys + >{}; + } + result.result[parentId].system[monitorableId][ + key.replace("_" + monitorableId, "") + ] = value; + } else if (parentIds[parentId].repos.includes(monitorableId)) { + if (!(monitorableId in result.result[parentId].github)) { + result.result[parentId].github[monitorableId] = < + GitHubKeys + >{}; + } + result.result[parentId].github[monitorableId][ + key.replace("_" + monitorableId, "") + ] = value; + } + + // In the future, we need to retrieve node + // metrics, where a node ID can be present + // in both the systems and the nodes + // fields. + }); + } + ); } - }); + } + redisMulti.exec((err: Error | null, _: any) => { + if (err) { + console.error(err); + // Skip resolve if an error was already encountered + // since call is already resolved. + if (!encounteredError) { + encounteredError = true; + const retrievalErr = new CouldNotRetrieveDataFromRedis(err); + res.status(retrievalErr.code).send(errorJson(retrievalErr.message)); + } + return; + } + // Skip resolve if encountered error since call is already + // resolved. + if (!encounteredError) { + res.status(Status.SUCCESS).send(result); + } + return; + }); + } else { + // This is done just for the sake of completion, as it is very + // unlikely to occur. + const err = new RedisClientNotInitialised(); + res.status(err.code).send(errorJson(err.message)); + return; + } + } +); // ---------------------------------------- Ping Endpoints // ----------------- Common -app.post('/server/common/node-exporter', - async (req: express.Request, res: express.Response) => { - console.log('Received POST request for %s %s', req.url, req.body); - const nodeExporterUrl = req.body['url']; - - // Check if some required keys are missing in the body object, if yes - // notify the client. - const missingKeysList: string[] = missingValues({ - url: nodeExporterUrl - }); - if (missingKeysList.length !== 0) { - const err = new MissingKeysInBody(...missingKeysList); - res.status(err.code).send(errorJson(err.message)); - return; - } - - const url = `${nodeExporterUrl}`; +app.post( + "/server/common/node-exporter", + async (req: express.Request, res: express.Response) => { + console.log("Received POST request for %s %s", req.url, req.body); + const nodeExporterUrl = req.body["url"]; - axios.get(url, {timeout: 3000}).then((response) => { - if (verifyNodeExporterPing(response.data)) { - res.status(Status.SUCCESS).send(resultJson(PingStatus.SUCCESS)); - } else { - res.status(Status.ERROR).send(resultJson(PingStatus.ERROR)); - } - }).catch((err) => { - if (err.code === 'ECONNABORTED') { - res.status(Status.TIMEOUT).send(resultJson(PingStatus.TIMEOUT)); - } else { - console.error(`Axios error: ${err.message}`); - res.status(Status.ERROR).send(resultJson(PingStatus.ERROR)); - } - }); + // Check if some required keys are missing in the body object, if yes + // notify the client. 
+ const missingKeysList: string[] = missingValues({ + url: nodeExporterUrl, }); + if (missingKeysList.length !== 0) { + const err = new MissingKeysInBody(...missingKeysList); + res.status(err.code).send(errorJson(err.message)); + return; + } -app.post('/server/common/prometheus', - async (req: express.Request, res: express.Response) => { - console.log('Received POST request for %s %s', req.url, req.body); - const url = req.body['url']; - const baseChain = req.body['baseChain']; - - // Check if some required keys are missing in the body object, if yes - // notify the client. - const missingKeysList: string[] = missingValues({url, baseChain}); - if (missingKeysList.length !== 0) { - const err = new MissingKeysInBody(...missingKeysList); - res.status(err.code).send(errorJson(err.message)); - return; - } + const url = `${nodeExporterUrl}`; - // At request level - const agent = new https.Agent({ - rejectUnauthorized: false - }); + axios + .get(url, { timeout: 3000 }) + .then((response) => { + if (verifyNodeExporterPing(response.data)) { + res.status(Status.SUCCESS).send(resultJson(PingStatus.SUCCESS)); + } else { + res.status(Status.ERROR).send(resultJson(PingStatus.ERROR)); + } + }) + .catch((err) => { + if (err.code === "ECONNABORTED") { + res.status(Status.TIMEOUT).send(resultJson(PingStatus.TIMEOUT)); + } else { + console.error(`Axios error: ${err.message}`); + res.status(Status.ERROR).send(resultJson(PingStatus.ERROR)); + } + }); + } +); - axios.get(url, {timeout: 3000, httpsAgent: agent}).then((response) => { - if (verifyPrometheusPing(response.data, baseChain)) { - res.status(Status.SUCCESS).send(resultJson(PingStatus.SUCCESS)); - } else { - res.status(Status.ERROR).send(resultJson(PingStatus.ERROR)); - } - }).catch((err) => { - if (err.code === 'ECONNABORTED') { - res.status(Status.TIMEOUT).send(resultJson(PingStatus.TIMEOUT)); - } else { - console.error(`Axios error: ${err.message}`); - res.status(Status.ERROR).send(resultJson(PingStatus.ERROR)); - } - }); - }); +app.post( + "/server/common/prometheus", + async (req: express.Request, res: express.Response) => { + console.log("Received POST request for %s %s", req.url, req.body); + const url = req.body["url"]; + const baseChain = req.body["baseChain"]; -// ----------------- Cosmos + // Check if some required keys are missing in the body object, if yes + // notify the client. + const missingKeysList: string[] = missingValues({ url, baseChain }); + if (missingKeysList.length !== 0) { + const err = new MissingKeysInBody(...missingKeysList); + res.status(err.code).send(errorJson(err.message)); + return; + } -app.post('/server/cosmos/rest', - async (req: express.Request, res: express.Response) => { - console.log('Received POST request for %s %s', req.url, req.body); - const cosmosRestUrl = req.body['url']; + // At request level + const agent = new https.Agent({ + rejectUnauthorized: false, + }); - // Check if some required keys are missing in the body object, if yes - // notify the client. 
- const missingKeysList: string[] = missingValues({ - url: cosmosRestUrl - }); - if (missingKeysList.length !== 0) { - const err = new MissingKeysInBody(...missingKeysList); - res.status(err.code).send(errorJson(err.message)); - return; + axios + .get(url, { timeout: 3000, httpsAgent: agent }) + .then((response) => { + if (verifyPrometheusPing(response.data, baseChain)) { + res.status(Status.SUCCESS).send(resultJson(PingStatus.SUCCESS)); + } else { + res.status(Status.ERROR).send(resultJson(PingStatus.ERROR)); + } + }) + .catch((err) => { + if (err.code === "ECONNABORTED") { + res.status(Status.TIMEOUT).send(resultJson(PingStatus.TIMEOUT)); + } else { + console.error(`Axios error: ${err.message}`); + res.status(Status.ERROR).send(resultJson(PingStatus.ERROR)); } + }); + } +); - const url = `${cosmosRestUrl}/node_info`; +// ----------------- Cosmos - axios.get(url, {timeout: 3000}).then((response) => { - if ('node_info' in response.data) { - res.status(Status.SUCCESS).send(resultJson(PingStatus.SUCCESS)); - } else { - res.status(Status.ERROR).send(resultJson(PingStatus.ERROR)); - } - }).catch((err) => { - if (err.code === 'ECONNABORTED') { - res.status(Status.TIMEOUT).send(resultJson(PingStatus.TIMEOUT)); - } else { - console.error(`Axios error: ${err.message}`); - res.status(Status.ERROR).send(resultJson(PingStatus.ERROR)); - } - }); +app.post( + "/server/cosmos/rest", + async (req: express.Request, res: express.Response) => { + console.log("Received POST request for %s %s", req.url, req.body); + const cosmosRestUrl = req.body["url"]; + + // Check if some required keys are missing in the body object, if yes + // notify the client. + const missingKeysList: string[] = missingValues({ + url: cosmosRestUrl, }); + if (missingKeysList.length !== 0) { + const err = new MissingKeysInBody(...missingKeysList); + res.status(err.code).send(errorJson(err.message)); + return; + } -app.post('/server/cosmos/tendermint-rpc', - async (req: express.Request, res: express.Response) => { - console.log('Received POST request for %s %s', req.url, req.body); - const tendermintRpcUrl = req.body['url']; + const url = `${cosmosRestUrl}/cosmos/base/tendermint/v1beta1/node_info`; - // Check if some required keys are missing in the body object, if yes - // notify the client. 
- const missingKeysList: string[] = missingValues({ - url: tendermintRpcUrl - }); - if (missingKeysList.length !== 0) { - const err = new MissingKeysInBody(...missingKeysList); - res.status(err.code).send(errorJson(err.message)); - return; + axios + .get(url, { timeout: 3000 }) + .then((response) => { + if ("default_node_info" in response.data) { + res.status(Status.SUCCESS).send(resultJson(PingStatus.SUCCESS)); + } else { + res.status(Status.ERROR).send(resultJson(PingStatus.ERROR)); + } + }) + .catch((err) => { + if (err.code === "ECONNABORTED") { + res.status(Status.TIMEOUT).send(resultJson(PingStatus.TIMEOUT)); + } else { + console.error(`Axios error: ${err.message}`); + res.status(Status.ERROR).send(resultJson(PingStatus.ERROR)); } + }); + } +); - const url = `${tendermintRpcUrl}/abci_info?`; +app.post( + "/server/cosmos/tendermint-rpc", + async (req: express.Request, res: express.Response) => { + console.log("Received POST request for %s %s", req.url, req.body); + const tendermintRpcUrl = req.body["url"]; - axios.get(url, {timeout: 3000}).then((response) => { - if ('jsonrpc' in response.data) { - res.status(Status.SUCCESS).send(resultJson(PingStatus.SUCCESS)); - } else { - res.status(Status.ERROR).send(resultJson(PingStatus.ERROR)); - } - }).catch((err) => { - if (err.code === 'ECONNABORTED') { - res.status(Status.TIMEOUT).send(resultJson(PingStatus.TIMEOUT)); - } else { - console.error(`Axios error: ${err.message}`); - res.status(Status.ERROR).send(resultJson(PingStatus.ERROR)); - } - }); + // Check if some required keys are missing in the body object, if yes + // notify the client. + const missingKeysList: string[] = missingValues({ + url: tendermintRpcUrl, }); + if (missingKeysList.length !== 0) { + const err = new MissingKeysInBody(...missingKeysList); + res.status(err.code).send(errorJson(err.message)); + return; + } -// ----------------- Substrate + const url = `${tendermintRpcUrl}/abci_info?`; -app.post('/server/substrate/websocket', - async (req: express.Request, res: express.Response) => { - console.log('Received POST request for %s %s', req.url, req.body); - const substrateWsUrl = req.body['url']; - // Check if some required keys are missing in the body object, if yes - // notify the client. 
- const missingKeysList: string[] = missingValues({ - url: substrateWsUrl - }); - if (missingKeysList.length !== 0) { - const err = new MissingKeysInBody(...missingKeysList); - res.status(err.code).send(errorJson(err.message)); - return; + axios + .get(url, { timeout: 3000 }) + .then((response) => { + if ("jsonrpc" in response.data) { + res.status(Status.SUCCESS).send(resultJson(PingStatus.SUCCESS)); + } else { + res.status(Status.ERROR).send(resultJson(PingStatus.ERROR)); } - - const substrateIp = process.env.SUBSTRATE_API_IP; - const substrateApi = process.env.SUBSTRATE_API_PORT; - - if (!substrateIp || !substrateApi) { - const err = new EnvVariablesNotAvailable('Substrate IP or Substrate API'); - console.error(err.message); - res.status(err.code).send(errorJson(err.message)); - return; + }) + .catch((err) => { + if (err.code === "ECONNABORTED") { + res.status(Status.TIMEOUT).send(resultJson(PingStatus.TIMEOUT)); + } else { + console.error(`Axios error: ${err.message}`); + res.status(Status.ERROR).send(resultJson(PingStatus.ERROR)); } + }); + } +); - // At request level - const agent = new https.Agent({ - rejectUnauthorized: false - }); - - const url = `https://${substrateIp}:${substrateApi}/api/rpc/system/syncState?websocket=${substrateWsUrl}`; +// ----------------- Substrate - axios.get(url, {timeout: 5000, httpsAgent: agent}).then((_) => { - res.status(Status.SUCCESS).send(resultJson(PingStatus.SUCCESS)); - }).catch((err) => { - if (err.code === 'ECONNABORTED') { - res.status(Status.TIMEOUT).send(resultJson(PingStatus.TIMEOUT)); - } else { - console.error(`Axios error: ${err.message}`); - res.status(Status.ERROR).send(resultJson(PingStatus.ERROR)); - } - }); +app.post( + "/server/substrate/websocket", + async (req: express.Request, res: express.Response) => { + console.log("Received POST request for %s %s", req.url, req.body); + const substrateWsUrl = req.body["url"]; + // Check if some required keys are missing in the body object, if yes + // notify the client. + const missingKeysList: string[] = missingValues({ + url: substrateWsUrl, }); + if (missingKeysList.length !== 0) { + const err = new MissingKeysInBody(...missingKeysList); + res.status(err.code).send(errorJson(err.message)); + return; + } -// ----------------- Ethereum + const substrateIp = process.env.SUBSTRATE_API_IP; + const substrateApi = process.env.SUBSTRATE_API_PORT; -app.post('/server/ethereum/rpc', - async (req: express.Request, res: express.Response) => { - console.log('Received POST request for %s %s', req.url, req.body); - const url = req.body['url']; - - // Check if some required keys are missing in the body object, if yes - // notify the client. 
- const missingKeysList: string[] = missingValues({url}); - if (missingKeysList.length !== 0) { - const err = new MissingKeysInBody(...missingKeysList); - res.status(err.code).send(errorJson(err.message)); - return; - } - - try { - const web3 = new Web3(new Web3.providers.HttpProvider(url)); - const data = await fulfillWithTimeLimit(web3.eth.getBlockNumber(), 3000, -1); + if (!substrateIp || !substrateApi) { + const err = new EnvVariablesNotAvailable("Substrate IP or Substrate API"); + console.error(err.message); + res.status(err.code).send(errorJson(err.message)); + return; + } - if (data === -1) { - res.status(Status.TIMEOUT).send(resultJson(PingStatus.TIMEOUT)); - } else { - res.status(Status.SUCCESS).send(resultJson(PingStatus.SUCCESS)); - } - } catch (err) { - console.error(`Web3 error: ${err.message}`); - res.status(Status.ERROR).send(resultJson(PingStatus.ERROR)); - } + // At request level + const agent = new https.Agent({ + rejectUnauthorized: false, }); -// ---------------------------------------- Send Test Alert Endpoints + const url = `https://${substrateIp}:${substrateApi}/api/rpc/system/syncState?websocket=${substrateWsUrl}`; -// ----------------- Channels - -app.post('/server/channels/opsgenie', - async (req: express.Request, res: express.Response) => { - console.log('Received POST request for %s %s', req.url, req.body); - const apiKey = req.body['apiKey']; - const eu = req.body['eu']; - - // Check if some required keys are missing in the body object, if yes - // notify the client. - const missingKeysList: string[] = missingValues({apiKey, eu}); - if (missingKeysList.length !== 0) { - const err = new MissingKeysInBody(...missingKeysList); - res.status(err.code).send(errorJson(err.message)); - return; + axios + .get(url, { timeout: 5000, httpsAgent: agent }) + .then((_) => { + res.status(Status.SUCCESS).send(resultJson(PingStatus.SUCCESS)); + }) + .catch((err) => { + if (err.code === "ECONNABORTED") { + res.status(Status.TIMEOUT).send(resultJson(PingStatus.TIMEOUT)); + } else { + console.error(`Axios error: ${err.message}`); + res.status(Status.ERROR).send(resultJson(PingStatus.ERROR)); } + }); + } +); - // If the eu=true set the host to the opsgenie EU url otherwise the sdk will - // run into an authentication error. - const host = toBool(String(eu)) ? 'https://api.eu.opsgenie.com' : 'https://api.opsgenie.com'; +// ----------------- Ethereum - // Create OpsGenie client and test alert message - opsgenie.configure({api_key: apiKey, host}); +app.post( + "/server/ethereum/rpc", + async (req: express.Request, res: express.Response) => { + console.log("Received POST request for %s %s", req.url, req.body); + const url = req.body["url"]; - // Test alert object - const alertObject = { - message: testAlertMessage, - description: testAlertMessage, - priority: 'P5', - }; + // Check if some required keys are missing in the body object, if yes + // notify the client. 
+ const missingKeysList: string[] = missingValues({ url }); + if (missingKeysList.length !== 0) { + const err = new MissingKeysInBody(...missingKeysList); + res.status(err.code).send(errorJson(err.message)); + return; + } - // Send test alert - opsgenie.alertV2.create(alertObject, (err, _) => { - if (err) { - console.error(err); - res.status(Status.ERROR).send(resultJson(PingStatus.ERROR)); - } else { - res.status(Status.SUCCESS).send(resultJson(PingStatus.SUCCESS)); - } - }); - }); + try { + const web3 = new Web3(new Web3.providers.HttpProvider(url)); + const data = await fulfillWithTimeLimit( + web3.eth.getBlockNumber(), + 3000, + -1 + ); + + if (data === -1) { + res.status(Status.TIMEOUT).send(resultJson(PingStatus.TIMEOUT)); + } else { + res.status(Status.SUCCESS).send(resultJson(PingStatus.SUCCESS)); + } + } catch (err) { + console.error(`Web3 error: ${err.message}`); + res.status(Status.ERROR).send(resultJson(PingStatus.ERROR)); + } + } +); -app.post('/server/channels/slack', - async (req: express.Request, res: express.Response) => { - console.log('Received POST request for %s %s', req.url, req.body); - const botToken = req.body['botToken']; - const botChannelId = req.body['botChannelId']; - - // Check if some required keys are missing in the body object, if yes - // notify the client. - const missingKeysList: string[] = missingValues({botToken, botChannelId}); - if (missingKeysList.length !== 0) { - const err = new MissingKeysInBody(...missingKeysList); - res.status(err.code).send(errorJson(err.message)); - return; - } +// ---------------------------------------- Send Test Alert Endpoints - // Create Slack web client and test alert message - const client = new WebClient(botToken); +// ----------------- Channels - // Test alert object - const alertObject = { - text: testAlertMessage, - channel: botChannelId - }; +app.post( + "/server/channels/opsgenie", + async (req: express.Request, res: express.Response) => { + console.log("Received POST request for %s %s", req.url, req.body); + const apiKey = req.body["apiKey"]; + const eu = req.body["eu"]; - // Send test alert - try { - await client.chat.postMessage(alertObject); - } catch (err) { - console.error(err); - res.status(Status.ERROR).send(resultJson(PingStatus.ERROR)); - return; - } + // Check if some required keys are missing in the body object, if yes + // notify the client. + const missingKeysList: string[] = missingValues({ apiKey, eu }); + if (missingKeysList.length !== 0) { + const err = new MissingKeysInBody(...missingKeysList); + res.status(err.code).send(errorJson(err.message)); + return; + } + // If the eu=true set the host to the opsgenie EU url otherwise the sdk will + // run into an authentication error. + const host = toBool(String(eu)) + ? 
"https://api.eu.opsgenie.com" + : "https://api.opsgenie.com"; + + // Create OpsGenie client and test alert message + opsgenie.configure({ api_key: apiKey, host }); + + // Test alert object + const alertObject = { + message: testAlertMessage, + description: testAlertMessage, + priority: "P5", + }; + + // Send test alert + opsgenie.alertV2.create(alertObject, (err, _) => { + if (err) { + console.error(err); + res.status(Status.ERROR).send(resultJson(PingStatus.ERROR)); + } else { res.status(Status.SUCCESS).send(resultJson(PingStatus.SUCCESS)); + } }); + } +); -app.post('/server/channels/telegram', - async (req: express.Request, res: express.Response) => { - console.log('Received POST request for %s %s', req.url, req.body); - const botToken = req.body['botToken']; - const botChatId = req.body['botChatId']; - - // Check if some required keys are missing in the body object, if yes - // notify the client. - const missingKeysList: string[] = missingValues({botToken, botChatId}); - if (missingKeysList.length !== 0) { - const err = new MissingKeysInBody(...missingKeysList); - res.status(err.code).send(errorJson(err.message)); - return; - } +app.post( + "/server/channels/slack", + async (req: express.Request, res: express.Response) => { + console.log("Received POST request for %s %s", req.url, req.body); + const botToken = req.body["botToken"]; + const botChannelId = req.body["botChannelId"]; - const url = `https://api.telegram.org/bot${botToken}/sendMessage`; - const params = { - chat_id: botChatId, - text: testAlertMessage, - parse_mode: 'Markdown' - } + // Check if some required keys are missing in the body object, if yes + // notify the client. + const missingKeysList: string[] = missingValues({ botToken, botChannelId }); + if (missingKeysList.length !== 0) { + const err = new MissingKeysInBody(...missingKeysList); + res.status(err.code).send(errorJson(err.message)); + return; + } - axios.get(url, {timeout: 3000, params}).then(() => { - res.status(Status.SUCCESS).send(resultJson(PingStatus.SUCCESS)); - }).catch((err) => { - console.error(err); - if (err.code === 'ECONNABORTED') { - res.status(Status.TIMEOUT).send(resultJson(PingStatus.TIMEOUT)); - } else { - res.status(Status.ERROR).send(resultJson(PingStatus.ERROR)); - } - }); - }); + // Create Slack web client and test alert message + const client = new WebClient(botToken); + + // Test alert object + const alertObject = { + text: testAlertMessage, + channel: botChannelId, + }; + + // Send test alert + try { + await client.chat.postMessage(alertObject); + } catch (err) { + console.error(err); + res.status(Status.ERROR).send(resultJson(PingStatus.ERROR)); + return; + } -app.post('/server/channels/pagerduty', - async (req, res) => { - console.log('Received POST request for %s', req.url); - const integrationKey = req.body['integrationKey']; + res.status(Status.SUCCESS).send(resultJson(PingStatus.SUCCESS)); + } +); + +app.post( + "/server/channels/telegram", + async (req: express.Request, res: express.Response) => { + console.log("Received POST request for %s %s", req.url, req.body); + const botToken = req.body["botToken"]; + const botChatId = req.body["botChatId"]; // Check if some required keys are missing in the body object, if yes // notify the client. 
- const missingKeysList: string[] = missingValues({integrationKey}); + const missingKeysList: string[] = missingValues({ botToken, botChatId }); if (missingKeysList.length !== 0) { - const err = new MissingKeysInBody(...missingKeysList); - res.status(err.code).send(errorJson(err.message)); - return; + const err = new MissingKeysInBody(...missingKeysList); + res.status(err.code).send(errorJson(err.message)); + return; } - // Send test alert event - event({ - data: { - routing_key: integrationKey, - event_action: 'trigger', - payload: { - summary: testAlertMessage, - source: 'PANIC', - severity: 'info', - }, - } - }).then(() => { + const url = `https://api.telegram.org/bot${botToken}/sendMessage`; + const params = { + chat_id: botChatId, + text: testAlertMessage, + parse_mode: "Markdown", + }; + + axios + .get(url, { timeout: 3000, params }) + .then(() => { res.status(Status.SUCCESS).send(resultJson(PingStatus.SUCCESS)); - }).catch((err) => { + }) + .catch((err) => { console.error(err); - res.status(Status.ERROR).send(resultJson(PingStatus.ERROR)); + if (err.code === "ECONNABORTED") { + res.status(Status.TIMEOUT).send(resultJson(PingStatus.TIMEOUT)); + } else { + res.status(Status.ERROR).send(resultJson(PingStatus.ERROR)); + } + }); + } +); + +app.post("/server/channels/pagerduty", async (req, res) => { + console.log("Received POST request for %s", req.url); + const integrationKey = req.body["integrationKey"]; + + // Check if some required keys are missing in the body object, if yes + // notify the client. + const missingKeysList: string[] = missingValues({ integrationKey }); + if (missingKeysList.length !== 0) { + const err = new MissingKeysInBody(...missingKeysList); + res.status(err.code).send(errorJson(err.message)); + return; + } + + // Send test alert event + event({ + data: { + routing_key: integrationKey, + event_action: "trigger", + payload: { + summary: testAlertMessage, + source: "PANIC", + severity: "info", + }, + }, + }) + .then(() => { + res.status(Status.SUCCESS).send(resultJson(PingStatus.SUCCESS)); + }) + .catch((err) => { + console.error(err); + res.status(Status.ERROR).send(resultJson(PingStatus.ERROR)); }); }); -app.post('/server/channels/twilio', - async (req: express.Request, res: express.Response) => { - console.log('Received POST request for %s %s', req.url, req.body); - const accountSid = req.body['accountSid']; - const authToken = req.body['authToken']; - const twilioPhoneNumber = req.body['twilioPhoneNumber']; - const phoneNumberToDial = req.body['phoneNumberToDial']; - - // Check if some required keys are missing in the body object, if yes - // notify the client. - const missingKeysList: string[] = missingValues({ - accountSid, authToken, twilioPhoneNumber, phoneNumberToDial - }); - if (missingKeysList.length !== 0) { - const err = new MissingKeysInBody(...missingKeysList); - res.status(err.code).send(errorJson(err.message)); - return; - } +app.post( + "/server/channels/twilio", + async (req: express.Request, res: express.Response) => { + console.log("Received POST request for %s %s", req.url, req.body); + const accountSid = req.body["accountSid"]; + const authToken = req.body["authToken"]; + const twilioPhoneNumber = req.body["twilioPhoneNumber"]; + const phoneNumberToDial = req.body["phoneNumberToDial"]; - let client; + // Check if some required keys are missing in the body object, if yes + // notify the client. 
+ const missingKeysList: string[] = missingValues({ + accountSid, + authToken, + twilioPhoneNumber, + phoneNumberToDial, + }); + if (missingKeysList.length !== 0) { + const err = new MissingKeysInBody(...missingKeysList); + res.status(err.code).send(errorJson(err.message)); + return; + } - try { - client = twilio(accountSid, authToken); - } catch (err) { - console.error(err); - res.status(Status.ERROR).send(resultJson(PingStatus.ERROR)); - return; - } + let client; - client.calls.create({ - twiml: '', - to: phoneNumberToDial, - from: twilioPhoneNumber - }).then(() => { - res.status(Status.SUCCESS).send(resultJson(PingStatus.SUCCESS)); - }).catch((err) => { - console.error(err); - res.status(Status.ERROR).send(resultJson(PingStatus.ERROR)); - }); - }); + try { + client = twilio(accountSid, authToken); + } catch (err) { + console.error(err); + res.status(Status.ERROR).send(resultJson(PingStatus.ERROR)); + return; + } -app.post('/server/channels/email', - async (req: express.Request, res: express.Response) => { - console.log('Received POST request for %s %s', req.url, req.body); - const smtp = req.body['smtp']; - const port = req.body['port']; - const from = req.body['from']; - const to = req.body['to']; - const username = req.body['username']; - const password = req.body['password']; - - // Check if some required keys are missing in the body object, if yes - // notify the client. - const missingKeysList: string[] = missingValues({ - smtp, port, from, to, username, password - }); - if (missingKeysList.length !== 0) { - const err = new MissingKeysInBody(...missingKeysList); - res.status(err.code).send(errorJson(err.message)); - return; - } + client.calls + .create({ + twiml: "", + to: phoneNumberToDial, + from: twilioPhoneNumber, + }) + .then(() => { + res.status(Status.SUCCESS).send(resultJson(PingStatus.SUCCESS)); + }) + .catch((err) => { + console.error(err); + res.status(Status.ERROR).send(resultJson(PingStatus.ERROR)); + }); + } +); - // Create mail transport (essentially an email client) - const transport = nodemailer.createTransport({ - host: smtp, - port, - auth: username && password ? - { - user: username, - pass: password - } : undefined, - }); +app.post( + "/server/channels/email", + async (req: express.Request, res: express.Response) => { + console.log("Received POST request for %s %s", req.url, req.body); + const smtp = req.body["smtp"]; + const port = req.body["port"]; + const from = req.body["from"]; + const to = req.body["to"]; + const username = req.body["username"]; + const password = req.body["password"]; - // If transporter valid, create and send test email - transport.verify((verifyTransportErr, _) => { - if (verifyTransportErr) { - console.error(verifyTransportErr); - res.status(Status.ERROR).send(resultJson(PingStatus.ERROR)); - return; + // Check if some required keys are missing in the body object, if yes + // notify the client. + const missingKeysList: string[] = missingValues({ + smtp, + port, + from, + to, + username, + password, + }); + if (missingKeysList.length !== 0) { + const err = new MissingKeysInBody(...missingKeysList); + res.status(err.code).send(errorJson(err.message)); + return; + } + + // Create mail transport (essentially an email client) + const transport = nodemailer.createTransport({ + host: smtp, + port, + auth: + username && password + ? 
{ + user: username, + pass: password, } + : undefined, + }); - const message = { - from, - to, - subject: testAlertMessage, - text: testAlertMessage, - }; - - // Send test email - transport.sendMail(message, (sendMailErr, _) => { - if (sendMailErr) { - console.error(sendMailErr); - res.status(Status.ERROR).send(resultJson(PingStatus.ERROR)); - return; - } - res.status(Status.SUCCESS).send(resultJson(PingStatus.SUCCESS)); - }); - }); + // If transporter valid, create and send test email + transport.verify((verifyTransportErr, _) => { + if (verifyTransportErr) { + console.error(verifyTransportErr); + res.status(Status.ERROR).send(resultJson(PingStatus.ERROR)); + return; + } + + const message = { + from, + to, + subject: testAlertMessage, + text: testAlertMessage, + }; + + // Send test email + transport.sendMail(message, (sendMailErr, _) => { + if (sendMailErr) { + console.error(sendMailErr); + res.status(Status.ERROR).send(resultJson(PingStatus.ERROR)); + return; + } + res.status(Status.SUCCESS).send(resultJson(PingStatus.SUCCESS)); + }); }); + } +); // ----------------- Repositories -app.post('/server/repositories/github', +app.post( + "/server/repositories/github", async (req: express.Request, res: express.Response) => { - console.log('Received POST request for %s %s', req.url, req.body); - const repoName = req.body['name']; + console.log("Received POST request for %s %s", req.url, req.body); + const repoName = req.body["name"]; - // Check if some required keys are missing in the body object, if yes - // notify the client. - const missingKeysList: string[] = missingValues({ - repoName - }); - if (missingKeysList.length !== 0) { - const err = new MissingKeysInBody(...missingKeysList); - res.status(err.code).send(errorJson(err.message)); - return; - } + // Check if some required keys are missing in the body object, if yes + // notify the client. 
+ const missingKeysList: string[] = missingValues({ + repoName, + }); + if (missingKeysList.length !== 0) { + const err = new MissingKeysInBody(...missingKeysList); + res.status(err.code).send(errorJson(err.message)); + return; + } - const url = `https://api.github.com/repos/${repoName}`; + const url = `https://api.github.com/repos/${repoName}`; - axios.get(url, {timeout: 3000}).then((response) => { - if(response.status === 200){ - res.status(Status.SUCCESS).send(resultJson(PingStatus.SUCCESS)); - } else { - res.status(Status.ERROR).send(resultJson(PingStatus.ERROR)); - } - }).catch((err) => { - if (err.code === 'ECONNABORTED') { - res.status(Status.TIMEOUT).send(resultJson(PingStatus.TIMEOUT)); - } else { - console.error(`Axios error: ${err.message}`); - res.status(Status.ERROR).send(resultJson(PingStatus.ERROR)); - } + axios + .get(url, { timeout: 3000 }) + .then((response) => { + if (response.status === 200) { + res.status(Status.SUCCESS).send(resultJson(PingStatus.SUCCESS)); + } else { + res.status(Status.ERROR).send(resultJson(PingStatus.ERROR)); + } + }) + .catch((err) => { + if (err.code === "ECONNABORTED") { + res.status(Status.TIMEOUT).send(resultJson(PingStatus.TIMEOUT)); + } else { + console.error(`Axios error: ${err.message}`); + res.status(Status.ERROR).send(resultJson(PingStatus.ERROR)); + } }); - }); + } +); -app.post('/server/repositories/dockerhub', +app.post( + "/server/repositories/dockerhub", async (req: express.Request, res: express.Response) => { - console.log('Received POST request for %s %s', req.url, req.body); - const repoName = req.body['name']; + console.log("Received POST request for %s %s", req.url, req.body); + const repoName = req.body["name"]; - // Check if some required keys are missing in the body object, if yes - // notify the client. - const missingKeysList: string[] = missingValues({ - repoName - }); - if (missingKeysList.length !== 0) { - const err = new MissingKeysInBody(...missingKeysList); - res.status(err.code).send(errorJson(err.message)); - return; - } + // Check if some required keys are missing in the body object, if yes + // notify the client. 
+ const missingKeysList: string[] = missingValues({ + repoName, + }); + if (missingKeysList.length !== 0) { + const err = new MissingKeysInBody(...missingKeysList); + res.status(err.code).send(errorJson(err.message)); + return; + } - const url = `https://registry.hub.docker.com/v2/repositories/${repoName}`; + const url = `https://registry.hub.docker.com/v2/repositories/${repoName}`; - axios.get(url, {timeout: 3000}).then((response) => { - if('name' in response.data && 'user' in response.data){ - res.status(Status.SUCCESS).send(resultJson(PingStatus.SUCCESS)); - } else { - res.status(Status.ERROR).send(resultJson(PingStatus.ERROR)); - } - }).catch((err) => { - if (err.code === 'ECONNABORTED') { - res.status(Status.TIMEOUT).send(resultJson(PingStatus.TIMEOUT)); - } else { - console.error(`Axios error: ${err.message}`); - res.status(Status.ERROR).send(resultJson(PingStatus.ERROR)); - } + axios + .get(url, { timeout: 3000 }) + .then((response) => { + if ("name" in response.data && "user" in response.data) { + res.status(Status.SUCCESS).send(resultJson(PingStatus.SUCCESS)); + } else { + res.status(Status.ERROR).send(resultJson(PingStatus.ERROR)); + } + }) + .catch((err) => { + if (err.code === "ECONNABORTED") { + res.status(Status.TIMEOUT).send(resultJson(PingStatus.TIMEOUT)); + } else { + console.error(`Axios error: ${err.message}`); + res.status(Status.ERROR).send(resultJson(PingStatus.ERROR)); + } }); - }); - - - + } +); // ---------------------------------------- Server defaults -app.get('/server/*', async (req: express.Request, - res: express.Response) => { - console.log('Received GET request for %s', req.url); - const err: InvalidEndpoint = new InvalidEndpoint(req.url); - res.status(err.code).send(errorJson(err.message)); +app.get("/server/*", async (req: express.Request, res: express.Response) => { + console.log("Received GET request for %s", req.url); + const err: InvalidEndpoint = new InvalidEndpoint(req.url); + res.status(err.code).send(errorJson(err.message)); }); -app.post('/server/*', async (req: express.Request, - res: express.Response) => { - console.log('Received POST request for %s', req.url); - const err = new InvalidEndpoint(req.url); - res.status(err.code).send(errorJson(err.message)); +app.post("/server/*", async (req: express.Request, res: express.Response) => { + console.log("Received POST request for %s", req.url); + const err = new InvalidEndpoint(req.url); + res.status(err.code).send(errorJson(err.message)); }); // ---------------------------------------- Server redirects -app.get('/*', async (req: express.Request, res: express.Response) => { - res.redirect('/api-docs') +app.get("/*", async (req: express.Request, res: express.Response) => { + res.redirect("/api-docs"); }); -app.post('/*', async (req: express.Request, - res: express.Response) => { - res.redirect('/api-docs') +app.post("/*", async (req: express.Request, res: express.Response) => { + res.redirect("/api-docs"); }); // ---------------------------------------- Start listen @@ -1713,15 +1832,15 @@ const port = parseInt(process.env.API_PORT || "9001"); // Create Https server const server = https.createServer(httpsOptions, app); -server.once('error', function (err: any) { - if (err.code === 'EADDRINUSE') { - console.error('port is currently in use'); - } +server.once("error", function (err: any) { + if (err.code === "EADDRINUSE") { + console.error("port is currently in use"); + } }); // Listen for requests -server.listen(port, () => console.log('Listening on %s', port)); +server.listen(port, () => 
console.log("Listening on %s", port)); //TODO: Need to add authentication, even to the respective middleware functions -export {app, server, redisInterval, mongoInterval}; +export { app, server, redisInterval, mongoInterval }; diff --git a/docker-compose-tests.yml b/docker-compose-tests.yml index 790ad698..02b42e8e 100644 --- a/docker-compose-tests.yml +++ b/docker-compose-tests.yml @@ -169,7 +169,7 @@ networks: ipam: driver: default config: - - subnet: 172.19.0.0/24 + - subnet: 172.18.0.0/24 volumes: db-data: diff --git a/docker-compose.yml b/docker-compose.yml index 7c6d7da1..667bffba 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -235,7 +235,7 @@ services: - 'DB_IP=${DB_IP_REPLICA_1}' - 'DB_PORT=${DB_PORT}' - 'DB_NAME=${DB_NAME}' - - 'WAIT_HOSTS=${DB_IP_REPLICA_1}:${DB_PORT}, ${REDIS_IP}:${REDIS_PORT}, ${SUBSTRATE_API_IP}:${SUBSTRATE_API_PORT}' + - 'WAIT_HOSTS=${DB_IP_REPLICA_1}:${DB_PORT}, ${REDIS_IP}:${REDIS_PORT}' - 'UI_DASHBOARD_PORT=${UI_DASHBOARD_PORT}' - 'DEV_MODE=${DEV_MODE}' - 'UI_ACCESS_IP=${UI_ACCESS_IP}' @@ -257,7 +257,6 @@ services: depends_on: - redis - rs1 - - substrate-api ui: build: @@ -278,20 +277,6 @@ services: depends_on: - api - substrate-api: - environment: - - 'SUBSTRATE_API_PORT=${SUBSTRATE_API_PORT}' - build: - context: './' - dockerfile: './substrate-api/Dockerfile' - image: 'simplyvc/panic_substrate_api:1.3.0' - volumes: - - './certificates:/opt/panic/certificates' - restart: always - networks: - panic_net: - ipv4_address: '${SUBSTRATE_API_IP}' - migration: container_name: migration build: @@ -320,7 +305,7 @@ networks: ipam: driver: default config: - - subnet: 172.19.0.0/24 + - subnet: 172.18.0.0/24 volumes: db-data: diff --git a/web-installer/src/server.js b/web-installer/src/server.js index 60abcd3f..5e921805 100644 --- a/web-installer/src/server.js +++ b/web-installer/src/server.js @@ -1054,12 +1054,13 @@ app.post('/server/cosmos/rest', verify, async (req, res) => { return; } - const url = `${restUrl}/node_info`; + const url = `${restUrl}/cosmos/base/tendermint/v1beta1/node_info`; axios .get(url, { params: {} }) .then((response) => { - if ('node_info' in response.data) { + console.log(response.data); + if ("default_node_info" in response.data) { const msg = new msgs.MessagePong(); res.status(utils.SUCCESS_STATUS).send(utils.resultJson(msg.message)); } else { From 763e10844449fb031ce1a8c066f282468742a445 Mon Sep 17 00:00:00 2001 From: Roman Perera Date: Thu, 27 Feb 2025 18:58:23 +0100 Subject: [PATCH 3/9] fixed tendermint RPC json parsing error --- alerter/src/monitors/network/cosmos.py | 4 ++++ alerter/src/monitors/node/cosmos.py | 27 ++++++++++++++++---------- 2 files changed, 21 insertions(+), 10 deletions(-) diff --git a/alerter/src/monitors/network/cosmos.py b/alerter/src/monitors/network/cosmos.py index 302a50b5..e3661832 100644 --- a/alerter/src/monitors/network/cosmos.py +++ b/alerter/src/monitors/network/cosmos.py @@ -79,11 +79,15 @@ def _parse_proposal(proposal: Dict) -> Dict: proposal['content']['value']['title'] if 'value' in proposal['content'] else proposal['content']['title'] + if 'content' in proposal + else proposal['title'] ), 'description': ( proposal['content']['value']['description'] if 'value' in proposal['content'] else proposal['content']['description'] + if 'content' in proposal + else proposal['summary'] ) } diff --git a/alerter/src/monitors/node/cosmos.py b/alerter/src/monitors/node/cosmos.py index 8a2703d8..8782bbae 100644 --- a/alerter/src/monitors/node/cosmos.py +++ b/alerter/src/monitors/node/cosmos.py @@ -292,7 
+292,7 @@ def retrieval_process() -> Dict:
                         # The 'jailed' keyword is normally exposed in
                         # cosmos/staking/v1beta1/validators for v0.50.1 of the Cosmos
-                        # SDK only. If we encounter nodes on this version which do not
+                        # SDK. If we encounter nodes on this version which do not
                         # expose it we might need to use
                         # /cosmos/slashing/v1beta1/signing_infos
                         'jailed': staking_validators['validator']['jailed'],
@@ -612,14 +612,18 @@ def _validator_was_slashed(self, begin_block_events: List[Dict]) -> (
                 attributes = event['attributes']
                 for attribute in attributes:
                     if 'key' in attribute and 'value' in attribute:
-                        decoded_key = base64.b64decode(attribute['key']).decode(
-                            'utf-8')
-                        decoded_value = base64.b64decode(
-                            attribute['value']).decode('utf-8')
-                        if str.lower(decoded_key) == "address":
-                            event_address = bech32_to_address(decoded_value)
-                        elif str.lower(decoded_key) == "burned_coins":
-                            event_burned_coins = int(decoded_value)
+                        # decoded_key = base64.b64decode(attribute['key']).decode(
+                        #     'utf-8')
+                        # decoded_value = base64.b64decode(
+                        #     attribute['value']).decode('utf-8')
+                        # if str.lower(decoded_key) == "address":
+                        #     event_address = bech32_to_address(decoded_value)
+                        # elif str.lower(decoded_key) == "burned_coins":
+                        #     event_burned_coins = int(decoded_value)
+                        if str.lower(attribute['key']) == "address":
+                            event_address = bech32_to_address(attribute['value'])
+                        elif str.lower(attribute['key']) == "burned_coins":
+                            event_burned_coins = int(attribute['value'])

                 if event_address == self.validator_consensus_address:
                     slashed = True
@@ -683,7 +687,10 @@ def retrieval_process() -> Dict:
                 [source_url, {'height': height_to_monitor}], source_name)

             slashed, slashed_amount = self._validator_was_slashed(
-                block_results_at_height['result']['begin_block_events'])
+                block_results_at_height['result']['begin_block_events']
+                if 'begin_block_events' in block_results_at_height['result']
+                else block_results_at_height['result']['finalize_block_events']
+            )

             if validator_was_active:
                 previous_block_signatures = block_at_height['result'][

From 904aa3e015dd56f9934f09f86777a86e183f500a Mon Sep 17 00:00:00 2001
From: Roman Perera
Date: Mon, 3 Mar 2025 22:11:47 +0100
Subject: [PATCH 4/9] Fixed Cosmos REST error + added tests for v0.50.1

---
 alerter/src/api_wrappers/cosmos.py        |   3 +-
 alerter/src/monitors/network/cosmos.py    | 122 ++++-----
 alerter/src/monitors/node/cosmos.py       |  25 +-
 alerter/test/monitors/node/test_cosmos.py | 314 +++++++++++++++++++---
 alerter/test/monitors/test_cosmos.py      |  38 +--
 5 files changed, 378 insertions(+), 124 deletions(-)

diff --git a/alerter/src/api_wrappers/cosmos.py b/alerter/src/api_wrappers/cosmos.py
index c3cf63c2..f62900c0 100644
--- a/alerter/src/api_wrappers/cosmos.py
+++ b/alerter/src/api_wrappers/cosmos.py
@@ -30,7 +30,8 @@ def __init__(self, logger: logging.Logger, verify: bool = False,
     def get_syncing(self, cosmos_rest_url: str) -> Dict:
         """
         This function retrieves data from the cosmos_rest_url/syncing endpoint,
-        and is compatible with both v0.39.2 and v0.42.6 of the Cosmos SDK
+        and is *NOT* compatible with v0.39.2 or v0.42.6 of the Cosmos SDK;
+        it is *ONLY* compatible with v0.50.1
         :param cosmos_rest_url: The Cosmos REST url of the data source
         :return: Retrieves data from the cosmos_rest_url/syncing endpoint
         """
diff --git a/alerter/src/monitors/network/cosmos.py b/alerter/src/monitors/network/cosmos.py
index e3661832..e143d75d 100644
--- a/alerter/src/monitors/network/cosmos.py
+++ b/alerter/src/monitors/network/cosmos.py
@@ -59,69 +59,69 @@ def chain_name(self) -> str: 
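# For orientation, the three proposal shapes reconciled by _parse_proposal
# below look roughly as follows (an abridged sketch inferred from the key
# checks in the function itself; real responses carry more fields):
#
#   gov v1beta1, v0.39.2 style:
#       {"proposal_id": "1",
#        "content": {"value": {"title": "...", "description": "..."}}, ...}
#   gov v1beta1, v0.42.6 style:
#       {"proposal_id": "1",
#        "content": {"title": "...", "description": "..."}, ...}
#   gov v1, v0.50.1 style:
#       {"id": "1", "title": "...", "summary": "...", ...}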
@staticmethod
     def _parse_proposal(proposal: Dict) -> Dict:
-        """
-        This function parses the proposal retrieved from the source node and
-        returns the corresponding value to be used by the PANIC components.
-        Note that this function is compatible with both v0.39.2 and v0.42.6 of
-        the Cosmos SDK.
-        :param proposal: The proposal retrieved from the source node
-        :return: The corresponding proposal to be used by the PANIC components
-        :raises: KeyError if the structure of the proposal returned by the
-        endpoints is not as expected.
-        """
-        parsed_proposal = {
-            'proposal_id': (
-                proposal['proposal_id']
-                if 'proposal_id' in proposal
-                else proposal['id']
-            ),
-            'title': (
-                proposal['content']['value']['title']
-                if 'value' in proposal['content']
-                else proposal['content']['title']
-                if 'content' in proposal
-                else proposal['title']
-            ),
-            'description': (
-                proposal['content']['value']['description']
-                if 'value' in proposal['content']
-                else proposal['content']['description']
-                if 'content' in proposal
-                else proposal['summary']
+        """
+        This function parses the proposal retrieved from the source node and
+        returns the corresponding value to be used by the PANIC components.
+        Note that this function is compatible with v0.39.2, v0.42.6 and
+        v0.50.1 of the Cosmos SDK.
+        :param proposal: The proposal retrieved from the source node
+        :return: The corresponding proposal to be used by the PANIC components
+        :raises: KeyError if the structure of the proposal returned by the
+        endpoints is not as expected.
+        """
+        parsed_proposal = {
+            'proposal_id': (
+                proposal['proposal_id']
+                if 'proposal_id' in proposal
+                else proposal['id']
+            ),
+            'title': (
+                proposal['content']['value']['title']
+                if 'content' in proposal and 'value' in proposal['content']
+                else proposal['content']['title']
+                if 'content' in proposal
+                else proposal['title']
+            ),
+            'description': (
+                proposal['content']['value']['description']
+                if 'content' in proposal and 'value' in proposal['content']
+                else proposal['content']['description']
+                if 'content' in proposal
+                else proposal['summary']
+            )
+        }
+
+        status = (
+            proposal['status']
+            if 'status' in proposal
+            else proposal['proposal_status']
+        )
-            )
-        }

-        status = (
-            proposal['status']
-            if 'status' in proposal
-            else proposal['proposal_status']
-        )
-
-        if type(status) == str:
-            status = status.lower()
-        if status in [0, "proposal_status_unspecified", "unspecified"]:
-            parsed_proposal['status'] = PROPOSAL_STATUS_UNSPECIFIED
-        elif status in [1, "proposal_status_deposit_period", "deposit_period"]:
-            parsed_proposal['status'] = PROPOSAL_STATUS_DEPOSIT_PERIOD
-        elif status in [2, "proposal_status_voting_period", "voting_period"]:
-            parsed_proposal['status'] = PROPOSAL_STATUS_VOTING_PERIOD
-        elif status in [3, "proposal_status_passed", "passed"]:
-            parsed_proposal['status'] = PROPOSAL_STATUS_PASSED
-        elif status in [4, "proposal_status_rejected", "rejected"]:
-            parsed_proposal['status'] = PROPOSAL_STATUS_REJECTED
-        elif status in [5, "proposal_status_failed", "failed"]:
-            parsed_proposal['status'] = PROPOSAL_STATUS_FAILED
-        else:
-            parsed_proposal['status'] = PROPOSAL_STATUS_INVALID
-
-        parsed_proposal['final_tally_result'] = proposal['final_tally_result']
-        parsed_proposal['submit_time'] = proposal['submit_time']
-        parsed_proposal['deposit_end_time'] = proposal['deposit_end_time']
-        parsed_proposal['total_deposit'] = proposal['total_deposit']
-        parsed_proposal['voting_start_time'] = proposal['voting_start_time']
-        parsed_proposal['voting_end_time'] = proposal['voting_end_time']
-
-        
return parsed_proposal + if type(status) == str: + status = status.lower() + if status in [0, "proposal_status_unspecified", "unspecified"]: + parsed_proposal['status'] = PROPOSAL_STATUS_UNSPECIFIED + elif status in [1, "proposal_status_deposit_period", "deposit_period"]: + parsed_proposal['status'] = PROPOSAL_STATUS_DEPOSIT_PERIOD + elif status in [2, "proposal_status_voting_period", "voting_period"]: + parsed_proposal['status'] = PROPOSAL_STATUS_VOTING_PERIOD + elif status in [3, "proposal_status_passed", "passed"]: + parsed_proposal['status'] = PROPOSAL_STATUS_PASSED + elif status in [4, "proposal_status_rejected", "rejected"]: + parsed_proposal['status'] = PROPOSAL_STATUS_REJECTED + elif status in [5, "proposal_status_failed", "failed"]: + parsed_proposal['status'] = PROPOSAL_STATUS_FAILED + else: + parsed_proposal['status'] = PROPOSAL_STATUS_INVALID + + parsed_proposal['final_tally_result'] = proposal['final_tally_result'] + parsed_proposal['submit_time'] = proposal['submit_time'] + parsed_proposal['deposit_end_time'] = proposal['deposit_end_time'] + parsed_proposal['total_deposit'] = proposal['total_deposit'] + parsed_proposal['voting_start_time'] = proposal['voting_start_time'] + parsed_proposal['voting_end_time'] = proposal['voting_end_time'] + + return parsed_proposal def _get_cosmos_rest_v0_39_2_indirect_data( self, source: CosmosNodeConfig) -> Dict: diff --git a/alerter/src/monitors/node/cosmos.py b/alerter/src/monitors/node/cosmos.py index 8782bbae..2bf6fa11 100644 --- a/alerter/src/monitors/node/cosmos.py +++ b/alerter/src/monitors/node/cosmos.py @@ -599,6 +599,12 @@ def _validator_was_slashed(self, begin_block_events: List[Dict]) -> ( # cannot be active, hence not slashed return False, None + def try_base64_decode(s): + try: + return base64.b64decode(s).decode('utf-8') + except (base64.binascii.Error, UnicodeDecodeError): + return s + slashed = False slashed_amount = None for event in begin_block_events: @@ -612,18 +618,13 @@ def _validator_was_slashed(self, begin_block_events: List[Dict]) -> ( attributes = event['attributes'] for attribute in attributes: if 'key' in attribute and 'value' in attribute: - # decoded_key = base64.b64decode(attribute['key']).decode( - # 'utf-8') - # decoded_value = base64.b64decode( - # attribute['value']).decode('utf-8') - # if str.lower(decoded_key) == "address": - # event_address = bech32_to_address(decoded_value) - # elif str.lower(decoded_key) == "burned_coins": - # event_burned_coins = int(decoded_value) - if str.lower(attribute['key']) == "address": - event_address = bech32_to_address(attribute['value']) - elif str.lower(attribute['key']) == "burned_coins": - event_burned_coins = int(attribute['value']) + decoded_key = try_base64_decode(attribute['key']) + decoded_value = try_base64_decode(attribute['value']) + + if str.lower(decoded_key) == "address": + event_address = bech32_to_address(decoded_value) + elif str.lower(decoded_key) == "burned_coins": + event_burned_coins = int(decoded_value) if event_address == self.validator_consensus_address: slashed = True diff --git a/alerter/test/monitors/node/test_cosmos.py b/alerter/test/monitors/node/test_cosmos.py index 3afdd97a..eb3d22be 100644 --- a/alerter/test/monitors/node/test_cosmos.py +++ b/alerter/test/monitors/node/test_cosmos.py @@ -60,6 +60,7 @@ def setUp(self) -> None: self.test_exception_3 = PANICException('test_exception_3', 3) self.sdk_version_0_39_2 = 'v0.39.2' self.sdk_version_0_42_6 = 'v0.42.6' + self.sdk_version_0_50_1 = 'v0.50.1' self.test_consensus_address = 
'test_consensus_address'
         self.test_is_syncing = False
         self.test_is_peered_with_sentinel = True
@@ -401,6 +402,21 @@ def test_get_cosmos_rest_v0_42_6_indirect_data_validator_return(
             self.test_monitor._get_cosmos_rest_v0_42_6_indirect_data_validator(
                 self.data_sources[0])
         self.assertEqual(expected_return, actual_return)
+
+    @mock.patch.object(CosmosRestServerApiWrapper,
+                       'get_staking_validators_v0_50_1')
+    def test_get_cosmos_rest_v0_50_1_indirect_data_validator_return(
+            self, staking_validators_return, expected_return,
+            mock_staking_validators) -> None:
+        """
+        We will check that the return is as expected for all cases
+        """
+        mock_staking_validators.return_value = staking_validators_return
+        actual_return = \
+            self.test_monitor._get_cosmos_rest_v0_50_1_indirect_data_validator(
+                self.data_sources[0])
+        self.assertEqual(expected_return, actual_return)
+

     def test_get_cosmos_rest_indirect_data_return_if_empty_source_url(
             self) -> None:
@@ -419,6 +435,11 @@ def test_get_cosmos_rest_indirect_data_return_if_empty_source_url(
         actual_ret = self.test_monitor._get_cosmos_rest_indirect_data(
             self.data_sources[0], self.sdk_version_0_42_6)
         self.assertEqual(expected_ret, actual_ret)
+
+        # Test for v0.50.1
+        actual_ret = self.test_monitor._get_cosmos_rest_indirect_data(
+            self.data_sources[0], self.sdk_version_0_50_1)
+        self.assertEqual(expected_ret, actual_ret)

     def test_get_cosmos_rest_indirect_data_return_if_non_validator_node(
             self) -> None:
@@ -437,17 +458,26 @@ def test_get_cosmos_rest_indirect_data_return_if_non_validator_node(
         actual_ret = self.test_monitor._get_cosmos_rest_indirect_data(
             self.data_sources[0], self.sdk_version_0_42_6)
         self.assertEqual(expected_ret, actual_ret)
+
+        # Test for v0.50.1
+        actual_ret = self.test_monitor._get_cosmos_rest_indirect_data(
+            self.data_sources[0], self.sdk_version_0_50_1)
+        self.assertEqual(expected_ret, actual_ret)

     @mock.patch.object(CosmosNodeMonitor,
                        '_get_cosmos_rest_v0_39_2_indirect_data_validator')
     @mock.patch.object(CosmosNodeMonitor,
                        '_get_cosmos_rest_v0_42_6_indirect_data_validator')
+    @mock.patch.object(CosmosNodeMonitor,
+                       '_get_cosmos_rest_v0_50_1_indirect_data_validator')
     def test_get_cosmos_rest_indirect_data_return_if_validator_node(
-            self, mock_get_42_indirect, mock_get_39_indirect) -> None:
+            self, mock_get_50_indirect, mock_get_42_indirect,
+            mock_get_39_indirect) -> None:
         mock_get_42_indirect.return_value = \
             self.retrieved_cosmos_rest_indirect_data_1
         mock_get_39_indirect.return_value = \
             self.retrieved_cosmos_rest_indirect_data_2
+        mock_get_50_indirect.return_value = \
+            self.retrieved_cosmos_rest_indirect_data_2
         expected_invalid_version = {
             'bond_status': None,
             'jailed': None
@@ -486,12 +516,17 @@ def test_get_cosmos_rest_version_data_return_if_no_indirect_source_selected(
             self.sdk_version_0_42_6)
         actual_ret_v0_39_2 = self.test_monitor._get_cosmos_rest_version_data(
             self.sdk_version_0_39_2)
+        actual_ret_v0_50_1 = self.test_monitor._get_cosmos_rest_version_data(
+            self.sdk_version_0_50_1)
         self.assertEqual(({}, True, NoSyncedDataSourceWasAccessibleException(
             self.monitor_name, 'indirect Cosmos REST node')),
             actual_ret_v0_42_6)
         self.assertEqual(({}, True, NoSyncedDataSourceWasAccessibleException(
             self.monitor_name, 'indirect Cosmos REST node')),
             actual_ret_v0_39_2)
+        self.assertEqual(({}, True, NoSyncedDataSourceWasAccessibleException(
+            self.monitor_name, 'indirect Cosmos REST node')),
+            actual_ret_v0_50_1)

     @parameterized.expand([
         (NodeIsDownException('node_name_1'),),
@@ -517,8 +552,11 @@ def 
test_get_cosmos_rest_version_data_ret_if_node_not_reachable_at_rest_url( self.sdk_version_0_42_6) actual_ret_v0_39_2 = self.test_monitor._get_cosmos_rest_version_data( self.sdk_version_0_39_2) + actual_ret_v0_50_1 = self.test_monitor._get_cosmos_rest_version_data( + self.sdk_version_0_50_1) self.assertEqual(({}, True, err), actual_ret_v0_42_6) self.assertEqual(({}, True, err), actual_ret_v0_39_2) + self.assertEqual(({}, True, err), actual_ret_v0_50_1) @mock.patch.object(CosmosNodeMonitor, '_get_cosmos_rest_indirect_data') @mock.patch.object(CosmosNodeMonitor, '_cosmos_rest_reachable') @@ -544,12 +582,17 @@ def test_get_cosmos_rest_version_data_return_if_data_obtained_successfully( self.sdk_version_0_42_6) actual_ret_v0_39_2 = self.test_monitor._get_cosmos_rest_version_data( self.sdk_version_0_39_2) + actual_ret_v0_50_1 = self.test_monitor._get_cosmos_rest_version_data( + self.sdk_version_0_50_1) self.assertEqual( (self.retrieved_cosmos_rest_indirect_data_1, False, None), actual_ret_v0_42_6) self.assertEqual( (self.retrieved_cosmos_rest_indirect_data_2, False, None), actual_ret_v0_39_2) + self.assertEqual( + (self.retrieved_cosmos_rest_indirect_data_2, False, None), + actual_ret_v0_50_1) @parameterized.expand([ (CannotConnectWithDataSourceException('test_monitor', 'node_name_1', @@ -579,8 +622,12 @@ def test_get_cosmos_rest_version_data_ret_if_expected_err_in_data_retrieval( self.sdk_version_0_42_6) actual_ret_v0_39_2 = self.test_monitor._get_cosmos_rest_version_data( self.sdk_version_0_39_2) + actual_ret_v0_50_1 = self.test_monitor._get_cosmos_rest_version_data( + self.sdk_version_0_50_1) self.assertEqual(({}, True, err), actual_ret_v0_42_6) self.assertEqual(({}, True, err), actual_ret_v0_39_2) + self.assertEqual(({}, True, err), actual_ret_v0_50_1) + @mock.patch.object(CosmosNodeMonitor, '_get_cosmos_rest_version_data') def test_get_cosmos_rest_v0_39_2_data_calls_get_cosmos_rest_version_data( @@ -664,6 +711,47 @@ def test_get_cosmos_rest_v0_42_6_data_returns_get_cosmos_rest_ver_data_ret( actual_ret = self.test_monitor._get_cosmos_rest_v0_42_6_data() self.assertEqual(get_rest_version_data_ret, actual_ret) + @mock.patch.object(CosmosNodeMonitor, '_get_cosmos_rest_version_data') + def test_get_cosmos_rest_v0_50_1_data_calls_get_cosmos_rest_version_data( + self, mock_get_rest_version) -> None: + """ + In this test we will be checking that self._get_cosmos_rest_v0_50_1_data + calls self._get_cosmos_rest_version_data correctly. 
+ """ + mock_get_rest_version.return_value = None + self.test_monitor._get_cosmos_rest_v0_50_1_data() + mock_get_rest_version.assert_called_once_with(self.sdk_version_0_50_1) + + @parameterized.expand([ + (({}, True, + CannotConnectWithDataSourceException('test_monitor', 'node_name_1', + 'err')),), + (({}, True, + DataReadingException('test_monitor', 'cosmos_rest_url_1')),), + (({}, True, InvalidUrlException('cosmos_rest_url_1')),), + (({}, True, + CosmosSDKVersionIncompatibleException('node_name_1', 'v0.42.6')),), + (({}, True, CosmosRestServerApiCallException('test_call', 'err_msg')),), + (({}, True, IncorrectJSONRetrievedException('REST', 'err')),), + (({}, True, NoSyncedDataSourceWasAccessibleException( + 'test_monitor_name', 'indirect Cosmos REST node')),), + (({}, True, NodeIsDownException('node_name_1')),), + (({'indirect_key': 34}, False, None),), + ]) + @mock.patch.object(CosmosNodeMonitor, '_get_cosmos_rest_version_data') + def test_get_cosmos_rest_v0_50_1_data_returns_get_cosmos_rest_ver_data_ret( + self, get_rest_version_data_ret, mock_get_rest_version) -> None: + """ + In this test we will be checking that self._get_cosmos_rest_v0_50_1_data + returns the value returned by self._get_cosmos_rest_version_data. We + will test for every possible return that + self._get_cosmos_rest_version_data might return using + parameterized.expand + """ + mock_get_rest_version.return_value = get_rest_version_data_ret + actual_ret = self.test_monitor._get_cosmos_rest_v0_50_1_data() + self.assertEqual(get_rest_version_data_ret, actual_ret) + @parameterized.expand([ (({}, True, CannotConnectWithDataSourceException('test_monitor', 'node_name_1', @@ -677,11 +765,12 @@ def test_get_cosmos_rest_v0_42_6_data_returns_get_cosmos_rest_ver_data_ret( (({}, True, NodeIsDownException('node_name_1')),), (({'indirect_key': 34}, False, None),), ]) + @mock.patch.object(CosmosNodeMonitor, '_get_cosmos_rest_v0_50_1_data') @mock.patch.object(CosmosNodeMonitor, '_get_cosmos_rest_v0_42_6_data') @mock.patch.object(CosmosNodeMonitor, '_get_cosmos_rest_v0_39_2_data') def test_get_cosmos_rest_data_uses_last_retrieval_fn_used_first( self, retrieval_ret, mock_get_cosmos_rest_v0_39_2, - mock_get_cosmos_rest_v0_42_6) -> None: + mock_get_cosmos_rest_v0_42_6, mock_get_cosmos_rest_v0_50_1) -> None: """ In this test we will check that first the self._get_cosmos_rest_data function first attempts to retrieve data using the last used retrieval @@ -693,31 +782,45 @@ def test_get_cosmos_rest_data_uses_last_retrieval_fn_used_first( """ mock_get_cosmos_rest_v0_39_2.return_value = retrieval_ret mock_get_cosmos_rest_v0_42_6.return_value = retrieval_ret - + mock_get_cosmos_rest_v0_50_1.return_value = retrieval_ret # Test for v0.39.2 self.test_monitor._last_rest_retrieval_version = self.sdk_version_0_39_2 self.test_monitor._get_cosmos_rest_data() mock_get_cosmos_rest_v0_39_2.assert_called_once() mock_get_cosmos_rest_v0_42_6.assert_not_called() + mock_get_cosmos_rest_v0_50_1.assert_not_called() mock_get_cosmos_rest_v0_39_2.reset_mock() mock_get_cosmos_rest_v0_42_6.reset_mock() + mock_get_cosmos_rest_v0_50_1.reset_mock() # Test for v0.42.6 self.test_monitor._last_rest_retrieval_version = self.sdk_version_0_42_6 self.test_monitor._get_cosmos_rest_data() mock_get_cosmos_rest_v0_39_2.assert_not_called() mock_get_cosmos_rest_v0_42_6.assert_called_once() + mock_get_cosmos_rest_v0_50_1.assert_not_called() + mock_get_cosmos_rest_v0_39_2.reset_mock() + mock_get_cosmos_rest_v0_42_6.reset_mock() + mock_get_cosmos_rest_v0_50_1.reset_mock() + + # Test 
for v0.50.1
+        self.test_monitor._last_rest_retrieval_version = self.sdk_version_0_50_1
+        self.test_monitor._get_cosmos_rest_data()
+        mock_get_cosmos_rest_v0_39_2.assert_not_called()
+        mock_get_cosmos_rest_v0_42_6.assert_not_called()
+        mock_get_cosmos_rest_v0_50_1.assert_called_once()

     @parameterized.expand([
         (({}, True, IncorrectJSONRetrievedException('REST', 'err')),),
         (({}, True, CosmosSDKVersionIncompatibleException('node_name_1',
                                                           'version')),),
     ])
+    @mock.patch.object(CosmosNodeMonitor, '_get_cosmos_rest_v0_50_1_data')
     @mock.patch.object(CosmosNodeMonitor, '_get_cosmos_rest_v0_42_6_data')
     @mock.patch.object(CosmosNodeMonitor, '_get_cosmos_rest_v0_39_2_data')
     def test_get_cosmos_rest_data_attempts_other_rets_if_last_incompatible(
             self, retrieval_ret, mock_get_cosmos_rest_v0_39_2,
-            mock_get_cosmos_rest_v0_42_6) -> None:
+            mock_get_cosmos_rest_v0_42_6, mock_get_cosmos_rest_v0_50_1) -> None:
         """
         In this test we will check that other retrievals are performed if the
         last retrieval performed raises an incompatibility error
         """
         mock_get_cosmos_rest_v0_39_2.return_value = retrieval_ret
         mock_get_cosmos_rest_v0_42_6.return_value = \
             (self.retrieved_cosmos_rest_data_1, False, None)
+        mock_get_cosmos_rest_v0_50_1.return_value = \
+            (self.retrieved_cosmos_rest_data_1, False, None)
         self.test_monitor._last_rest_retrieval_version = self.sdk_version_0_39_2
         self.test_monitor._get_cosmos_rest_data()
         mock_get_cosmos_rest_v0_39_2.assert_called_once()
         mock_get_cosmos_rest_v0_42_6.assert_called_once()
+        mock_get_cosmos_rest_v0_50_1.assert_called_once()
         mock_get_cosmos_rest_v0_39_2.reset_mock()
         mock_get_cosmos_rest_v0_42_6.reset_mock()
+        mock_get_cosmos_rest_v0_50_1.reset_mock()

         # Test for v0.42.6
         mock_get_cosmos_rest_v0_39_2.return_value = \
             (self.retrieved_cosmos_rest_data_1, False, None)
         mock_get_cosmos_rest_v0_42_6.return_value = retrieval_ret
+        mock_get_cosmos_rest_v0_50_1.return_value = \
+            (self.retrieved_cosmos_rest_data_1, False, None)
         self.test_monitor._last_rest_retrieval_version = self.sdk_version_0_42_6
         self.test_monitor._get_cosmos_rest_data()
         mock_get_cosmos_rest_v0_39_2.assert_called_once()
         mock_get_cosmos_rest_v0_42_6.assert_called_once()
+        mock_get_cosmos_rest_v0_50_1.assert_called_once()
+        mock_get_cosmos_rest_v0_39_2.reset_mock()
+        mock_get_cosmos_rest_v0_42_6.reset_mock()
+        mock_get_cosmos_rest_v0_50_1.reset_mock()
+
+        # Test for v0.50.1
+        mock_get_cosmos_rest_v0_39_2.return_value = \
+            (self.retrieved_cosmos_rest_data_1, False, None)
+        mock_get_cosmos_rest_v0_42_6.return_value = \
+            (self.retrieved_cosmos_rest_data_1, False, None)
+        mock_get_cosmos_rest_v0_50_1.return_value = retrieval_ret
+        self.test_monitor._last_rest_retrieval_version = self.sdk_version_0_50_1
+        self.test_monitor._get_cosmos_rest_data()
+        mock_get_cosmos_rest_v0_39_2.assert_called_once()
+        mock_get_cosmos_rest_v0_42_6.assert_called_once()
+        mock_get_cosmos_rest_v0_50_1.assert_called_once()
+        mock_get_cosmos_rest_v0_39_2.reset_mock()
+        mock_get_cosmos_rest_v0_42_6.reset_mock()
+        mock_get_cosmos_rest_v0_50_1.reset_mock()

     @parameterized.expand([
         (({}, True,
           CannotConnectWithDataSourceException('test_monitor', 'node_name_1',
                                                'err')),),
         (({}, True,
           DataReadingException('test_monitor', 'cosmos_rest_url_1')),),
         (({}, True, InvalidUrlException('cosmos_rest_url_1')),),
         (({}, True,
           CosmosSDKVersionIncompatibleException('node_name_1', 'v0.42.6')),),
         (({}, True, CosmosRestServerApiCallException('test_call', 'err_msg')),),
         (({}, True, IncorrectJSONRetrievedException('REST', 'err')),),
         (({}, True, NoSyncedDataSourceWasAccessibleException(
             'test_monitor_name', 'indirect Cosmos REST node')),),
         (({}, True, NodeIsDownException('node_name_1')),),
         (({'indirect_key': 34}, False, None),),
     ])
+    @mock.patch.object(CosmosNodeMonitor, '_get_cosmos_rest_v0_50_1_data')
     @mock.patch.object(CosmosNodeMonitor, '_get_cosmos_rest_v0_42_6_data')
     @mock.patch.object(CosmosNodeMonitor, 
'_get_cosmos_rest_v0_39_2_data') def test_get_cosmos_rest_data_ret_retrieval_ret_if_no_incompatibility_err( self, retrieval_ret, mock_get_cosmos_rest_v0_39_2, - mock_get_cosmos_rest_v0_42_6) -> None: + mock_get_cosmos_rest_v0_42_6, mock_get_cosmos_rest_v0_50_1) -> None: """ In this test we will check that if data retrieval occurs without an incompatibility error being returned, then the function returns whatever @@ -769,20 +898,20 @@ def test_get_cosmos_rest_data_ret_retrieval_ret_if_no_incompatibility_err( and then for when the last retrieval used function returns an incompatibility error and another supported version is successful. """ - self.test_monitor._last_rest_retrieval_version = self.sdk_version_0_42_6 + self.test_monitor._last_rest_retrieval_version = self.sdk_version_0_50_1 # Test for when the last used retrieval function does not return an # incompatibility error - mock_get_cosmos_rest_v0_42_6.return_value = retrieval_ret + mock_get_cosmos_rest_v0_50_1.return_value = retrieval_ret actual_ret = self.test_monitor._get_cosmos_rest_data() self.assertEqual(retrieval_ret, actual_ret) - mock_get_cosmos_rest_v0_42_6.reset_mock() + mock_get_cosmos_rest_v0_50_1.reset_mock() # Test for when the last used retrieval function returns an # incompatibility error but other retrieval functions do not - mock_get_cosmos_rest_v0_42_6.return_value = \ + mock_get_cosmos_rest_v0_50_1.return_value = \ ({}, True, - CosmosSDKVersionIncompatibleException('node_name_1', 'v0.42.6')) + CosmosSDKVersionIncompatibleException('node_name_1', 'v0.50.1')) mock_get_cosmos_rest_v0_39_2.return_value = retrieval_ret actual_ret = self.test_monitor._get_cosmos_rest_data() self.assertEqual(retrieval_ret, actual_ret) @@ -792,11 +921,12 @@ def test_get_cosmos_rest_data_ret_retrieval_ret_if_no_incompatibility_err( (({}, True, CosmosSDKVersionIncompatibleException('node_name_1', 'version')),), ]) + @mock.patch.object(CosmosNodeMonitor, '_get_cosmos_rest_v0_50_1_data') @mock.patch.object(CosmosNodeMonitor, '_get_cosmos_rest_v0_42_6_data') @mock.patch.object(CosmosNodeMonitor, '_get_cosmos_rest_v0_39_2_data') def test_get_cosmos_rest_data_ret_if_incompatibility_issue_and_unsuccessful( self, retrieval_ret, mock_get_cosmos_rest_v0_39_2, - mock_get_cosmos_rest_v0_42_6) -> None: + mock_get_cosmos_rest_v0_42_6, mock_get_cosmos_rest_v0_50_1) -> None: """ In this test we will check that if incompatibility issues persist for every supported version, then the function returns ({}, True, @@ -806,6 +936,7 @@ def test_get_cosmos_rest_data_ret_if_incompatibility_issue_and_unsuccessful( """ mock_get_cosmos_rest_v0_39_2.return_value = retrieval_ret mock_get_cosmos_rest_v0_42_6.return_value = retrieval_ret + mock_get_cosmos_rest_v0_50_1.return_value = retrieval_ret actual_ret = self.test_monitor._get_cosmos_rest_data() expected_ret = ({}, True, CosmosRestServerDataCouldNotBeObtained( self.data_sources[2].node_name)) @@ -910,7 +1041,44 @@ def test_determine_last_height_monitored_tendermint( } ] } - }], [ + }, { + "jsonrpc": "2.0", + "id": -1, + "result": { + "block_height": "9313442", + "validators": [ + { + "address": "addr_5", + "pub_key": { + "type": "tendermint/PubKeyEd25519", + "value": "jzi1FcwjpprsVIqbXujev/Cfzwg7oFrybXmm/7jNeiI=" + }, + "voting_power": "121500000001", + "proposer_priority": "79425248532" + }, + { + "address": "addr_6", + "pub_key": { + "type": "tendermint/PubKeyEd25519", + "value": "CqXadXTNawX+OiayRorBsMebtQx35TttI3IspuzaN/Q=" + }, + "voting_power": "121500000001", + "proposer_priority": "-204933381423" + }, + { + 
"address": "addr_7", + "pub_key": { + "type": "tendermint/PubKeyEd25519", + "value": "nT9mxX1Ap7O6BdEnvs6ZUG7xuKD7kY0NWtu9GUZIBCk=" + }, + "voting_power": "121500000001", + "proposer_priority": "11980115931" + }, + ], + "count": "5", + "total": "5" + } +}], [ { "address": "addr_1", "voting_power": "43", @@ -926,7 +1094,34 @@ def test_determine_last_height_monitored_tendermint( { "address": "addr_4", "voting_power": "46", - } + }, + { + "address": "addr_5", + "pub_key": { + "type": "tendermint/PubKeyEd25519", + "value": "jzi1FcwjpprsVIqbXujev/Cfzwg7oFrybXmm/7jNeiI=" + }, + "voting_power": "121500000001", + "proposer_priority": "79425248532" + }, + { + "address": "addr_6", + "pub_key": { + "type": "tendermint/PubKeyEd25519", + "value": "CqXadXTNawX+OiayRorBsMebtQx35TttI3IspuzaN/Q=" + }, + "voting_power": "121500000001", + "proposer_priority": "-204933381423" + }, + { + "address": "addr_7", + "pub_key": { + "type": "tendermint/PubKeyEd25519", + "value": "nT9mxX1Ap7O6BdEnvs6ZUG7xuKD7kY0NWtu9GUZIBCk=" + }, + "voting_power": "121500000001", + "proposer_priority": "11980115931" + }, ],), ([{ "jsonrpc": "2.0", @@ -973,8 +1168,17 @@ def test_parse_validators_list_parses_correctly( { "address": "addr_4", "voting_power": "46", - } - ], 'addr_4', True,), + }, + { + "address": "addr_5", + "pub_key": { + "type": "tendermint/PubKeyEd25519", + "value": "jzi1FcwjpprsVIqbXujev/Cfzwg7oFrybXmm/7jNeiI=" + }, + "voting_power": "121500000001", + "proposer_priority": "79425248532" + }, + ], 'addr_5', True,), ([ { "address": "addr_1", @@ -991,8 +1195,17 @@ def test_parse_validators_list_parses_correctly( { "address": "addr_4", "voting_power": "46", - } - ], 'addr_5', False,), + }, + { + "address": "addr_5", + "pub_key": { + "type": "tendermint/PubKeyEd25519", + "value": "jzi1FcwjpprsVIqbXujev/Cfzwg7oFrybXmm/7jNeiI=" + }, + "voting_power": "121500000001", + "proposer_priority": "79425248532" + }, + ], 'addr_6', False,), ([ { "address": "addr_1", @@ -1027,7 +1240,16 @@ def test_parse_validators_list_parses_correctly( { "address": "addr_4", "voting_power": "46", - } + }, + { + "address": "addr_5", + "pub_key": { + "type": "tendermint/PubKeyEd25519", + "value": "jzi1FcwjpprsVIqbXujev/Cfzwg7oFrybXmm/7jNeiI=" + }, + "voting_power": "121500000001", + "proposer_priority": "79425248532" + }, ], "", False,), ([], "addr_1", False,), ]) @@ -1042,15 +1264,13 @@ def test_is_validator_active_returns_correctly( self.assertEqual(expected_return, actual_return) @parameterized.expand([ - ([ - {"type": "transfer"}, + ([ {"type": "transfer"}, { "type": "slash", "attributes": [ { "key": "YWRkcmVzcw==", - "value": 'Y29zbW9zdmFsY29uczEwdjdzcmE2NW1sdXl3bmtzdWR2' - 'Z3p0NzV4bHNmOHp3dXpzcThyMw==', + "value": "Y29zbW9zdmFsY29uczEwdjdzcmE2NW1sdXl3bmtzdWR2Z3p0NzV4bHNmOHp3dXpzcThyMw==", "index": True }, { @@ -1074,17 +1294,27 @@ def test_is_validator_active_returns_correctly( "index": True } ] - } + }, ], '7B3D01F754DFF8474ED0E358812FD437E09389DC', (True, None),), ([ - {"type": "transfer"}, + { + "type": "slash", + "attributes": [ + { + "key": "address", + "value": "cosmosvalcons10v7sra65mluywnksudvgzt75xlsf8zwuzsq8r3", + "index": True + } + ] + } + ], '7B3D01F754DFF8474ED0E358812FD437E09389DC', (True, None),), + ([ {"type": "transfer"}, { "type": "slash", "attributes": [ { "key": "YWRkcmVzcw==", - "value": 'Y29zbW9zdmFsY29uczEwdjdzcmE2NW1sdXl3b' - 'mtzdWR2Z3p0NzV4bHNmOHp3dXpzcThyMw==', + "value": "Y29zbW9zdmFsY29uczEwdjdzcmE2NW1sdXl3bmtzdWR2Z3p0NzV4bHNmOHp3dXpzcThyMw==", "index": True }, { @@ -1109,8 +1339,7 @@ def 
test_is_validator_active_returns_correctly(
                 "attributes": [
                     {
                         "key": "YWRkcmVzcw==",
-                    "value": 'Y29zbW9zdmFsY29uczEwdjdzcmE2NW1sdXl3bmt'
-                             'zdWR2Z3p0NzV4bHNmOHp3dXpzcThyMw==',
+                    "value": "Y29zbW9zdmFsY29uczEwdjdzcmE2NW1sdXl3bmtzdWR2Z3p0NzV4bHNmOHp3dXpzcThyMw==",
                         "index": True
                     },
                     {
@@ -1129,7 +1358,25 @@ def test_is_validator_active_returns_correctly(
                         "index": True
                     }
                 ]
-            },
+            }
+        ], '7B3D01F754DFF8474ED0E358812FD437E09389DC', (True, 2000),),
+        ([
+            {"type": "transfer"},
+            {
+                "type": "slash",
+                "attributes": [
+                    {
+                        "key": "address",
+                        "value": "cosmosvalcons10v7sra65mluywnksudvgzt75xlsf8zwuzsq8r3",
+                        "index": True
+                    },
+                    {
+                        "key": "burned_coins",
+                        "value": "2000",
+                        "index": True
+                    }
+                ]
+            }
         ], '7B3D01F754DFF8474ED0E358812FD437E09389DC', (True, 2000),),
         ([
             {"type": "transfer"},
@@ -1168,7 +1415,7 @@ def test_is_validator_active_returns_correctly(
                         "index": True
                     }
                 ]
-            },
+            }
         ], 'addr_1', (False, None),),
         ([
             {"type": "transfer"},
@@ -1226,6 +1473,7 @@ def test_validator_was_slashed_returns_correctly(
         Given a number of scenarios we will check that the
         _validator_was_slashed function will correctly determine if a validator
         was slashed or not, and will give the amount if available.
+        It could not be determined whether the values are base64 encoded, so both cases are handled
        """
         self.test_monitor._validator_consensus_address = consensus_address
         actual_return = self.test_monitor._validator_was_slashed(
@@ -1343,7 +1591,7 @@ def test_get_tendermint_rpc_archive_data_validator_return(
         mock_get_block_results.side_effect = [
             {
                 'result': {
-                    'begin_block_events': [
+                    'finalize_block_events': [
                         {
                             "type": "slash",
                             "attributes": [
@@ -1371,7 +1619,7 @@
             },
             {
                 'result': {
-                    'begin_block_events': [
+                    'finalize_block_events': [
                         {
                             "type": "slash",
                             "attributes": [
@@ -1399,7 +1647,7 @@
             },
             {
                 'result': {
-                    'begin_block_events': [
+                    'finalize_block_events': [
                         {
                             "type": "slash",
                             "attributes": [
diff --git a/alerter/test/monitors/test_cosmos.py b/alerter/test/monitors/test_cosmos.py
index 176f613a..0329e0b2 100644
--- a/alerter/test/monitors/test_cosmos.py
+++ b/alerter/test/monitors/test_cosmos.py
@@ -90,6 +90,7 @@ def setUp(self) -> None:
             connection_check_time_interval=self.connection_check_time_interval)
         self.sdk_version_0_39_2 = 'v0.39.2'
         self.sdk_version_0_42_6 = 'v0.42.6'
+        self.sdk_version_0_50_1 = 'v0.50.1'

         # Some dummy retrieval data
         self.rest_ret_1 = {
@@ -176,8 +177,11 @@ def test_tendermint_rpc_api_returns_tendermint_rpc_api(self) -> None:

     def test_last_rest_retrieval_version_returns_last_rest_retrieval_version(
             self) -> None:
-        # First we will check that last_rest_retrieval_version is set to v0.42.6
+        # First we will check that last_rest_retrieval_version is set to v0.50.1
         # on __init__
+        self.assertEqual(self.sdk_version_0_50_1,
+                         self.test_monitor.last_rest_retrieval_version)
+        self.test_monitor._last_rest_retrieval_version = self.sdk_version_0_42_6
         self.assertEqual(self.sdk_version_0_42_6,
                          self.test_monitor.last_rest_retrieval_version)
         self.test_monitor._last_rest_retrieval_version = self.sdk_version_0_39_2
@@ -293,7 +297,7 @@ def test_select_cosmos_rest_node_selects_first_reachable_synced_node(
         """
         mock_execute_with_checks.return_value = {"syncing": False}
         actual = self.test_monitor._select_cosmos_rest_node(
-            self.data_sources, self.sdk_version_0_39_2)
+            self.data_sources, self.sdk_version_0_50_1)
         self.assertEqual(self.data_sources[0], actual)
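    # As a point of reference for these selection tests (an illustrative
    # sketch): the probe that execute_with_checks is mocking here reduces to
    # one GET against the endpoint introduced in PATCH 1/9; the host and port
    # below are placeholders.
    #
    #     import requests
    #     url = ('http://localhost:1317'
    #            '/cosmos/base/tendermint/v1beta1/syncing')
    #     requests.get(url, timeout=10).json()  # e.g. {'syncing': False}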
@mock.patch.object(CosmosRestServerApiWrapper, 'execute_with_checks') @@ -310,7 +314,7 @@ def test_select_cosmos_rest_node_does_not_select_syncing_nodes( {"syncing": False} ] actual = self.test_monitor._select_cosmos_rest_node( - self.data_sources, self.sdk_version_0_39_2) + self.data_sources, self.sdk_version_0_50_1) self.assertEqual(self.data_sources[2], actual) @parameterized.expand([ @@ -322,7 +326,7 @@ def test_select_cosmos_rest_node_does_not_select_syncing_nodes( (IncompleteRead('test'),), (ChunkedEncodingError('test'),), (ProtocolError('test'),), - (CosmosSDKVersionIncompatibleException('test_node', 'v0.39.2'),), + (CosmosSDKVersionIncompatibleException('test_node', 'v0.50.1'),), (CosmosRestServerApiCallException('test_call', 'err_msg'),), (KeyError('test'),), ]) @@ -343,7 +347,7 @@ def test_select_cosmos_rest_node_does_not_select_nodes_raising_expected_err( {"syncing": False} ] actual = self.test_monitor._select_cosmos_rest_node( - self.data_sources, self.sdk_version_0_39_2) + self.data_sources, self.sdk_version_0_50_1) self.assertEqual(self.data_sources[2], actual) @parameterized.expand([ @@ -355,7 +359,7 @@ def test_select_cosmos_rest_node_does_not_select_nodes_raising_expected_err( (IncompleteRead('test'),), (ChunkedEncodingError('test'),), (ProtocolError('test'),), - (CosmosSDKVersionIncompatibleException('test_node', 'v0.39.2'),), + (CosmosSDKVersionIncompatibleException('test_node', 'v0.50.1'),), (CosmosRestServerApiCallException('test_call', 'err_msg'),), (KeyError('test'),), ]) @@ -374,7 +378,7 @@ def test_select_cosmos_rest_node_returns_None_if_no_node_selected( {"syncing": True} ] actual = self.test_monitor._select_cosmos_rest_node( - self.data_sources, self.sdk_version_0_39_2) + self.data_sources, self.sdk_version_0_50_1) self.assertIsNone(actual) @mock.patch.object(TendermintRpcApiWrapper, 'execute_with_checks') @@ -478,7 +482,7 @@ def test_cosmos_rest_reachable_returns_true_None_if_data_ret_successful( mock_execute_with_checks.side_effect = {"syncing": False} actual_reachable, actual_data_retrieval_exception = \ self.test_monitor._cosmos_rest_reachable(self.data_sources[2], - self.sdk_version_0_39_2) + self.sdk_version_0_50_1) self.assertTrue(actual_reachable) self.assertIsNone(actual_data_retrieval_exception) @@ -491,7 +495,7 @@ def test_cosmos_rest_reachable_returns_true_None_if_data_ret_successful( (IncompleteRead('test'), DataReadingException,), (ChunkedEncodingError('test'), DataReadingException,), (ProtocolError('test'), DataReadingException,), - (CosmosSDKVersionIncompatibleException('test_node', 'v0.39.2'), + (CosmosSDKVersionIncompatibleException('test_node', 'v0.50.1'), CosmosSDKVersionIncompatibleException,), (CosmosRestServerApiCallException('test_call', 'err_msg'), CosmosRestServerApiCallException,), @@ -509,7 +513,7 @@ def test_cosmos_rest_reachable_returns_false_err_if_expected_err_raised( mock_execute_with_checks.side_effect = raised_exception actual_reachable, actual_data_retrieval_exception = \ self.test_monitor._cosmos_rest_reachable(self.data_sources[2], - self.sdk_version_0_39_2) + self.sdk_version_0_50_1) self.assertFalse(actual_reachable) self.assertIsInstance(actual_data_retrieval_exception, returned_exception_type) @@ -523,7 +527,7 @@ def test_fn(): source_url = self.data_sources[0].cosmos_rest_url actual_ret = \ self.test_monitor._execute_cosmos_rest_retrieval_with_exceptions( - test_fn, source_name, source_url, self.sdk_version_0_39_2 + test_fn, source_name, source_url, self.sdk_version_0_50_1 ) self.assertEqual(self.test_data_dict, 
actual_ret) @@ -536,7 +540,7 @@ def test_fn(): (IncompleteRead('test'), DataReadingException,), (ChunkedEncodingError('test'), DataReadingException,), (ProtocolError('test'), DataReadingException,), - (CosmosSDKVersionIncompatibleException('test_node', 'v0.39.2'), + (CosmosSDKVersionIncompatibleException('test_node', 'v0.50.1'), CosmosSDKVersionIncompatibleException,), (CosmosRestServerApiCallException('test_call', 'err_msg'), CosmosRestServerApiCallException,), @@ -552,7 +556,7 @@ def test_fn(): self.assertRaises( expected_raised_exception, self.test_monitor._execute_cosmos_rest_retrieval_with_exceptions, - test_fn, source_name, source_url, self.sdk_version_0_39_2 + test_fn, source_name, source_url, self.sdk_version_0_50_1 ) @parameterized.expand([ @@ -646,7 +650,7 @@ def test_fn(): mock_execute.side_effect = [self.rest_ret_1, self.rest_ret_2] node_name = self.data_sources[0].node_name actual_ret = self.test_monitor._get_rest_data_with_pagination_keys( - test_fn, [], {}, node_name, self.sdk_version_0_42_6) + test_fn, [], {}, node_name, self.sdk_version_0_50_1) self.assertEqual([self.rest_ret_1, self.rest_ret_2], actual_ret) @mock.patch.object(CosmosRestServerApiWrapper, 'execute_with_checks') @@ -673,13 +677,13 @@ def test_fn(): } ] self.test_monitor._get_rest_data_with_pagination_keys( - test_fn, test_args, test_params, node_name, self.sdk_version_0_42_6) + test_fn, test_args, test_params, node_name, self.sdk_version_0_50_1) calls = mock_execute.call_args_list self.assertEqual(2, len(calls)) mock_execute.assert_has_calls([ - call(test_fn, test_args_first, node_name, self.sdk_version_0_42_6), - call(test_fn, test_args_second, node_name, self.sdk_version_0_42_6), + call(test_fn, test_args_first, node_name, self.sdk_version_0_50_1), + call(test_fn, test_args_second, node_name, self.sdk_version_0_50_1), ]) @mock.patch.object(TendermintRpcApiWrapper, 'execute_with_checks') From 41985ecea4861f23d2240234f575bb10a4ed9315 Mon Sep 17 00:00:00 2001 From: Roman Perera Date: Wed, 5 Mar 2025 16:37:54 +0100 Subject: [PATCH 5/9] fixed prometheus endpoints --- alerter/src/alerter/alerts/network/cosmos.py | 4 +- alerter/src/monitorables/networks/cosmos.py | 8 +- alerter/src/monitors/node/cosmos.py | 18 +- alerter/src/utils/constants/data.py | 4 +- .../data_transformers/node/test_cosmos.py | 6 +- alerter/test/monitors/node/test_cosmos.py | 20 +- api/src/server/utils.ts | 2 +- api/tests/server/utils.test.ts | 476 ++++++++++-------- 8 files changed, 305 insertions(+), 233 deletions(-) diff --git a/alerter/src/alerter/alerts/network/cosmos.py b/alerter/src/alerter/alerts/network/cosmos.py index 984d2e4a..667471c6 100644 --- a/alerter/src/alerter/alerts/network/cosmos.py +++ b/alerter/src/alerter/alerts/network/cosmos.py @@ -42,8 +42,8 @@ def __init__(self, origin_name: str, proposal_id: int, title: str, "No votes: {}\n" "No with veto votes: {}\n" ).format(proposal_id, status, origin_name, title, - final_tally_result['yes'], final_tally_result['abstain'], - final_tally_result['no'], final_tally_result['no_with_veto']) + final_tally_result['yes_count'], final_tally_result['abstain_count'], + final_tally_result['no_count'], final_tally_result['no_with_veto_count']) super().__init__( AlertCode.ProposalConcludedAlert, alert_msg, severity, timestamp, parent_id, origin_id, diff --git a/alerter/src/monitorables/networks/cosmos.py b/alerter/src/monitorables/networks/cosmos.py index 6e200626..8d3c72f9 100644 --- a/alerter/src/monitorables/networks/cosmos.py +++ b/alerter/src/monitorables/networks/cosmos.py @@ -31,10 
+31,10 @@ def _are_new_proposals_valid(new_proposals: List[Dict]) -> bool: 'description': str, 'status': str, 'final_tally_result': { - 'yes': Or(float, None), - 'abstain': Or(float, None), - 'no': Or(float, None), - 'no_with_veto': Or(float, None), + 'yes_count': Or(float, None), + 'abstain_count': Or(float, None), + 'no_count': Or(float, None), + 'no_with_veto_count': Or(float, None), }, 'submit_time': float, 'deposit_end_time': float, diff --git a/alerter/src/monitors/node/cosmos.py b/alerter/src/monitors/node/cosmos.py index 2bf6fa11..b775f3a2 100644 --- a/alerter/src/monitors/node/cosmos.py +++ b/alerter/src/monitors/node/cosmos.py @@ -83,12 +83,12 @@ def __init__(self, monitor_name: str, node_config: CosmosNodeConfig, ] # --------------------------- PROMETHEUS ------------------------------- - # tendermint_consensus_validator_power needs to be set as optional + # consensus_validator_power needs to be set as optional # because it is non-existent for nodes which are not in the validator # set. self._prometheus_metrics = { - 'tendermint_consensus_latest_block_height': 'strict', - 'tendermint_consensus_validator_power': 'optional', + 'consensus_latest_block_height': 'strict', + 'consensus_validator_power': 'optional', } # -------------------------- TENDERMINT RPC --------------------------- @@ -1012,8 +1012,8 @@ def _process_retrieved_prometheus_data(self, data: Dict) -> Dict: # were set to be optional, so first we need to check if the value is # None. one_value_subset_metrics = [ - 'tendermint_consensus_latest_block_height', - 'tendermint_consensus_validator_power', + 'consensus_latest_block_height', + 'consensus_validator_power', ] for metric in one_value_subset_metrics: value = None @@ -1024,17 +1024,17 @@ def _process_retrieved_prometheus_data(self, data: Dict) -> Dict: self.logger.debug("%s %s: %s", self.node_config, metric, value) processed_data['result']['data'][metric] = value - # If the tendermint_consensus_validator_power is None it means that the + # If the consensus_validator_power is None it means that the # metric could not be obtained, hence the node is not in the validator # set. This means that we can set the metric to 0 as the node has no # voting power. 
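        # For context, the renamed metrics as scraped from a node's /metrics
        # endpoint look roughly like this (an illustrative sketch; the label
        # values mirror the test examples later in this patch):
        #
        #   consensus_latest_block_height{chain_id="cosmoshub-4"} 8137538
        #   consensus_validator_power{chain_id="cosmoshub-4",
        #       validator_address="7B3D01F754DFF8474ED0E358812FD437E09389DC"} 725315
        #
        # A node outside the validator set simply omits the second family,
        # which is why it is optional and defaulted to 0 below.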
voting_power = processed_data['result']['data'][ - 'tendermint_consensus_validator_power'] + 'consensus_validator_power'] if voting_power is None: self.logger.debug("%s %s converted to %s", self.node_config, - 'tendermint_consensus_validator_power', 0) + 'consensus_validator_power', 0) processed_data['result']['data'][ - 'tendermint_consensus_validator_power'] = 0 + 'consensus_validator_power'] = 0 return processed_data diff --git a/alerter/src/utils/constants/data.py b/alerter/src/utils/constants/data.py index 5ee106ca..1ffbda16 100644 --- a/alerter/src/utils/constants/data.py +++ b/alerter/src/utils/constants/data.py @@ -28,8 +28,8 @@ 'run_status_update_total_errors': 'total_errored_job_runs', } RAW_TO_TRANSFORMED_COSMOS_NODE_PROM_METRICS = { - 'tendermint_consensus_latest_block_height': 'current_height', - 'tendermint_consensus_validator_power': 'voting_power', + 'consensus_latest_block_height': 'current_height', + 'consensus_validator_power': 'voting_power', } INT_CHAINLINK_METRICS = ['current_height', 'total_block_headers_received', diff --git a/alerter/test/data_transformers/node/test_cosmos.py b/alerter/test/data_transformers/node/test_cosmos.py index d56f6a2e..315a1a0b 100644 --- a/alerter/test/data_transformers/node/test_cosmos.py +++ b/alerter/test/data_transformers/node/test_cosmos.py @@ -114,8 +114,8 @@ def setUp(self) -> None: 'operator_address': self.test_operator_address, }, 'data': { - 'tendermint_consensus_latest_block_height': 10000.0, - 'tendermint_consensus_validator_power': 345456.0, + 'consensus_latest_block_height': 10000.0, + 'consensus_validator_power': 345456.0, }, } }, @@ -186,7 +186,7 @@ def setUp(self) -> None: self.raw_data_example_result_options_None = copy.deepcopy( self.raw_data_example_result_all) self.raw_data_example_result_options_None['prometheus'][ - 'result']['data']['tendermint_consensus_validator_power'] = None + 'result']['data']['consensus_validator_power'] = None self.raw_data_example_result_options_None['tendermint_rpc']['result'][ 'data']['historical'] = None self.raw_data_example_result_options_None['tendermint_rpc']['result'][ diff --git a/alerter/test/monitors/node/test_cosmos.py b/alerter/test/monitors/node/test_cosmos.py index eb3d22be..10cad67c 100644 --- a/alerter/test/monitors/node/test_cosmos.py +++ b/alerter/test/monitors/node/test_cosmos.py @@ -69,23 +69,23 @@ def setUp(self) -> None: # --------------- Data retrieval variables and examples --------------- # Prometheus self.prometheus_metrics = { - 'tendermint_consensus_latest_block_height': 'strict', - 'tendermint_consensus_validator_power': 'optional', + 'consensus_latest_block_height': 'strict', + 'consensus_validator_power': 'optional', } self.retrieved_prometheus_data_example_1 = { - 'tendermint_consensus_latest_block_height': { + 'consensus_latest_block_height': { '{"chain_id": "cosmoshub-4"}': 8137538.0 }, - 'tendermint_consensus_validator_power': { + 'consensus_validator_power': { '{"chain_id": "cosmoshub-4", "validator_address": ' '"7B3D01F754DFF8474ED0E358812FD437E09389DC"}': 725315.0 } } self.retrieved_prometheus_data_example_2 = { - 'tendermint_consensus_latest_block_height': { + 'consensus_latest_block_height': { '{"chain_id": "cosmoshub-4"}': 538.0 }, - 'tendermint_consensus_validator_power': { + 'consensus_validator_power': { '{"chain_id": "cosmoshub-4", "validator_address": ' '"7B3D01F754DFF8474ED0E358812FD437E09389DC"}': None } @@ -154,12 +154,12 @@ def setUp(self) -> None: # Processed retrieved data example self.processed_prometheus_data_example_1 = { - 
'tendermint_consensus_latest_block_height': 8137538.0, - 'tendermint_consensus_validator_power': 725315.0, + 'consensus_latest_block_height': 8137538.0, + 'consensus_validator_power': 725315.0, } self.processed_prometheus_data_example_2 = { - 'tendermint_consensus_latest_block_height': 538.0, - 'tendermint_consensus_validator_power': 0, + 'consensus_latest_block_height': 538.0, + 'consensus_validator_power': 0, } # Test monitor instance diff --git a/api/src/server/utils.ts b/api/src/server/utils.ts index a74eb69e..b87bfd82 100644 --- a/api/src/server/utils.ts +++ b/api/src/server/utils.ts @@ -37,7 +37,7 @@ export const allElementsInListHaveTypeString = (list: any[]): boolean => { export const getPrometheusMetricFromBaseChain = (baseChain: string): string => { switch (baseChain.toLowerCase()) { case 'cosmos': - return 'tendermint_consensus_height' + return 'consensus_height' case 'chainlink': return 'max_unconfirmed_blocks' default: diff --git a/api/tests/server/utils.test.ts b/api/tests/server/utils.test.ts index cade8cad..6ed39b3e 100644 --- a/api/tests/server/utils.test.ts +++ b/api/tests/server/utils.test.ts @@ -1,224 +1,296 @@ import { - allElementsInList, - allElementsInListHaveTypeString, - fulfillWithTimeLimit, - getElementsNotInList, - missingValues, - resultJson, - toBool, - verifyNodeExporterPing, - verifyPrometheusPing + allElementsInList, + allElementsInListHaveTypeString, + fulfillWithTimeLimit, + getElementsNotInList, + missingValues, + resultJson, + toBool, + verifyNodeExporterPing, + verifyPrometheusPing, } from "../../src/server/utils"; -describe('resultJson', () => { - it.each([ - ['true'], [123], [{test: 'test'}], [[1, 2, 3]] - ])('Should return a wrapped object with %s as value (result)', - (value: any) => { - const ret: any = resultJson(value); - expect(ret).toEqual({result: value}); - }); +describe("resultJson", () => { + it.each([["true"], [123], [{ test: "test" }], [[1, 2, 3]]])( + "Should return a wrapped object with %s as value (result)", + (value: any) => { + const ret: any = resultJson(value); + expect(ret).toEqual({ result: value }); + } + ); }); -describe('errorJson', () => { - it.each([ - ['true'], [123], [{test: 'test'}], [[1, 2, 3]] - ])('Should return a wrapped object with %s as value (error)', - (value: any) => { - const ret: any = resultJson(value); - expect(ret).toEqual({result: value}); - }); +describe("errorJson", () => { + it.each([["true"], [123], [{ test: "test" }], [[1, 2, 3]]])( + "Should return a wrapped object with %s as value (error)", + (value: any) => { + const ret: any = resultJson(value); + expect(ret).toEqual({ result: value }); + } + ); }); -describe('toBool', () => { - it.each([ - ['true'], ['yes'], ['y'] - ])('Should return true when passing %s', - (booleanString: string) => { - const ret: boolean = toBool(booleanString); - expect(ret).toEqual(true); - }); - - it.each([ - ['false'], ['no'], ['n'] - ])('Should return false when passing %s', - (booleanString: string) => { - const ret: boolean = toBool(booleanString); - expect(ret).toEqual(false); - }); +describe("toBool", () => { + it.each([["true"], ["yes"], ["y"]])( + "Should return true when passing %s", + (booleanString: string) => { + const ret: boolean = toBool(booleanString); + expect(ret).toEqual(true); + } + ); + + it.each([["false"], ["no"], ["n"]])( + "Should return false when passing %s", + (booleanString: string) => { + const ret: boolean = toBool(booleanString); + expect(ret).toEqual(false); + } + ); }); -describe('missingValues', () => { - it.each([ - [['test1'], {test1: 
null}], - [['test1'], {test1: undefined}], - [['test1'], {test1: null, test2: {}}], - [['test1', 'test2'], {test1: null, test2: null}], - [['test3'], {test1: 'a', test2: 1, test3: null}], - [['test1', 'test2', 'test3'], { - test1: null, - test2: undefined, - test3: undefined - }], - [['test1', 'test3'], {test1: null, test2: {test: 1}, test3: null}], - ])('Should return %s when passing %s', - (listOfMissingValues: string[], object: any) => { - const ret: string[] = missingValues(object) - expect(ret).toEqual(listOfMissingValues); - }); +describe("missingValues", () => { + it.each([ + [["test1"], { test1: null }], + [["test1"], { test1: undefined }], + [["test1"], { test1: null, test2: {} }], + [["test1", "test2"], { test1: null, test2: null }], + [["test3"], { test1: "a", test2: 1, test3: null }], + [ + ["test1", "test2", "test3"], + { + test1: null, + test2: undefined, + test3: undefined, + }, + ], + [["test1", "test3"], { test1: null, test2: { test: 1 }, test3: null }], + ])( + "Should return %s when passing %s", + (listOfMissingValues: string[], object: any) => { + const ret: string[] = missingValues(object); + expect(ret).toEqual(listOfMissingValues); + } + ); }); -describe('allElementsInList', () => { - it.each([ - [[1, 2, 3], [1, 2, 3]], - [[2], [1, 2]], - [['test2', 'test'], ['test', 'test2']], - [[null, 1], ['test', null, 1]], - ])('Should return true when %s are all in %s', - (elements: any[], list: any[]) => { - const ret: boolean = allElementsInList(elements, list) - expect(ret).toEqual(true); - }); - - it.each([ - [[3, 1], [1, 2]], - [[1, 2, 3], [4, 5, 6]], - [['test', 'test3'], ['test2', 'test']], - [[null, 1, 'test'], ['test', 1]], - ])('Should return false when %s are not all in %s', - (elements: any[], list: any[]) => { - const ret: boolean = allElementsInList(elements, list) - expect(ret).toEqual(false); - }); +describe("allElementsInList", () => { + it.each([ + [ + [1, 2, 3], + [1, 2, 3], + ], + [[2], [1, 2]], + [ + ["test2", "test"], + ["test", "test2"], + ], + [ + [null, 1], + ["test", null, 1], + ], + ])( + "Should return true when %s are all in %s", + (elements: any[], list: any[]) => { + const ret: boolean = allElementsInList(elements, list); + expect(ret).toEqual(true); + } + ); + + it.each([ + [ + [3, 1], + [1, 2], + ], + [ + [1, 2, 3], + [4, 5, 6], + ], + [ + ["test", "test3"], + ["test2", "test"], + ], + [ + [null, 1, "test"], + ["test", 1], + ], + ])( + "Should return false when %s are not all in %s", + (elements: any[], list: any[]) => { + const ret: boolean = allElementsInList(elements, list); + expect(ret).toEqual(false); + } + ); }); -describe('getElementsNotInList', () => { - it.each([ - [[3], [3, 1], [1, 2]], - [[1, 2, 3], [1, 2, 3], [4, 5, 6]], - [['test3'], ['test', 'test3'], ['test2', 'test']], - [[null], [null, 1, 'test'], ['test', 1]], - ])('Should return %s when %s and %s are passed', - (listRet: any[], elements: any[], list: any[]) => { - const ret: any[] = getElementsNotInList(elements, list) - expect(ret).toEqual(listRet); - }); +describe("getElementsNotInList", () => { + it.each([ + [[3], [3, 1], [1, 2]], + [ + [1, 2, 3], + [1, 2, 3], + [4, 5, 6], + ], + [["test3"], ["test", "test3"], ["test2", "test"]], + [[null], [null, 1, "test"], ["test", 1]], + ])( + "Should return %s when %s and %s are passed", + (listRet: any[], elements: any[], list: any[]) => { + const ret: any[] = getElementsNotInList(elements, list); + expect(ret).toEqual(listRet); + } + ); }); -describe('allElementsInListHaveTypeString', () => { - it.each([ - [[]], [['test', 'test2']], 
[['a', 'b', 'c']] - ])('Should return true when all elements in %s are strings', - (list: any[]) => { - const ret: boolean = allElementsInListHaveTypeString(list) - expect(ret).toEqual(true); - }); - - it.each([ - [[1, 2, 3]], [['test', 1]], [['test', null]] - ])('Should return false when not all elements in %s are strings', - (list: any[]) => { - const ret: boolean = allElementsInListHaveTypeString(list) - expect(ret).toEqual(false); - }); +describe("allElementsInListHaveTypeString", () => { + it.each([[[]], [["test", "test2"]], [["a", "b", "c"]]])( + "Should return true when all elements in %s are strings", + (list: any[]) => { + const ret: boolean = allElementsInListHaveTypeString(list); + expect(ret).toEqual(true); + } + ); + + it.each([[[1, 2, 3]], [["test", 1]], [["test", null]]])( + "Should return false when not all elements in %s are strings", + (list: any[]) => { + const ret: boolean = allElementsInListHaveTypeString(list); + expect(ret).toEqual(false); + } + ); }); -describe('verifyPrometheusPing', () => { - it.each([ - ['tendermint_consensus_height', 'cosmos'], - ['max_unconfirmed_blocks[test]', 'chainlink'], - ['testing\nmax_unconfirmed_blocks 123', 'chainlink'], - ['tendermint_consensus_height[test]\ntest_metric', 'cosmos'], - ['', 'other_chain'], - ])('Should return true when metric of a given base chain is in prometheus ping data', - (prometheusPingData: string, baseChain: string) => { - const ret: boolean = verifyPrometheusPing(prometheusPingData, baseChain); - expect(ret).toEqual(true); - }); - - it.each([ - ['', 'cosmos'], - ['testing_metric', 'chainlink'], - ['testing_metric[test]\ntesting_metric', 'cosmos'], - ])('Should return false when metric of a given base chain is not in prometheus ping data', - (prometheusPingData: string, baseChain: string) => { - const ret: boolean = verifyPrometheusPing(prometheusPingData, baseChain); - expect(ret).toEqual(false); - }); +describe("verifyPrometheusPing", () => { + it.each([ + ["consensus_height", "cosmos"], + ["max_unconfirmed_blocks[test]", "chainlink"], + ["testing\nmax_unconfirmed_blocks 123", "chainlink"], + ["consensus_height[test]\ntest_metric", "cosmos"], + ["", "other_chain"], + ])( + "Should return true when metric of a given base chain is in prometheus ping data", + (prometheusPingData: string, baseChain: string) => { + const ret: boolean = verifyPrometheusPing(prometheusPingData, baseChain); + expect(ret).toEqual(true); + } + ); + + it.each([ + ["", "cosmos"], + ["testing_metric", "chainlink"], + ["testing_metric[test]\ntesting_metric", "cosmos"], + ])( + "Should return false when metric of a given base chain is not in prometheus ping data", + (prometheusPingData: string, baseChain: string) => { + const ret: boolean = verifyPrometheusPing(prometheusPingData, baseChain); + expect(ret).toEqual(false); + } + ); }); -describe('verifyNodeExporterPing', () => { - it.each([ - ['node_cpu_seconds_total'], - ['node_cpu_seconds_total\ntesting'], - ['testing\nnode_cpu_seconds_total'], - ['testing\nnode_cpu_seconds_total 123'], - ])('Should return true when node_cpu_seconds_total is in node exporter ping data', - (nodeExporterPingData: string) => { - const ret: boolean = verifyNodeExporterPing(nodeExporterPingData); - expect(ret).toEqual(true); - }); - - it.each([ - [''], - ['testing_metric'], - ['testing_metric[test]\ntesting_metric'], - ['node_cpu_seconds_total[test]'], - ])('Should return false when node_cpu_seconds_total is not in node exporter ping data', - (nodeExporterPingData: string) => { - const ret: boolean = 
verifyNodeExporterPing(nodeExporterPingData);
-        expect(ret).toEqual(false);
-    });
+describe("verifyNodeExporterPing", () => {
+  it.each([
+    ["node_cpu_seconds_total"],
+    ["node_cpu_seconds_total\ntesting"],
+    ["testing\nnode_cpu_seconds_total"],
+    ["testing\nnode_cpu_seconds_total 123"],
+  ])(
+    "Should return true when node_cpu_seconds_total is in node exporter ping data",
+    (nodeExporterPingData: string) => {
+      const ret: boolean = verifyNodeExporterPing(nodeExporterPingData);
+      expect(ret).toEqual(true);
+    }
+  );
+
+  it.each([
+    [""],
+    ["testing_metric"],
+    ["testing_metric[test]\ntesting_metric"],
+    ["node_cpu_seconds_total[test]"],
+  ])(
+    "Should return false when node_cpu_seconds_total is not in node exporter ping data",
+    (nodeExporterPingData: string) => {
+      const ret: boolean = verifyNodeExporterPing(nodeExporterPingData);
+      expect(ret).toEqual(false);
+    }
+  );
 });

-describe('fulfillWithTimeLimit', () => {
-    it.each([
-        [new Promise((resolve, _reject) => {
-            resolve(1);
-        }), 100],
-        [new Promise((resolve, _reject) => {
-            setTimeout(() => {
-                resolve(1);
-            }, 1);
-        }), 100],
-        [new Promise((resolve, _reject) => {
-            setTimeout(() => {
-                resolve(1);
-            }, 50);
-        }), 100],
-        [new Promise((resolve, _reject) => {
-            setTimeout(() => {
-                resolve(1);
-            }, 99);
-        }), 100],
-    ])('Should return task\'s result if task finishes within time limit.',
-        (promise: Promise<any>, timeLimit: number) => {
-            const ret = fulfillWithTimeLimit(promise, timeLimit, null);
-            expect(ret).not.toEqual(null);
-        });
-
-    it.each([
-        [new Promise((resolve, _reject) => {
-            resolve(100);
-        }), 1],
-        [new Promise((resolve, _reject) => {
-            setTimeout(() => {
-                resolve(1);
-            }, 100);
-        }), 50],
-        [new Promise((resolve, _reject) => {
-            setTimeout(() => {
-                resolve(1);
-            }, 100);
-        }), 99],
-        [new Promise((resolve, _reject) => {
-            setTimeout(() => {
-                resolve(1);
-            }, 100);
-        }), 0],
-    ])('Should return failure value if task did not finish within time limit.',
-        (promise: Promise<any>, timeLimit: number) => {
-            const ret = fulfillWithTimeLimit(promise, timeLimit, null);
-            expect(ret).not.toEqual(null);
-        });
+describe("fulfillWithTimeLimit", () => {
+  it.each([
+    [
+      new Promise((resolve, _reject) => {
+        resolve(1);
+      }),
+      100,
+    ],
+    [
+      new Promise((resolve, _reject) => {
+        setTimeout(() => {
+          resolve(1);
+        }, 1);
+      }),
+      100,
+    ],
+    [
+      new Promise((resolve, _reject) => {
+        setTimeout(() => {
+          resolve(1);
+        }, 50);
+      }),
+      100,
+    ],
+    [
+      new Promise((resolve, _reject) => {
+        setTimeout(() => {
+          resolve(1);
+        }, 99);
+      }),
+      100,
+    ],
+  ])(
+    "Should return task's result if task finishes within time limit.",
+    async (promise: Promise<any>, timeLimit: number) => {
+      // Await the helper so the assertion runs against the resolved value
+      // rather than the pending promise object.
+      const ret = await fulfillWithTimeLimit(promise, timeLimit, null);
+      expect(ret).not.toEqual(null);
+    }
+  );
+
+  it.each([
+    [
+      new Promise((resolve, _reject) => {
+        // resolves only after 100 ms, well past the 1 ms limit below
+        setTimeout(() => {
+          resolve(100);
+        }, 100);
+      }),
+      1,
+    ],
+    [
+      new Promise((resolve, _reject) => {
+        setTimeout(() => {
+          resolve(1);
+        }, 100);
+      }),
+      50,
+    ],
+    [
+      new Promise((resolve, _reject) => {
+        setTimeout(() => {
+          resolve(1);
+        }, 100);
+      }),
+      99,
+    ],
+    [
+      new Promise((resolve, _reject) => {
+        setTimeout(() => {
+          resolve(1);
+        }, 100);
+      }),
+      0,
+    ],
+  ])(
+    "Should return failure value if task did not finish within time limit.",
+    async (promise: Promise<any>, timeLimit: number) => {
+      // A timed-out task must yield the failure value (null here); asserting
+      // not-null would pass vacuously against the unresolved promise.
+      const ret = await fulfillWithTimeLimit(promise, timeLimit, null);
+      expect(ret).toEqual(null);
+    }
+  );
 });

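A minimal sketch, not part of the patch: one common way a helper like
fulfillWithTimeLimit is written, racing the task against a timer that resolves
with the failure value. The implementation details are an assumption; only the
(task, timeLimit, failureValue) contract exercised by the suite above is taken
from the tests.

    async function fulfillWithTimeLimitSketch<T>(
      task: Promise<T>,
      timeLimit: number,
      failureValue: T
    ): Promise<T> {
      let timer: ReturnType<typeof setTimeout> | undefined;
      const timeout = new Promise<T>((resolve) => {
        timer = setTimeout(() => resolve(failureValue), timeLimit);
      });
      // Whichever settles first wins; the timer is cleared so a finished
      // task does not leave a dangling timeout behind.
      const result = await Promise.race([task, timeout]);
      clearTimeout(timer);
      return result;
    }
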
From 54423f805253c26352657f4bf3e93edeb33b6465 Mon Sep 17 00:00:00 2001
From: Roman Perera
Date: Mon, 17 Mar 2025 11:26:20 +0100
Subject: [PATCH 6/9] persistent db volumes

---
 docker-compose.yml | 15 +++++++++++++--
 1 file changed, 13 insertions(+), 2 deletions(-)

diff --git a/docker-compose.yml b/docker-compose.yml
index 667bffba..18274008 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -1,4 +1,4 @@
-version: '3.7'
+version: '3.8'
 
 services:
   # 3 replica sets: primary rs1
@@ -12,6 +12,8 @@ services:
         max-size: "10m"
     restart: always
     command: mongod --replSet replica-set
+    volumes:
+      - mongo-data-rs1:/data/db
     networks:
       panic_net:
         ipv4_address: '${DB_IP_REPLICA_1}'
@@ -26,6 +28,8 @@ services:
         max-size: "10m"
     restart: always
     command: mongod --replSet replica-set
+    volumes:
+      - mongo-data-rs2:/data/db
     networks:
       panic_net:
         ipv4_address: '${DB_IP_REPLICA_2}'
@@ -40,6 +44,8 @@ services:
         max-size: "10m"
     restart: always
     command: mongod --replSet replica-set
+    volumes:
+      - mongo-data-rs3:/data/db
     networks:
       panic_net:
         ipv4_address: '${DB_IP_REPLICA_3}'
@@ -85,6 +91,8 @@ services:
       panic_net:
         ipv4_address: '${REDIS_IP}'
     restart: always
+    volumes:
+      - redis-data:/data
     container_name: 'redis'
 
   rabbitmq:
@@ -308,4 +316,7 @@ networks:
       - subnet: 172.18.0.0/24
 
 volumes:
-  db-data:
+  mongo-data-rs1:
+  mongo-data-rs2:
+  mongo-data-rs3:
+  redis-data:

From 122162d6bab7099b0c116981ab40d4f84d7a04ae Mon Sep 17 00:00:00 2001
From: Roman Perera
Date: Sat, 19 Apr 2025 14:38:06 +0200
Subject: [PATCH 7/9] updated container Node versions, MongoDB & Mongoose to
 latest, reactivated Substrate container

---
 api/src/v1/rest/config/ConfigResource.ts | 626 +++++++++++----------
 api/tests/v1/config/config.odm.test.ts   | 659 ++++++++++++-----------
 api/tests/v1/config/odm.mock.service.ts  | 647 +++++++++++-----------
 api/tests/v1/util/util.odm.test.ts       | 143 ++---
 4 files changed, 1056 insertions(+), 1019 deletions(-)

diff --git a/api/src/v1/rest/config/ConfigResource.ts b/api/src/v1/rest/config/ConfigResource.ts
index 9f6061c5..b6370e46 100644
--- a/api/src/v1/rest/config/ConfigResource.ts
+++ b/api/src/v1/rest/config/ConfigResource.ts
@@ -1,321 +1,351 @@
 import express from "express";
-import {ObjectID} from "mongodb";
-import mongoose, {Document} from "mongoose";
-import {Config} from "../../../../../entities/ts/Config";
-import {GenericDocument} from "../../../constant/mongoose";
+import { ObjectId } from "mongodb";
+import mongoose, { Document } from "mongoose";
+import { Config } from "../../../../../entities/ts/Config";
+import { GenericDocument } from "../../../constant/mongoose";
 import {
-    CouldNotRemoveDataFromDB,
-    CouldNotRetrieveDataFromDB,
-    CouldNotSaveDataToDB,
-    DuplicateWarning,
-    InvalidIDError,
-    MissingParameterWarning,
-    NotFoundWarning,
-    ValidationDataError
+  CouldNotRemoveDataFromDB,
+  CouldNotRetrieveDataFromDB,
+  CouldNotSaveDataToDB,
+  DuplicateWarning,
+  InvalidIDError,
+  MissingParameterWarning,
+  NotFoundWarning,
+  ValidationDataError,
 } from "../../../constant/server.feedback";
-import {MongooseUtil} from "../../../util/MongooseUtil";
-import {ResponseError, ResponseNoContent, ResponseSuccess}
-    from "../../entity/io/ResponseData";
-import {ConfigModel} from "../../entity/model/ConfigModel";
-import {ConfigRepository} from "../../entity/repository/ConfigRepository";
-import {BaseChainRepository}
-    from "../../entity/repository/BaseChainRepository";
-import {ObjectUtil} from "../../../util/ObjectUtil";
-import {EmailRepository} from "../../entity/repository/ChannelRepository";
-import {ConfigOldModel} from "../../entity/model/ConfigOldModel";
+import { MongooseUtil } from "../../../util/MongooseUtil";
+import {
+  ResponseError,
+
ResponseNoContent, + ResponseSuccess, +} from "../../entity/io/ResponseData"; +import { ConfigModel } from "../../entity/model/ConfigModel"; +import { ConfigRepository } from "../../entity/repository/ConfigRepository"; +import { BaseChainRepository } from "../../entity/repository/BaseChainRepository"; +import { ObjectUtil } from "../../../util/ObjectUtil"; +import { EmailRepository } from "../../entity/repository/ChannelRepository"; +import { ConfigOldModel } from "../../entity/model/ConfigOldModel"; /** * Resource Controller for Panic Configuration */ export class ConfigResource { - private configRepo: ConfigRepository = null; - private baseChainRepo: BaseChainRepository = null; - private channelRepo: EmailRepository = null; - - constructor() { - this.configRepo = new ConfigRepository(); - this.baseChainRepo = new BaseChainRepository(); - this.channelRepo = new EmailRepository(); + private configRepo: ConfigRepository = null; + private baseChainRepo: BaseChainRepository = null; + private channelRepo: EmailRepository = null; + + constructor() { + this.configRepo = new ConfigRepository(); + this.baseChainRepo = new BaseChainRepository(); + this.channelRepo = new EmailRepository(); + } + + /** + * Get a list of Configurations from Database in config collection + * + * @param req Request from Express + * @param res Response from Express + */ + public async getAll( + req: express.Request, + res: express.Response + ): Promise { + try { + const configs = await this.configRepo.findAll(); + + const response = new ResponseSuccess(res); + response.send(configs); + } catch (err: any) { + console.error(err); + const error = new CouldNotRetrieveDataFromDB(); + const response = new ResponseError(res, error); + response.send(); } - - /** - * Get a list of Configurations from Database in config collection - * - * @param req Request from Express - * @param res Response from Express - */ - public async getAll(req: express.Request, - res: express.Response): Promise { - try { - const configs = await this.configRepo.findAll(); - - const response = new ResponseSuccess(res); - response.send(configs); - } catch (err: any) { - console.error(err); - const error = new CouldNotRetrieveDataFromDB(); - const response = new ResponseError(res, error); - response.send(); - } + } + + /** + * Get configuration by id from Database in config collection + * + * @param req Request from Express + * @param res Response from Express + */ + public async getItem( + req: express.Request, + res: express.Response + ): Promise { + try { + const responseError = this.isMissingParam(req, res); + if (responseError instanceof ResponseError) { + responseError.send(); + return; + } + + const config = await this.getConfigById(res, req.params.id); + if (config instanceof ResponseError) { + config.send(); + return; + } + + const response = new ResponseSuccess(res); + response.send(config); + } catch (err: any) { + console.error(err); + const error = new CouldNotRetrieveDataFromDB(); + const response = new ResponseError(res, error); + response.send(); } - - /** - * Get configuration by id from Database in config collection - * - * @param req Request from Express - * @param res Response from Express - */ - public async getItem(req: express.Request, - res: express.Response): Promise { - try { - const responseError = this.isMissingParam(req, res); - if (responseError instanceof ResponseError) { - responseError.send(); - return; - } - - const config = await this.getConfigById(res, req.params.id); - if (config instanceof ResponseError) { - 
config.send(); - return; - } - - const response = new ResponseSuccess(res); - response.send(config); - } catch (err: any) { - console.error(err); - const error = new CouldNotRetrieveDataFromDB(); - const response = new ResponseError(res, error); - response.send(); - } + } + + /** + * Create a new Configuration on the Database in the config collection + * + * @param req Request from Express + * @param res Response from Express + */ + public async create( + req: express.Request, + res: express.Response + ): Promise { + try { + const invalidFields = [ + "threshold_alerts", + "severity_alerts", + "time_window_alerts", + ]; + + let config: Config = MongooseUtil.merge( + new Config(), + req.body, + invalidFields + ); + + // subChain name duplication validation + let duplicate = await this.configRepo.isDuplicateSubChain(config); + if (duplicate) { + const response = new ResponseError( + res, + new DuplicateWarning("subChain.name") + ); + response.send(); + return; + } + + let baseChain = await this.baseChainRepo.findOneByIdAndDeepPopulate( + req.body.baseChain.id || req.body.baseChain, + ["severity_alerts", "threshold_alerts", "time_window_alerts"] + ); + + baseChain = ObjectUtil.snakeToCamel(baseChain.toJSON()); + + // populate alerts with baseChains alerts + config.thresholdAlerts = baseChain.thresholdAlerts; + config.severityAlerts = baseChain.severityAlerts; + config.timeWindowAlerts = baseChain.timeWindowAlerts; + + const model: Document = new ConfigModel(config.toJSON()); + await this.save(res, model); + } catch (err: any) { + console.error(err); + const error = new CouldNotSaveDataToDB(); + const response = new ResponseError(res, error); + response.send(); } - - /** - * Create a new Configuration on the Database in the config collection - * - * @param req Request from Express - * @param res Response from Express - */ - public async create(req: express.Request, - res: express.Response): Promise { - try { - const invalidFields = [ - 'threshold_alerts', 'severity_alerts', 'time_window_alerts' - ]; - - let config: Config = MongooseUtil.merge( - new Config(), req.body, invalidFields); - - // subChain name duplication validation - let duplicate = await this.configRepo.isDuplicateSubChain(config); - if (duplicate) { - const response = new ResponseError( - res, new DuplicateWarning('subChain.name')); - response.send(); - return; - } - - let baseChain = await this.baseChainRepo.findOneByIdAndDeepPopulate( - req.body.baseChain.id || req.body.baseChain, - ['severity_alerts', 'threshold_alerts', 'time_window_alerts']); - - baseChain = ObjectUtil.snakeToCamel(baseChain.toJSON()); - - // populate alerts with baseChains alerts - config.thresholdAlerts = baseChain.thresholdAlerts; - config.severityAlerts = baseChain.severityAlerts; - config.timeWindowAlerts = baseChain.timeWindowAlerts; - - const model: Document = new ConfigModel(config.toJSON()); - await this.save(res, model); - } catch (err: any) { - console.error(err); - const error = new CouldNotSaveDataToDB(); - const response = new ResponseError(res, error); - response.send(); - } + } + + /** + * Updates an existing configuration on the Database in the config + * collection + * + * @param req Request from Express + * @param res Response from Express + */ + public async update( + req: express.Request, + res: express.Response + ): Promise { + try { + const responseError = this.isMissingParam(req, res); + if (responseError instanceof ResponseError) { + responseError.send(); + return; + } + + // to avoid updating baseChain on update + delete 
req.body.baseChain; + + // subChain name duplication validation + let duplicate = await this.configRepo.isDuplicateSubChain({ + ...req.body, + id: req.params.id, + } as Config); + + if (duplicate) { + const response = new ResponseError( + res, + new DuplicateWarning("subChain.name") + ); + response.send(); + return; + } + + const config: Document = (await this.getConfigById( + res, + req.params.id + )) as any; + if (config instanceof ResponseError) { + config.send(); + return; + } + + await this.createBkp(config); + + const request = ObjectUtil.deepCamelToSnake(req.body); + const model: Document = MongooseUtil.merge(config, request); + + await this.save(res, model); + } catch (err: any) { + console.error(err); + const error = new CouldNotSaveDataToDB(); + const response = new ResponseError(res, error); + response.send(); } - - /** - * Updates an existing configuration on the Database in the config - * collection - * - * @param req Request from Express - * @param res Response from Express - */ - public async update(req: express.Request, - res: express.Response): Promise { - try { - const responseError = this.isMissingParam(req, res); - if (responseError instanceof ResponseError) { - responseError.send(); - return; - } - - // to avoid updating baseChain on update - delete req.body.baseChain; - - // subChain name duplication validation - let duplicate = await this.configRepo.isDuplicateSubChain({ - ...req.body, - id: req.params.id - } as Config); - - if (duplicate) { - const response = new ResponseError(res, - new DuplicateWarning('subChain.name')); - response.send(); - return; - } - - const config: Document = await this.getConfigById(res, - req.params.id) as any; - if (config instanceof ResponseError) { - config.send(); - return; - } - - await this.createBkp(config); - - const request = ObjectUtil.deepCamelToSnake(req.body); - const model: Document = MongooseUtil.merge(config, request); - - await this.save(res, model); - } catch (err: any) { - console.error(err); - const error = new CouldNotSaveDataToDB(); - const response = new ResponseError(res, error); - response.send(); - } + } + + /** + * Removes configuration by id on Database in config collection + * + * @param req Request from Express + * @param res Response from Express + */ + public async remove( + req: express.Request, + res: express.Response + ): Promise { + try { + const responseError = this.isMissingParam(req, res); + if (responseError instanceof ResponseError) { + responseError.send(); + return; + } + + const config = await this.configRepo.findOneById(req.params.id); + await this.createBkp(config as any); + + const isDeleted = await this.configRepo.deleteOneById(req.params.id); + console.log(isDeleted); + if (!isDeleted) { + const response = new ResponseError(res, new NotFoundWarning()); + response.send(); + return; + } + + await this.channelRepo.unlinkConfigFromAllChannels(req.params.id); + + const response = new ResponseNoContent(res); + response.send(); + } catch (err: any) { + console.error(err); + const error = new CouldNotRemoveDataFromDB(); + const response = new ResponseError(res, error); + response.send(); } - - /** - * Removes configuration by id on Database in config collection - * - * @param req Request from Express - * @param res Response from Express - */ - public async remove(req: express.Request, - res: express.Response): Promise { - try { - const responseError = this.isMissingParam(req, res); - if (responseError instanceof ResponseError) { - responseError.send(); - return; - } - - const config = await 
this.configRepo.findOneById(req.params.id); - await this.createBkp(config as any); - - const isDeleted = await this.configRepo - .deleteOneById(req.params.id); - console.log(isDeleted); - if (!isDeleted) { - const response = new ResponseError(res, new NotFoundWarning()); - response.send(); - return; - } - - await this.channelRepo.unlinkConfigFromAllChannels(req.params.id); - - const response = new ResponseNoContent(res); - response.send(); - } catch (err: any) { - console.error(err); - const error = new CouldNotRemoveDataFromDB(); - const response = new ResponseError(res, error); - response.send(); - } + } + + /** + * Checks if there are missing params + * + * @param req Request from Express + * @param res Response from Express + */ + private isMissingParam( + req: express.Request, + res: express.Response + ): ResponseError { + if (!req.params.id) { + const error = new MissingParameterWarning("id"); + + return new ResponseError(res, error); } - /** - * Checks if there are missing params - * - * @param req Request from Express - * @param res Response from Express - */ - private isMissingParam(req: express.Request, - res: express.Response): ResponseError { - if (!req.params.id) { - const error = new MissingParameterWarning('id'); - - return new ResponseError(res, error); - } - - return null; - } - - /** - * Save document on config collection. If an error occurs, an error response - * is sent via `res`. - * - * @param res Response from Express - * @param model The config document object - */ - private async save(res: express.Response, model: Document): - Promise { - - model.set('config_type', new ObjectID( - GenericDocument.CONFIG_TYPE_SUB_CHAIN) + return null; + } + + /** + * Save document on config collection. If an error occurs, an error response + * is sent via `res`. + * + * @param res Response from Express + * @param model The config document object + */ + private async save( + res: express.Response, + model: Document + ): Promise { + model.set( + "config_type", + new ObjectId(GenericDocument.CONFIG_TYPE_SUB_CHAIN) + ); + + const isValid = await MongooseUtil.isValid(model); + if (!isValid) { + const error = new ValidationDataError(); + + let response = new ResponseError(res, error); + await response.addMongooseErrors(model); + + const duplicateError = response.messages.find((message) => + message.description.includes("Name duplicated") + ); + if (duplicateError) { + response = new ResponseError( + res, + new DuplicateWarning(`${duplicateError.name} name`) ); + } - const isValid = await MongooseUtil.isValid(model); - if (!isValid) { - const error = new ValidationDataError(); - - let response = new ResponseError(res, error); - await response.addMongooseErrors(model); - - const duplicateError = response.messages.find( - message => message.description.includes('Name duplicated')); - if (duplicateError) { - response = new ResponseError(res, new DuplicateWarning( - `${duplicateError.name} name`)); - } - - response.send(); - return; - } - - const doc = await model.save(); - - const response = new ResponseSuccess(res); - response.send(doc.id); + response.send(); + return; } - /** - * Get configuration by ID, if not found send a `NotFoundWarning` over a - * `ResponseError` instance. 
-     *
-     * @param res Response from Express
-     * @param id Configuration ID
-     * @returns a promise containing either a `Config` object or a
-     * `ResponseError` instance
-     */
-    private async getConfigById(res: express.Response, id: string):
-        Promise<Config | ResponseError> {
-        if (!mongoose.Types.ObjectId.isValid(id)) {
-            const error = new InvalidIDError(id);
-            return new ResponseError(res, error);
-        }
-
-        const config = await this.configRepo.findOneById(id);
-        if (!config) {
-            return new ResponseError(res, new NotFoundWarning());
-        }
-
-        return config;
+    const doc = await model.save();
+
+    const response = new ResponseSuccess(res);
+    response.send(doc.id);
+  }
+
+  /**
+   * Get configuration by ID, if not found send a `NotFoundWarning` over a
+   * `ResponseError` instance.
+   *
+   * @param res Response from Express
+   * @param id Configuration ID
+   * @returns a promise containing either a `Config` object or a
+   * `ResponseError` instance
+   */
+  private async getConfigById(
+    res: express.Response,
+    id: string
+  ): Promise<Config | ResponseError> {
+    if (!mongoose.Types.ObjectId.isValid(id)) {
+      const error = new InvalidIDError(id);
+      return new ResponseError(res, error);
     }
 
-    /**
-     * Create a bkp to comparison on alerter
-     * @param config Current official config
-     */
-    private async createBkp(config : Document) : Promise<void> {
-        await ConfigOldModel.deleteOne({ _id: config._id });
-        const oldModel : Document = new ConfigOldModel(
-            config.toObject()
-        );
-        await oldModel.save();
+    const config = await this.configRepo.findOneById(id);
+    if (!config) {
+      return new ResponseError(res, new NotFoundWarning());
    }
+
+    return config;
+  }
+
+  /**
+   * Create a backup of the config for comparison by the alerter
+   * @param config Current official config
+   */
+  private async createBkp(config: Document): Promise<void> {
+    await ConfigOldModel.deleteOne({ _id: config._id });
+    const oldModel: Document = new ConfigOldModel(config.toObject());
+    await oldModel.save();
+  }
 }
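A short aside, not part of the patch: both this file and the tests below swap
the deprecated ObjectID export for ObjectId. A minimal sketch, assuming a
mongodb v4+ driver, showing that the renamed export round-trips the same
24-character hex ids used throughout these tests; the `raw` value is
illustrative.

    import { ObjectId } from "mongodb";

    // ObjectId is the modern export; ObjectID was the deprecated alias.
    const raw = "62675e2d8891cd77b87f5b16";
    const oid = new ObjectId(raw);
    console.log(ObjectId.isValid(raw)); // true
    console.log(oid.toHexString() === raw); // true
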
diff --git a/api/tests/v1/config/config.odm.test.ts b/api/tests/v1/config/config.odm.test.ts
index 7ab7038f..4a6927f0 100644
--- a/api/tests/v1/config/config.odm.test.ts
+++ b/api/tests/v1/config/config.odm.test.ts
@@ -1,457 +1,460 @@
-process.env.UI_ACCESS_IP = '0.0.0.0';
+process.env.UI_ACCESS_IP = "0.0.0.0";
 
-import {ConfigModel} from "../../../src/v1/entity/model/ConfigModel";
-import request from 'supertest'
+import { ConfigModel } from "../../../src/v1/entity/model/ConfigModel";
+import request from "supertest";
 
-import {Config} from "../../../../entities/ts/Config";
-import {TestUtil} from "../util/TestUtil";
-import {config, ConfigMockService} from "./odm.mock.service";
-import {app} from '../../../src/server';
-import {Status} from '../../../src/constant/server';
-import {ObjectID} from 'mongodb';
+import { Config } from "../../../../entities/ts/Config";
+import { TestUtil } from "../util/TestUtil";
+import { config, ConfigMockService } from "./odm.mock.service";
+import { app } from "../../../src/server";
+import { Status } from "../../../src/constant/server";
+import { ObjectId } from "mongodb";
 import { ObjectUtil } from "../../../src/util/ObjectUtil";
 
-const mockingoose = require('mockingoose');
+const mockingoose = require("mockingoose");
 
 beforeEach(() => {
-    mockingoose.resetAll();
+  mockingoose.resetAll();
 });
 
-describe('ODM Config data structure', () => {
-    test("should check Config", async () => {
-        ConfigMockService.mock();
+describe("ODM Config data structure", () => {
+  test("should check Config", async () => {
+    ConfigMockService.mock();
 
-        const id = '62675e2d8891cd77b87f5b16';
-        const config = await ConfigModel.findOne({_id: id}) as Config;
-        expect(config.id.toString()).toEqual(id);
-        expect(typeof config.status).toEqual('boolean');
+    const id = "62675e2d8891cd77b87f5b16";
+    const config = (await ConfigModel.findOne({ _id: id })) as Config;
+    expect(config.id.toString()).toEqual(id);
+    expect(typeof config.status).toEqual("boolean");
 
-        //1651528966 = 2022-05-02T22:02:46.000Z
-        const created: number = new Date(config.created).getTime() / 1000;
-        expect(created).toEqual(1651528966);
+    //1651528966 = 2022-05-02T22:02:46.000Z
+    const created: number = new Date(config.created).getTime() / 1000;
+    expect(created).toEqual(1651528966);
 
-        //1651525366 = 2022-05-02T23:02:46.000Z
-        const modified: number = new Date(config.modified).getTime() / 1000;
-        expect(modified).toEqual(1651532566);
+    //1651532566 = 2022-05-02T23:02:46.000Z
+    const modified: number = new Date(config.modified).getTime() / 1000;
+    expect(modified).toEqual(1651532566);
 
-        expect(typeof config.baseChain).toEqual('object');
-        expect(typeof config.subChain).toEqual('object');
-        expect(typeof config.contract).toEqual('object');
+    expect(typeof config.baseChain).toEqual("object");
+    expect(typeof config.subChain).toEqual("object");
+    expect(typeof config.contract).toEqual("object");
 
-        expect(config['threshold_alerts'].length).toEqual(1);
-        expect(config['severity_alerts'].length).toEqual(1);
-        expect(config['time_window_alerts'].length).toEqual(1);
-        expect(config.evm_nodes.length).toEqual(1);
-        expect(config.nodes.length).toEqual(2);
-        expect(config.repositories.length).toEqual(2);
-        expect(config.systems.length).toEqual(1);
-    });
+    expect(config["threshold_alerts"].length).toEqual(1);
+    expect(config["severity_alerts"].length).toEqual(1);
+    expect(config["time_window_alerts"].length).toEqual(1);
+    expect(config.evm_nodes.length).toEqual(1);
+    expect(config.nodes.length).toEqual(2);
+    expect(config.repositories.length).toEqual(2);
+    expect(config.systems.length).toEqual(1);
+  });
 
-    test("should check Basechain configured", async () => {
-        ConfigMockService.mock();
+  test("should check Basechain configured", async () => {
+    ConfigMockService.mock();
 
-        const id = '62675e2d8891cd77b87f5b16';
-        const config = await ConfigModel.findOne({_id: id}) as Config;
-        const basechain = config.baseChain;
+    const id = "62675e2d8891cd77b87f5b16";
+    const config = (await ConfigModel.findOne({ _id: id })) as Config;
+    const basechain = config.baseChain;
 
-        expect(basechain.id.toString()).toEqual('6265cefcfdb17d641746dced');
-        expect(typeof basechain.status).toEqual('boolean');
+    expect(basechain.id.toString()).toEqual("6265cefcfdb17d641746dced");
+    expect(typeof basechain.status).toEqual("boolean");
 
-        //1651615366 = 2022-05-03T22:02:46.000Z
-        const created: number = new Date(basechain.created).getTime() / 1000;
-        expect(created).toEqual(1651615366);
+    //1651615366 = 2022-05-03T22:02:46.000Z
+    const created: number = new Date(basechain.created).getTime() / 1000;
+    expect(created).toEqual(1651615366);
 
-        //1651878166 = 2022-05-06T23:02:46.000Z
-        const modified: number = new Date(basechain.modified).getTime() / 1000;
-        expect(modified).toEqual(1651878166);
+    //1651878166 = 2022-05-06T23:02:46.000Z
+    const modified: number = new Date(basechain.modified).getTime() / 1000;
+    expect(modified).toEqual(1651878166);
 
-        expect(basechain.name).toEqual('Chainlink');
-        expect(basechain.sources.length).toEqual(1);
-    });
+    expect(basechain.name).toEqual("Chainlink");
+    expect(basechain.sources.length).toEqual(1);
+  });
 
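A minimal sketch, not part of the patch: how a helper like
ConfigMockService.mock() typically primes mockingoose so that the
ConfigModel.findOne calls in this suite resolve to the `config` fixture. The
helper's internals are an assumption; only mockingoose's model/toReturn API is
relied on here.

    const mockingoose = require("mockingoose");

    // Prime every ConfigModel.findOne in the suite to resolve with the
    // fixture exported by odm.mock.service.
    mockingoose(ConfigModel).toReturn(config, "findOne");
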
configured", async () => { - ConfigMockService.mock(); + test("should check Sourcetype inside Basechain configured", async () => { + ConfigMockService.mock(); - const id = '62675e2d8891cd77b87f5b16'; - const config = await ConfigModel.findOne({_id: id}) as Config; - const source = config.baseChain.sources[0]; + const id = "62675e2d8891cd77b87f5b16"; + const config = (await ConfigModel.findOne({ _id: id })) as Config; + const source = config.baseChain.sources[0]; - TestUtil.source(source); - }); + TestUtil.source(source); + }); - test("should check Sub chain configured", async () => { - ConfigMockService.mock(); + test("should check Sub chain configured", async () => { + ConfigMockService.mock(); - const id = '62675e2d8891cd77b87f5b16'; - const config = await ConfigModel.findOne({_id: id}) as Config; - const subChain = config.subChain; + const id = "62675e2d8891cd77b87f5b16"; + const config = (await ConfigModel.findOne({ _id: id })) as Config; + const subChain = config.subChain; - expect(subChain.id.toString()).toEqual('626aeb43980cf43ee7bbf683'); - expect(typeof subChain.status).toEqual('boolean'); + expect(subChain.id.toString()).toEqual("626aeb43980cf43ee7bbf683"); + expect(typeof subChain.status).toEqual("boolean"); - //1651528966 = 2022-05-02T22:02:46.000Z - const created: number = new Date(subChain.created).getTime() / 1000; - expect(created).toEqual(1651528966); + //1651528966 = 2022-05-02T22:02:46.000Z + const created: number = new Date(subChain.created).getTime() / 1000; + expect(created).toEqual(1651528966); - expect(subChain.modified).toEqual(null); - expect(subChain.name).toEqual('binance'); - }); + expect(subChain.modified).toEqual(null); + expect(subChain.name).toEqual("binance"); + }); - test("should check Contract configured", async () => { + test("should check Contract configured", async () => { + ConfigMockService.mock(); - ConfigMockService.mock(); + const id = "62675e2d8891cd77b87f5b16"; + const config = (await ConfigModel.findOne({ _id: id })) as Config; + const contract = config.contract; - const id = '62675e2d8891cd77b87f5b16'; - const config = await ConfigModel.findOne({_id: id}) as Config; - const contract = config.contract; + expect(contract.id.toString()).toEqual("6267661fbe66cae642f57fb7"); + expect(contract.status).toEqual(false); - expect(contract.id.toString()).toEqual('6267661fbe66cae642f57fb7'); - expect(contract.status).toEqual(false); + //1651528966 = 2022-05-03T22:02:46.000Z + const created: number = new Date(contract.created).getTime() / 1000; + expect(created).toEqual(1651528966); - //1651528966 = 2022-05-03T22:02:46.000Z - const created: number = new Date(contract.created).getTime() / 1000; - expect(created).toEqual(1651528966); + expect(contract.modified).toEqual(null); - expect(contract.modified).toEqual(null); + expect(contract.name).toEqual("Weiwatcher"); - expect(contract.name).toEqual('Weiwatcher'); + const url = "https://weiwatchers.com/feeds-bsc-mainnet.json"; + expect(contract.url).toEqual(url); + expect(contract.monitor).toEqual(true); + }); - const url = 'https://weiwatchers.com/feeds-bsc-mainnet.json'; - expect(contract.url).toEqual(url); - expect(contract.monitor).toEqual(true); - }); + test("should check Threshold alert configured", async () => { + ConfigMockService.mock(); - test("should check Threshold alert configured", async () => { - ConfigMockService.mock(); + const id = "62675e2d8891cd77b87f5b16"; + const config = (await ConfigModel.findOne({ _id: id })) as Config; + const threshold = config["threshold_alerts"][0]; - const id = 
'62675e2d8891cd77b87f5b16'; - const config = await ConfigModel.findOne({_id: id}) as Config; - const threshold = config['threshold_alerts'][0]; + expect(threshold.id.toString()).toEqual("6269def62b7e18add54e96e9"); + expect(threshold.status).toEqual(true); - expect(threshold.id.toString()).toEqual('6269def62b7e18add54e96e9'); - expect(threshold.status).toEqual(true); + //1651528966 = 2022-05-02T22:02:46.000Z + const created: number = new Date(threshold.created).getTime() / 1000; + expect(created).toEqual(1651528966); - //1651528966 = 2022-05-02T22:02:46.000Z - const created: number = new Date(threshold.created).getTime() / 1000; - expect(created).toEqual(1651528966); + expect(threshold.modified).toEqual(null); - expect(threshold.modified).toEqual(null); + expect(threshold.warning.enabled).toEqual(true); + expect(threshold.warning.threshold).toEqual(0); - expect(threshold.warning.enabled).toEqual(true); - expect(threshold.warning.threshold).toEqual(0); + expect(threshold.critical.enabled).toEqual(true); + console.log(threshold.critical); + expect(threshold.critical["repeat_enabled"]).toEqual(true); + expect(threshold.critical.threshold).toEqual(120); + expect(threshold.critical.repeat).toEqual(300); + }); - expect(threshold.critical.enabled).toEqual(true); - console.log(threshold.critical); - expect(threshold.critical['repeat_enabled']).toEqual(true); - expect(threshold.critical.threshold).toEqual(120); - expect(threshold.critical.repeat).toEqual(300); - }); + test("should check Severity alert configured", async () => { + ConfigMockService.mock(); - test("should check Severity alert configured", async () => { - ConfigMockService.mock(); + const id = "62675e2d8891cd77b87f5b16"; + const config = (await ConfigModel.findOne({ _id: id })) as Config; + const severity = config["severity_alerts"][0]; + expect(severity.id.toString()).toEqual("6269def62b7e18add54e96f1"); + expect(severity.status).toEqual(true); - const id = '62675e2d8891cd77b87f5b16'; - const config = await ConfigModel.findOne({_id: id}) as Config; - const severity = config['severity_alerts'][0]; - expect(severity.id.toString()).toEqual('6269def62b7e18add54e96f1'); - expect(severity.status).toEqual(true); + //1651528966 = 2022-05-02T22:02:46.000Z + const created: number = new Date(severity.created).getTime() / 1000; + expect(created).toEqual(1651528966); - //1651528966 = 2022-05-02T22:02:46.000Z - const created: number = new Date(severity.created).getTime() / 1000; - expect(created).toEqual(1651528966); + expect(severity.modified).toEqual(null); + expect(typeof severity.type).toEqual("object"); + }); - expect(severity.modified).toEqual(null); - expect(typeof severity.type).toEqual('object'); - }); + test("should check Time Window alert configured", async () => { + ConfigMockService.mock(); - test("should check Time Window alert configured", async () => { - ConfigMockService.mock(); + const id = "62675e2d8891cd77b87f5b16"; + const config = (await ConfigModel.findOne({ _id: id })) as Config; + const time_window = config["time_window_alerts"][0]; - const id = '62675e2d8891cd77b87f5b16'; - const config = await ConfigModel.findOne({_id: id}) as Config; - const time_window = config['time_window_alerts'][0]; + expect(time_window.id.toString()).toEqual("6269def62b7e18add54e96e9"); + expect(time_window.status).toEqual(true); - expect(time_window.id.toString()).toEqual('6269def62b7e18add54e96e9'); - expect(time_window.status).toEqual(true); + //1651528966 = 2022-05-02T22:02:46.000Z + const created: number = new 
Date(time_window.created).getTime() / 1000; + expect(created).toEqual(1651528966); - //1651528966 = 2022-05-02T22:02:46.000Z - const created: number = new Date(time_window.created).getTime() / 1000; - expect(created).toEqual(1651528966); + expect(time_window.modified).toEqual(null); - expect(time_window.modified).toEqual(null); + expect(time_window.warning.enabled).toEqual(true); + expect(time_window.warning.threshold).toEqual(0); + expect(time_window.warning["time_window"]).toEqual(120); - expect(time_window.warning.enabled).toEqual(true); - expect(time_window.warning.threshold).toEqual(0); - expect(time_window.warning['time_window']).toEqual(120); + expect(time_window.critical.enabled).toEqual(true); + expect(time_window.critical["repeat_enabled"]).toEqual(true); + expect(time_window.critical.threshold).toEqual(120); + expect(time_window.critical.repeat).toEqual(300); + expect(time_window.critical["time_window"]).toEqual(120); + }); - expect(time_window.critical.enabled).toEqual(true); - expect(time_window.critical['repeat_enabled']).toEqual(true); - expect(time_window.critical.threshold).toEqual(120); - expect(time_window.critical.repeat).toEqual(300); - expect(time_window.critical['time_window']).toEqual(120); - }); + test("should check Repository configured", async () => { + ConfigMockService.mock(); - test("should check Repository configured", async () => { - ConfigMockService.mock(); + const id = "62675e2d8891cd77b87f5b16"; + const config = (await ConfigModel.findOne({ _id: id })) as Config; + const repository = config.repositories[0]; - const id = '62675e2d8891cd77b87f5b16'; - const config = await ConfigModel.findOne({_id: id}) as Config; - const repository = config.repositories[0]; + expect(repository.id.toString()).toEqual("6269d75a5a34daa2cc7744e0"); + expect(repository.status).toEqual(true); - expect(repository.id.toString()).toEqual('6269d75a5a34daa2cc7744e0'); - expect(repository.status).toEqual(true); + //1651528966 = 2022-05-02T22:02:46.000Z + const created: number = new Date(repository.created).getTime() / 1000; + expect(created).toEqual(1651528966); - //1651528966 = 2022-05-02T22:02:46.000Z - const created: number = new Date(repository.created).getTime() / 1000; - expect(created).toEqual(1651528966); + expect(repository.modified).toEqual(null); + expect(repository.name).toEqual("Tendermint"); + expect(repository.value).toEqual("tendermint/tendermint/"); + expect(repository.namespace).toEqual(null); - expect(repository.modified).toEqual(null); - expect(repository.name).toEqual('Tendermint'); - expect(repository.value).toEqual('tendermint/tendermint/'); - expect(repository.namespace).toEqual(null); + expect(typeof repository.type).toEqual("object"); - expect(typeof repository.type).toEqual('object'); + expect(repository.monitor).toEqual(true); + }); - expect(repository.monitor).toEqual(true); - }); + test("should check RepositoryType inside Repository configured", async () => { + ConfigMockService.mock(); - test("should check RepositoryType inside Repository configured", async () => { - ConfigMockService.mock(); + const id = "62675e2d8891cd77b87f5b16"; + const config = (await ConfigModel.findOne({ _id: id })) as Config; + const type = config.repositories[0].type; - const id = '62675e2d8891cd77b87f5b16'; - const config = await ConfigModel.findOne({_id: id}) as Config; - const type = config.repositories[0].type; + TestUtil.repository(type); + }); - TestUtil.repository(type); - }); + test("should check EVMNode configured", async () => { + ConfigMockService.mock(); - 
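An aside, not part of the patch: a hedged typing of the alert band shape the
threshold and time-window assertions above rely on. The interface names are
invented for illustration; the fields come straight from the fixtures in
odm.mock.service.

    // warning band: plain threshold, plus time_window on time-window alerts
    interface WarningBandSketch {
      enabled: boolean;
      threshold: number;
      time_window?: number;
    }

    // critical band additionally supports repeating alerts
    interface CriticalBandSketch extends WarningBandSketch {
      repeat_enabled: boolean;
      repeat: number;
    }
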
test("should check EVMNode configured", async () => { - ConfigMockService.mock(); + const id = "62675e2d8891cd77b87f5b16"; + const config = (await ConfigModel.findOne({ _id: id })) as Config; + const evm = config.evm_nodes[0]; - const id = '62675e2d8891cd77b87f5b16'; - const config = await ConfigModel.findOne({_id: id}) as Config; - const evm = config.evm_nodes[0]; + expect(evm.id.toString()).toEqual("6267683dc0e201f1edbbdc39"); + expect(evm.status).toEqual(false); - expect(evm.id.toString()).toEqual('6267683dc0e201f1edbbdc39'); - expect(evm.status).toEqual(false); + //1651528966 = 2022-05-02T22:02:46.000Z + const created: number = new Date(evm.created).getTime() / 1000; + expect(created).toEqual(1651528966); - //1651528966 = 2022-05-02T22:02:46.000Z - const created: number = new Date(evm.created).getTime() / 1000; - expect(created).toEqual(1651528966); + expect(evm.modified).toEqual(null); + expect(evm.name).toEqual("bsc_139"); + expect(evm.nodeHttpUrl).toEqual("http://ip11:1234"); + expect(evm.monitor).toEqual(true); + }); - expect(evm.modified).toEqual(null); - expect(evm.name).toEqual('bsc_139'); - expect(evm.nodeHttpUrl).toEqual('http://ip11:1234'); - expect(evm.monitor).toEqual(true); - }); + test("should check Node configured", async () => { + ConfigMockService.mock(); - test("should check Node configured", async () => { - ConfigMockService.mock(); + const id = "62675e2d8891cd77b87f5b16"; + const config = (await ConfigModel.findOne({ _id: id })) as Config; - const id = '62675e2d8891cd77b87f5b16'; - const config = await ConfigModel.findOne({_id: id}) as Config; + const node = config.nodes[0]; - const node = config.nodes[0]; + expect(node.id.toString()).toEqual("626765f4d9d14938006c3250"); + expect(node.status).toEqual(true); - expect(node.id.toString()).toEqual('626765f4d9d14938006c3250'); - expect(node.status).toEqual(true); + //1651528966 = 2022-05-02T22:02:46.000Z + const created: number = new Date(node.created).getTime() / 1000; + expect(created).toEqual(1651528966); - //1651528966 = 2022-05-02T22:02:46.000Z - const created: number = new Date(node.created).getTime() / 1000; - expect(created).toEqual(1651528966); + expect(node.modified).toEqual(null); + expect(node.name).toEqual("chainlink_bsc_ocr"); - expect(node.modified).toEqual(null); - expect(node.name).toEqual('chainlink_bsc_ocr'); + const urls = "https://ip6:82/metrics,https://ip7:82/metrics"; + expect(node.nodePrometheusUrls).toEqual(urls); - const urls = 'https://ip6:82/metrics,https://ip7:82/metrics'; - expect(node.nodePrometheusUrls).toEqual(urls); + expect(node.monitorPrometheus).toEqual(true); + expect(node.monitorNode).toEqual(false); - expect(node.monitorPrometheus).toEqual(true); - expect(node.monitorNode).toEqual(false); + const evm_urls = "http://ip8:1234,http://ip9:1234,http://ip10:1234"; + expect(node.evmNodesUrls).toEqual(evm_urls); - const evm_urls = 'http://ip8:1234,http://ip9:1234,http://ip10:1234'; - expect(node.evmNodesUrls).toEqual(evm_urls); + const weiwatcher_url = `https://weiwatchers.com/feeds-bsc-mainnet.json`; + expect(node.weiwatchersUrl).toEqual(weiwatcher_url); - const weiwatcher_url = `https://weiwatchers.com/feeds-bsc-mainnet.json`; - expect(node.weiwatchersUrl).toEqual(weiwatcher_url); + expect(node.monitorContracts).toEqual(true); - expect(node.monitorContracts).toEqual(true); + const governance_addresses = "x1,x2,x3"; + expect(node.governanceAddresses).toEqual(governance_addresses); + }); - const governance_addresses = 'x1,x2,x3'; - 
expect(node.governanceAddresses).toEqual(governance_addresses); - }); + test("should check System configured", async () => { + ConfigMockService.mock(); - test("should check System configured", async () => { - ConfigMockService.mock(); + const id = "62675e2d8891cd77b87f5b16"; + const config = (await ConfigModel.findOne({ _id: id })) as Config; + const system = config.systems[0]; - const id = '62675e2d8891cd77b87f5b16'; - const config = await ConfigModel.findOne({_id: id}) as Config; - const system = config.systems[0]; + expect(system.id.toString()).toEqual("62676606c72149c8dfa37c4e"); + expect(system.status).toEqual(true); - expect(system.id.toString()).toEqual('62676606c72149c8dfa37c4e'); - expect(system.status).toEqual(true); + //1651528966 = 2022-05-02T22:02:46.000Z + const created: number = new Date(system.created).getTime() / 1000; + expect(created).toEqual(1651528966); - //1651528966 = 2022-05-02T22:02:46.000Z - const created: number = new Date(system.created).getTime() / 1000; - expect(created).toEqual(1651528966); + expect(system.modified).toEqual(null); + expect(system.name).toEqual("system_chainlink_ocr_2"); - expect(system.modified).toEqual(null); - expect(system.name).toEqual('system_chainlink_ocr_2'); + const url = "http://ip12:7200/metrics"; + expect(system.exporterUrl).toEqual(url); - const url = 'http://ip12:7200/metrics'; - expect(system.exporterUrl).toEqual(url); - - expect(system.monitor).toEqual(true); - }); + expect(system.monitor).toEqual(true); + }); }); -describe('config endpoints family', () => { - test("should check could_not_save_data error when put config", async () => { - ConfigMockService.mockUpdate(); - ConfigMockService.saveError(); +describe("config endpoints family", () => { + test("should check could_not_save_data error when put config", async () => { + ConfigMockService.mockUpdate(); + ConfigMockService.saveError(); - const newConfig = {...config}; - delete newConfig.subChain; - const endpoint = '/v1/configs/62675e2d8331c477b85f5b15'; - const res = await request(app).put(endpoint).send(newConfig); + const newConfig = { ...config }; + delete newConfig.subChain; + const endpoint = "/v1/configs/62675e2d8331c477b85f5b15"; + const res = await request(app).put(endpoint).send(newConfig); - expect(res.statusCode).toEqual(Status.E_536); - expect(res.body.status).toEqual(Status.E_536); - }); + expect(res.statusCode).toEqual(Status.E_536); + expect(res.body.status).toEqual(Status.E_536); + }); - test("should check put config", async () => { - ConfigMockService.mockUpdate(); - ConfigMockService.save(); + test("should check put config", async () => { + ConfigMockService.mockUpdate(); + ConfigMockService.save(); - const newConfig = {...config}; - delete newConfig.subChain; - const endpoint = '/v1/configs/62675e2d8331c477b85f5b15'; - const res = await request(app).put(endpoint).send(newConfig); + const newConfig = { ...config }; + delete newConfig.subChain; + const endpoint = "/v1/configs/62675e2d8331c477b85f5b15"; + const res = await request(app).put(endpoint).send(newConfig); - expect(res.statusCode).toEqual(Status.SUCCESS); - expect(res.body.status).toEqual(Status.SUCCESS); - }); + expect(res.statusCode).toEqual(Status.SUCCESS); + expect(res.body.status).toEqual(Status.SUCCESS); + }); - test("should check not found when put config", async () => { - ConfigMockService.save(); + test("should check not found when put config", async () => { + ConfigMockService.save(); - const endpoint = '/v1/configs/62675e2d8331c477b85f5b15'; - const res = await 
request(app).put(endpoint).send(config); + const endpoint = "/v1/configs/62675e2d8331c477b85f5b15"; + const res = await request(app).put(endpoint).send(config); - expect(res.statusCode).toEqual(Status.NOT_FOUND); - expect(res.body.status).toEqual(Status.NOT_FOUND); - }); + expect(res.statusCode).toEqual(Status.NOT_FOUND); + expect(res.body.status).toEqual(Status.NOT_FOUND); + }); - test("should check post config", async () => { - ConfigMockService.mockPost(); - ConfigMockService.save(); + test("should check post config", async () => { + ConfigMockService.mockPost(); + ConfigMockService.save(); - const endpoint = '/v1/configs'; - const res = await request(app).post(endpoint).send({ - baseChain: {id: new ObjectID()}, - subChain: {name: 'test'} - }); + const endpoint = "/v1/configs"; + const res = await request(app) + .post(endpoint) + .send({ + baseChain: { id: new ObjectId() }, + subChain: { name: "test" }, + }); - expect(res.statusCode).toEqual(Status.SUCCESS); - expect(res.body.status).toEqual(Status.SUCCESS); - }); + expect(res.statusCode).toEqual(Status.SUCCESS); + expect(res.body.status).toEqual(Status.SUCCESS); + }); - test("should check could_not_save_data error when post config", async () => { - ConfigMockService.mockPost(); - ConfigMockService.saveError(); + test("should check could_not_save_data error when post config", async () => { + ConfigMockService.mockPost(); + ConfigMockService.saveError(); - const endpoint = '/v1/configs'; - const res = await request(app).post(endpoint).send({ - baseChain: {id: new ObjectID()}, - subChain: {name: 'test'} - }); + const endpoint = "/v1/configs"; + const res = await request(app) + .post(endpoint) + .send({ + baseChain: { id: new ObjectId() }, + subChain: { name: "test" }, + }); - expect(res.statusCode).toEqual(Status.E_536); - expect(res.body.status).toEqual(Status.E_536); - }); + expect(res.statusCode).toEqual(Status.E_536); + expect(res.body.status).toEqual(Status.E_536); + }); - test("should check duplicate sub chain on post config", async () => { - ConfigMockService.mock(); - ConfigMockService.save(); + test("should check duplicate sub chain on post config", async () => { + ConfigMockService.mock(); + ConfigMockService.save(); - const endpoint = '/v1/configs'; - const res = await request(app).post(endpoint).send(config); + const endpoint = "/v1/configs"; + const res = await request(app).post(endpoint).send(config); - expect(res.statusCode).toEqual(Status.CONFLICT); - expect(res.body.status).toEqual(Status.CONFLICT); - }); + expect(res.statusCode).toEqual(Status.CONFLICT); + expect(res.body.status).toEqual(Status.CONFLICT); + }); - test("should check remove config", async () => { - ConfigMockService.remove(); + test("should check remove config", async () => { + ConfigMockService.remove(); - const endpoint = '/v1/configs/62675e2d8331c477b85f5b15'; - const res = await request(app).delete(endpoint); + const endpoint = "/v1/configs/62675e2d8331c477b85f5b15"; + const res = await request(app).delete(endpoint); - expect(res.statusCode).toEqual(Status.NO_CONTENT); - expect(res.body).toEqual({}); - }); + expect(res.statusCode).toEqual(Status.NO_CONTENT); + expect(res.body).toEqual({}); + }); - test("should check remove config and not found", async () => { - ConfigMockService.notRemoved(); + test("should check remove config and not found", async () => { + ConfigMockService.notRemoved(); - const endpoint = '/v1/configs/62675e2d8331c477b85f5b15'; - const res = await request(app).delete(endpoint); + const endpoint = 
"/v1/configs/62675e2d8331c477b85f5b15"; + const res = await request(app).delete(endpoint); - console.log(res.body); + console.log(res.body); - expect(res.statusCode).toEqual(Status.NOT_FOUND); - expect(res.body.status).toEqual(Status.NOT_FOUND); - }); + expect(res.statusCode).toEqual(Status.NOT_FOUND); + expect(res.body.status).toEqual(Status.NOT_FOUND); + }); - test("should check remove config and Server Error", async () => { - ConfigMockService.mockError(); + test("should check remove config and Server Error", async () => { + ConfigMockService.mockError(); - const endpoint = '/v1/configs/62675e2d8331c477b85f5b15'; - const res = await request(app).delete(endpoint); + const endpoint = "/v1/configs/62675e2d8331c477b85f5b15"; + const res = await request(app).delete(endpoint); - expect(res.statusCode).toEqual(Status.E_536); - expect(res.body.status).toEqual(Status.E_536); - }); + expect(res.statusCode).toEqual(Status.E_536); + expect(res.body.status).toEqual(Status.E_536); + }); - test("should check Config item by findOne and Server Error", async () => { - ConfigMockService.mockError(); + test("should check Config item by findOne and Server Error", async () => { + ConfigMockService.mockError(); - const endpoint = '/v1/configs/62675e2d8331c477b85f5b15'; - const res = await request(app).get(endpoint); + const endpoint = "/v1/configs/62675e2d8331c477b85f5b15"; + const res = await request(app).get(endpoint); - expect(res.statusCode).toEqual(Status.E_536); - expect(res.body.status).toEqual(Status.E_536); - }); + expect(res.statusCode).toEqual(Status.E_536); + expect(res.body.status).toEqual(Status.E_536); + }); - test("should check Config item by findOne and NotFoundWarning", async () => { - ConfigMockService.notFound(); + test("should check Config item by findOne and NotFoundWarning", async () => { + ConfigMockService.notFound(); - const endpoint = '/v1/configs/62675e2d8331c477b85f5b15'; - const res = await request(app).get(endpoint); + const endpoint = "/v1/configs/62675e2d8331c477b85f5b15"; + const res = await request(app).get(endpoint); - expect(Status.NOT_FOUND).toEqual(res.statusCode); - }); + expect(Status.NOT_FOUND).toEqual(res.statusCode); + }); - test("should check Config list and Server Error", async () => { - ConfigMockService.mockError(); + test("should check Config list and Server Error", async () => { + ConfigMockService.mockError(); - const endpoint = '/v1/configs'; - const res = await request(app).get(endpoint); + const endpoint = "/v1/configs"; + const res = await request(app).get(endpoint); - expect(res.statusCode).toEqual(Status.E_536); - expect(res.body.status).toEqual(Status.E_536); - }); + expect(res.statusCode).toEqual(Status.E_536); + expect(res.body.status).toEqual(Status.E_536); + }); - test("should check Config list", async () => { - ConfigMockService.mock(); + test("should check Config list", async () => { + ConfigMockService.mock(); - const endpoint = '/v1/configs'; - const res = await request(app).get(endpoint); + const endpoint = "/v1/configs"; + const res = await request(app).get(endpoint); - const configs = res.body.result; + const configs = res.body.result; - expect(res.statusCode).toEqual(Status.SUCCESS); - expect(res.body.status).toEqual(Status.SUCCESS); - expect(configs.length).toEqual(1); - }); + expect(res.statusCode).toEqual(Status.SUCCESS); + expect(res.body.status).toEqual(Status.SUCCESS); + expect(configs.length).toEqual(1); + }); }); diff --git a/api/tests/v1/config/odm.mock.service.ts b/api/tests/v1/config/odm.mock.service.ts index 93666bf3..af112a24 
100644 --- a/api/tests/v1/config/odm.mock.service.ts +++ b/api/tests/v1/config/odm.mock.service.ts @@ -1,384 +1,387 @@ -import {ObjectID} from "mongodb"; -import {ObjectUtil} from "../../../src/util/ObjectUtil"; -import {BaseChainModel, BaseChainSchema} from "../../../src/v1/entity/model/BaseChainModel"; -import {ConfigModel} from "../../../src/v1/entity/model/ConfigModel"; -import {GenericModel, GenericSchema} from "../../../src/v1/entity/model/GenericModel"; +import { ObjectId } from "mongodb"; +import { ObjectUtil } from "../../../src/util/ObjectUtil"; import { - SeverityAlertSubconfigModel, - SeverityAlertSubconfigSchema + BaseChainModel, + BaseChainSchema, +} from "../../../src/v1/entity/model/BaseChainModel"; +import { ConfigModel } from "../../../src/v1/entity/model/ConfigModel"; +import { + GenericModel, + GenericSchema, +} from "../../../src/v1/entity/model/GenericModel"; +import { + SeverityAlertSubconfigModel, + SeverityAlertSubconfigSchema, } from "../../../src/v1/entity/model/SeverityAlertSubconfigSchema"; -import {basechain} from "../basechain/odm.mock.service"; -import {RepositorySubconfigSchema} from "../../../src/v1/entity/model/RepositorySubconfigSchema"; -import {Generic} from "../../../../entities/ts/Generic"; +import { basechain } from "../basechain/odm.mock.service"; +import { RepositorySubconfigSchema } from "../../../src/v1/entity/model/RepositorySubconfigSchema"; +import { Generic } from "../../../../entities/ts/Generic"; -const mockingoose = require('mockingoose'); +const mockingoose = require("mockingoose"); const node2 = ObjectUtil.snakeToCamel({ - "_id": "626765f4d9d14938006c0523", - "status": true, - "name": "chainlink_bsc_ocr_2", - "node_prometheus_urls": "https://ip1:82/metrics,https://ip2:82/metrics", - "monitor_prometheus": true, - "monitor_node": true, - "evm_nodes_urls": "http://ip3:1234,http://ip4:1234,http://ip5:1234", - "weiwatchers_url": "https://weiwatchers.com/feeds-bsc-mainnet.json", - "monitor_contracts": true, - "governance_addresses": "x1,x2,x3" -}) + _id: "626765f4d9d14938006c0523", + status: true, + name: "chainlink_bsc_ocr_2", + node_prometheus_urls: "https://ip1:82/metrics,https://ip2:82/metrics", + monitor_prometheus: true, + monitor_node: true, + evm_nodes_urls: "http://ip3:1234,http://ip4:1234,http://ip5:1234", + weiwatchers_url: "https://weiwatchers.com/feeds-bsc-mainnet.json", + monitor_contracts: true, + governance_addresses: "x1,x2,x3", +}); const node1 = ObjectUtil.snakeToCamel({ - "_id": "626765f4d9d14938006c3250", - "status": true, - "created": "2022-05-02T22:02:46.000Z", - "modified": null, - "name": "chainlink_bsc_ocr", - "node_prometheus_urls": "https://ip6:82/metrics,https://ip7:82/metrics", - "monitor_prometheus": true, - "monitor_node": false, - "evm_nodes_urls": "http://ip8:1234,http://ip9:1234,http://ip10:1234", - "weiwatchers_url": "https://weiwatchers.com/feeds-bsc-mainnet.json", - "monitor_contracts": true, - "governance_addresses": "x1,x2,x3" + _id: "626765f4d9d14938006c3250", + status: true, + created: "2022-05-02T22:02:46.000Z", + modified: null, + name: "chainlink_bsc_ocr", + node_prometheus_urls: "https://ip6:82/metrics,https://ip7:82/metrics", + monitor_prometheus: true, + monitor_node: false, + evm_nodes_urls: "http://ip8:1234,http://ip9:1234,http://ip10:1234", + weiwatchers_url: "https://weiwatchers.com/feeds-bsc-mainnet.json", + monitor_contracts: true, + governance_addresses: "x1,x2,x3", }); const severityType = { - "_id": "6265d08efdb17d641746dcf0", - "status": true, - "created": "2022-05-02T22:02:46.000Z", - 
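An aside, not part of the patch: the node fixtures above are wrapped in
ObjectUtil.snakeToCamel so their snake_case keys match the camelCase fields
the ODM tests assert on (e.g. node_prometheus_urls -> nodePrometheusUrls). A
minimal sketch of that key transform under that assumption; the helper name
below is illustrative.

    const snakeToCamelKeySketch = (key: string): string =>
      key.replace(/_([a-z])/g, (_match, ch: string) => ch.toUpperCase());

    // e.g. the node fixtures: node_prometheus_urls -> nodePrometheusUrls
    console.log(snakeToCamelKeySketch("node_prometheus_urls"));
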
"modified": null, - "name": "Warning", - "value": null, - "description": null, - "group": "severity_type" + _id: "6265d08efdb17d641746dcf0", + status: true, + created: "2022-05-02T22:02:46.000Z", + modified: null, + name: "Warning", + value: null, + description: null, + group: "severity_type", }; const severityAlert = { - "_id": "6269def62b7e18add54e96f1", - "created": "2022-05-02T22:02:46.000Z", - "modified": null, - "status": true, - "name": "Slashed", - "value": "slashed", - "description": "Raised when your validator has been slashed.", - "group": "severity_alert", - "type": severityType + _id: "6269def62b7e18add54e96f1", + created: "2022-05-02T22:02:46.000Z", + modified: null, + status: true, + name: "Slashed", + value: "slashed", + description: "Raised when your validator has been slashed.", + group: "severity_alert", + type: severityType, }; const thresholdAlert = { - "_id": "6269def62b7e18add54e96e9", - "status": true, - "created": "2022-05-02T22:02:46.000Z", - "modified": null, - "name": "Cannot access validator", - "value": "cannot_access_validator", - "description": "Raised when a validator is unaccessible.", - "group": "threshold_alert", - "critical": { - "enabled": true, - "repeat_enabled": true, - "threshold": 120, - "repeat": 300 - }, - "warning": { - "enabled": true, - "threshold": 0 - } + _id: "6269def62b7e18add54e96e9", + status: true, + created: "2022-05-02T22:02:46.000Z", + modified: null, + name: "Cannot access validator", + value: "cannot_access_validator", + description: "Raised when a validator is unaccessible.", + group: "threshold_alert", + critical: { + enabled: true, + repeat_enabled: true, + threshold: 120, + repeat: 300, + }, + warning: { + enabled: true, + threshold: 0, + }, }; const timeWindowAlert = { - "_id": "6269def62b7e18add54e96e9", - "status": true, - "created": "2022-05-02T22:02:46.000Z", - "modified": null, - "name": "Cannot access validator", - "value": "cannot_access_validator", - "description": "Raised when a validator is unaccessible.", - "group": "threshold_alert", - "critical": { - "enabled": true, - "repeat_enabled": true, - "threshold": 120, - "repeat": 300, - "time_window": 120 - }, - "warning": { - "enabled": true, - "threshold": 0, - "time_window": 120 - } + _id: "6269def62b7e18add54e96e9", + status: true, + created: "2022-05-02T22:02:46.000Z", + modified: null, + name: "Cannot access validator", + value: "cannot_access_validator", + description: "Raised when a validator is unaccessible.", + group: "threshold_alert", + critical: { + enabled: true, + repeat_enabled: true, + threshold: 120, + repeat: 300, + time_window: 120, + }, + warning: { + enabled: true, + threshold: 0, + time_window: 120, + }, }; const channelType = { - "_id": "62656ebafdb17d641746dcda", - "created": "2022-04-01T00:00:00Z", - "modified": null, - "status": true, - "name": "Telegram", - "value": 'telegram', - "description": null, - "group": "channel_type" + _id: "62656ebafdb17d641746dcda", + created: "2022-04-01T00:00:00Z", + modified: null, + status: true, + name: "Telegram", + value: "telegram", + description: null, + group: "channel_type", }; const repositoryType = { - "_id": "6269d55af66c0c5b67d125a6", - "created": "2022-05-02T22:02:46.000Z", - "modified": null, - "status": true, - "name": "Github Repo", - "value": 'git', - "description": null, - "group": "repository_type" + _id: "6269d55af66c0c5b67d125a6", + created: "2022-05-02T22:02:46.000Z", + modified: null, + status: true, + name: "Github Repo", + value: "git", + description: null, + group: "repository_type", 
}; const configType = { - "_id": "6265758cfdb17d641746dce4", - "created": "2022-05-02T22:02:46.000Z", - "modified": null, - "status": true, - "name": "Sub Chain", - "value": null, - "description": null, - "group": "config_type" + _id: "6265758cfdb17d641746dce4", + created: "2022-05-02T22:02:46.000Z", + modified: null, + status: true, + name: "Sub Chain", + value: null, + description: null, + group: "config_type", }; const sourceType = { - "_id": "6271178cf740ef3847a7d27e", - "status": true, - "created": "2022-04-01T00:00:00.000Z", - "modified": null, - "name": "Contract", - "value": "contract", - "description": null, - "group": 'source_type' + _id: "6271178cf740ef3847a7d27e", + status: true, + created: "2022-04-01T00:00:00.000Z", + modified: null, + name: "Contract", + value: "contract", + description: null, + group: "source_type", }; export const config = { - "_id": "62675e2d8891cd77b87f5b16", - "config_type": "6265758cfdb17d641746dce4", - "status": true, - "created": "2022-05-02T22:02:46.000Z", - "modified": "2022-05-02T23:02:46.000Z", - "baseChain": { - "_id": "6265cefcfdb17d641746dced", - "created": "2022-05-03T22:02:46.000Z", - "modified": "2022-05-06T23:02:46.000Z", - "status": true, - "name": "Chainlink", - "sources": [sourceType] - }, - "subChain": { - "_id": "626aeb43980cf43ee7bbf683", - "status": true, - "created": "2022-05-02T22:02:46.000Z", - "modified": null, - "name": "binance" + _id: "62675e2d8891cd77b87f5b16", + config_type: "6265758cfdb17d641746dce4", + status: true, + created: "2022-05-02T22:02:46.000Z", + modified: "2022-05-02T23:02:46.000Z", + baseChain: { + _id: "6265cefcfdb17d641746dced", + created: "2022-05-03T22:02:46.000Z", + modified: "2022-05-06T23:02:46.000Z", + status: true, + name: "Chainlink", + sources: [sourceType], + }, + subChain: { + _id: "626aeb43980cf43ee7bbf683", + status: true, + created: "2022-05-02T22:02:46.000Z", + modified: null, + name: "binance", + }, + contract: { + _id: "6267661fbe66cae642f57fb7", + status: false, + created: "2022-05-02T22:02:46.000Z", + modified: null, + name: "Weiwatcher", + url: "https://weiwatchers.com/feeds-bsc-mainnet.json", + monitor: true, + }, + threshold_alerts: [thresholdAlert], + severity_alerts: [severityAlert], + time_window_alerts: [timeWindowAlert], + repositories: [ + { + _id: "6269d75a5a34daa2cc7744e0", + status: true, + created: "2022-05-02T22:02:46.000Z", + modified: null, + name: "Tendermint", + value: "tendermint/tendermint/", + namespace: null, + type: repositoryType, + monitor: true, }, - "contract": { - "_id": "6267661fbe66cae642f57fb7", - "status": false, - "created": "2022-05-02T22:02:46.000Z", - "modified": null, - "name": "Weiwatcher", - "url": "https://weiwatchers.com/feeds-bsc-mainnet.json", - "monitor": true + { + _id: "6269d81f3f76d2c6ababbafe", + status: true, + created: "2022-05-02T22:02:46.000Z", + modified: null, + name: "tendermint", + value: "tendermit", + namespace: "tendermint", + type: { + _id: "6269d568d509ee91767dbfd6", + status: true, + created: "2022-05-02T22:02:46.000Z", + modified: null, + name: "Dockerhub Repo", + value: null, + description: null, + }, + monitor: true, }, - "threshold_alerts": [ - thresholdAlert, - ], - "severity_alerts": [ - severityAlert - ], - "time_window_alerts": [ - timeWindowAlert, - ], - "repositories": [ - { - "_id": "6269d75a5a34daa2cc7744e0", - "status": true, - "created": "2022-05-02T22:02:46.000Z", - "modified": null, - "name": "Tendermint", - "value": "tendermint/tendermint/", - "namespace": null, - "type": repositoryType, - "monitor": true - }, - 
{ - "_id": "6269d81f3f76d2c6ababbafe", - "status": true, - "created": "2022-05-02T22:02:46.000Z", - "modified": null, - "name": "tendermint", - "value": "tendermit", - "namespace": "tendermint", - "type": { - "_id": "6269d568d509ee91767dbfd6", - "status": true, - "created": "2022-05-02T22:02:46.000Z", - "modified": null, - "name": "Dockerhub Repo", - "value": null, - "description": null - }, - "monitor": true - } - ], - "evm_nodes": [ - ObjectUtil.snakeToCamel({ - "_id": "6267683dc0e201f1edbbdc39", - "status": false, - "created": "2022-05-02T22:02:46.000Z", - "modified": null, - "name": "bsc_139", - "node_http_url": "http://ip11:1234", - "monitor": true - }) - ], - "nodes": [node1, node2], - "systems": [ - ObjectUtil.snakeToCamel({ - "_id": "62676606c72149c8dfa37c4e", - "status": true, - "created": "2022-05-02T22:02:46.000Z", - "modified": null, - "name": "system_chainlink_ocr_2", - "exporter_url": "http://ip12:7200/metrics", - "monitor": true - }) - ] + ], + evm_nodes: [ + ObjectUtil.snakeToCamel({ + _id: "6267683dc0e201f1edbbdc39", + status: false, + created: "2022-05-02T22:02:46.000Z", + modified: null, + name: "bsc_139", + node_http_url: "http://ip11:1234", + monitor: true, + }), + ], + nodes: [node1, node2], + systems: [ + ObjectUtil.snakeToCamel({ + _id: "62676606c72149c8dfa37c4e", + status: true, + created: "2022-05-02T22:02:46.000Z", + modified: null, + name: "system_chainlink_ocr_2", + exporter_url: "http://ip12:7200/metrics", + monitor: true, + }), + ], }; /** * Service to mock Configuration Document */ export class ConfigMockService { - public static mock(): void { - - //to mock populate of mongoose - ConfigModel.schema.path('baseChain', BaseChainSchema); - BaseChainModel.schema.path('sources', [GenericSchema]); - - SeverityAlertSubconfigSchema.path('status', Boolean); - SeverityAlertSubconfigSchema.path('type', Object); - RepositorySubconfigSchema.path('type', Object); - - //mock findone - mockingoose(ConfigModel).toReturn(config, 'findOne'); - mockingoose(ConfigModel).toReturn(config); - mockingoose(ConfigModel).toReturn([config], 'find'); - } - - public static mockUpdate(): void { - //to mock validators result - mockingoose(GenericModel).toReturn(1, 'countDocuments'); - mockingoose(BaseChainModel).toReturn(1, 'countDocuments'); - - //to avoid not found error - mockingoose(ConfigModel).toReturn(config, 'findOne'); - - //to avoid duplicate error - mockingoose(ConfigModel).toReturn([], 'find'); - } - - public static mockPost(): void { - //to mock validators result - mockingoose(BaseChainModel).toReturn(1, 'countDocuments'); - mockingoose(GenericModel).toReturn(1, 'countDocuments'); - - //to avoid duplicate error - mockingoose(ConfigModel).toReturn([], 'find'); - } - - public static count(total: number): void { - mockingoose(ConfigModel).toReturn(total, 'countDocuments'); - mockingoose(ConfigModel).toReturn(total, 'count'); - } - - public static mockError(): void { - mockingoose(ConfigModel).toReturn(new Error('erro'), 'findOne'); - mockingoose(ConfigModel).toReturn(new Error('erro'), 'find'); - } - - public static notFound(): void { - mockingoose(ConfigModel).toReturn(null, 'findOne'); - } - - public static remove(): void { - mockingoose(GenericModel).toReturn(1, 'countDocuments'); - mockingoose(BaseChainModel).toReturn(1, 'countDocuments'); - mockingoose(ConfigModel).toReturn(config, 'findOne'); - - mockingoose(ConfigModel).toReturn({deletedCount: 1}, 'deleteOne'); - } - - public static notRemoved(): void { - mockingoose(GenericModel).toReturn(1, 'countDocuments'); - 
mockingoose(BaseChainModel).toReturn(1, 'countDocuments'); - mockingoose(ConfigModel).toReturn(config, 'findOne'); - - mockingoose(ConfigModel).toReturn({deletedCount: 0}, 'deleteOne'); - - } - - public static save(): void { - const obj = Object.assign( - JSON.parse(JSON.stringify(config)), //to avoid change mock reference - {baseChain: new ObjectID().toHexString()} - ); - mockingoose(ConfigModel).toReturn(obj, 'save'); - mockingoose(BaseChainModel).toReturn(basechain, 'findOne'); - } - - public static saveError(): void { - mockingoose(ConfigModel).toReturn(new Error(), 'save'); - mockingoose(BaseChainModel).toReturn(basechain, 'findOne'); - } + public static mock(): void { + //to mock populate of mongoose + ConfigModel.schema.path("baseChain", BaseChainSchema); + BaseChainModel.schema.path("sources", [GenericSchema]); + + SeverityAlertSubconfigSchema.path("status", Boolean); + SeverityAlertSubconfigSchema.path("type", Object); + RepositorySubconfigSchema.path("type", Object); + + //mock findone + mockingoose(ConfigModel).toReturn(config, "findOne"); + mockingoose(ConfigModel).toReturn(config); + mockingoose(ConfigModel).toReturn([config], "find"); + } + + public static mockUpdate(): void { + //to mock validators result + mockingoose(GenericModel).toReturn(1, "countDocuments"); + mockingoose(BaseChainModel).toReturn(1, "countDocuments"); + + //to avoid not found error + mockingoose(ConfigModel).toReturn(config, "findOne"); + + //to avoid duplicate error + mockingoose(ConfigModel).toReturn([], "find"); + } + + public static mockPost(): void { + //to mock validators result + mockingoose(BaseChainModel).toReturn(1, "countDocuments"); + mockingoose(GenericModel).toReturn(1, "countDocuments"); + + //to avoid duplicate error + mockingoose(ConfigModel).toReturn([], "find"); + } + + public static count(total: number): void { + mockingoose(ConfigModel).toReturn(total, "countDocuments"); + mockingoose(ConfigModel).toReturn(total, "count"); + } + + public static mockError(): void { + mockingoose(ConfigModel).toReturn(new Error("erro"), "findOne"); + mockingoose(ConfigModel).toReturn(new Error("erro"), "find"); + } + + public static notFound(): void { + mockingoose(ConfigModel).toReturn(null, "findOne"); + } + + public static remove(): void { + mockingoose(GenericModel).toReturn(1, "countDocuments"); + mockingoose(BaseChainModel).toReturn(1, "countDocuments"); + mockingoose(ConfigModel).toReturn(config, "findOne"); + + mockingoose(ConfigModel).toReturn({ deletedCount: 1 }, "deleteOne"); + } + + public static notRemoved(): void { + mockingoose(GenericModel).toReturn(1, "countDocuments"); + mockingoose(BaseChainModel).toReturn(1, "countDocuments"); + mockingoose(ConfigModel).toReturn(config, "findOne"); + + mockingoose(ConfigModel).toReturn({ deletedCount: 0 }, "deleteOne"); + } + + public static save(): void { + const obj = Object.assign( + JSON.parse(JSON.stringify(config)), //to avoid change mock reference + { baseChain: new ObjectId().toHexString() } + ); + mockingoose(ConfigModel).toReturn(obj, "save"); + mockingoose(BaseChainModel).toReturn(basechain, "findOne"); + } + + public static saveError(): void { + mockingoose(ConfigModel).toReturn(new Error(), "save"); + mockingoose(BaseChainModel).toReturn(basechain, "findOne"); + } } /** * Service to mock Generic Domain Types */ export class GenericMockService { - public static mockChannelType(): void { - mockingoose(GenericModel).toReturn([channelType], 'find'); - } + public static mockChannelType(): void { + 
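
All of these services lean on the same mockingoose trick: intercept a named mongoose operation on a model and resolve it with a canned value instead of touching MongoDB. A minimal Jest sketch of the pattern, assuming only the ConfigModel import used above; the stub document and test name are made up:

    import { ConfigModel } from "../../../src/v1/entity/model/ConfigModel";

    const mockingoose = require("mockingoose");

    test("findOne resolves with the stubbed config", async () => {
      // toReturn(value, operation) stubs the named mongoose operation.
      const stub = { _id: "62675e2d8891cd77b87f5b16", status: true };
      mockingoose(ConfigModel).toReturn(stub, "findOne");

      const doc = await ConfigModel.findOne({ status: true });
      expect(doc!._id.toString()).toEqual(stub._id);
    });
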
mockingoose(GenericModel).toReturn([channelType], "find"); + } - public static mockThresholdAlert(): void { - mockingoose(GenericModel).toReturn([thresholdAlert], 'find'); - } + public static mockThresholdAlert(): void { + mockingoose(GenericModel).toReturn([thresholdAlert], "find"); + } - public static mockSeverityAlert(): void { - mockingoose(SeverityAlertSubconfigModel).toReturn([severityAlert], 'find'); - } + public static mockSeverityAlert(): void { + mockingoose(SeverityAlertSubconfigModel).toReturn([severityAlert], "find"); + } - public static mockTimeWindowAlert(): void { - mockingoose(GenericModel).toReturn([timeWindowAlert], 'find'); - } + public static mockTimeWindowAlert(): void { + mockingoose(GenericModel).toReturn([timeWindowAlert], "find"); + } - public static mockSeverityType(): void { - mockingoose(GenericModel).toReturn([severityType], 'find'); - } + public static mockSeverityType(): void { + mockingoose(GenericModel).toReturn([severityType], "find"); + } - public static mockConfigType(): void { - mockingoose(GenericModel).toReturn([configType], 'find'); - } + public static mockConfigType(): void { + mockingoose(GenericModel).toReturn([configType], "find"); + } - public static mockSourceType(): void { - mockingoose(GenericModel).toReturn([sourceType], 'find'); - } + public static mockSourceType(): void { + mockingoose(GenericModel).toReturn([sourceType], "find"); + } - public static mockRepositoryType(): void { - mockingoose(GenericModel).toReturn([repositoryType], 'find'); - } + public static mockRepositoryType(): void { + mockingoose(GenericModel).toReturn([repositoryType], "find"); + } } - /** * Service to mock Errors for Generic Domain */ export class GenericErrorMockService { - public static mockFind(): void { - mockingoose(GenericModel).toReturn(new Error("Error to find a document!"), 'find'); - mockingoose(SeverityAlertSubconfigModel).toReturn(new Error("Error to find a document!"), 'find'); - } + public static mockFind(): void { + mockingoose(GenericModel).toReturn( + new Error("Error to find a document!"), + "find" + ); + mockingoose(SeverityAlertSubconfigModel).toReturn( + new Error("Error to find a document!"), + "find" + ); + } } diff --git a/api/tests/v1/util/util.odm.test.ts b/api/tests/v1/util/util.odm.test.ts index cb6c71ee..a1e9182a 100644 --- a/api/tests/v1/util/util.odm.test.ts +++ b/api/tests/v1/util/util.odm.test.ts @@ -1,84 +1,85 @@ -import { ObjectID } from "mongodb"; +import { ObjectId } from "mongodb"; import { MongooseUtil } from "../../../src/util/MongooseUtil"; import { FullData, fullData, simpleObject } from "./odm.mock.service"; -describe('Test MongooseUtil Class', () => { - test("Should check merge function", async () => { +describe("Test MongooseUtil Class", () => { + test("Should check merge function", async () => { + const oldData = JSON.parse(JSON.stringify(fullData)); - const oldData = JSON.parse(JSON.stringify(fullData)); + const id = new ObjectId(); - const id = new ObjectID(); + const newData: FullData = { + id: id.toString(), + name: "new name", + simpleObject: { + value6: true, + }, + complexObject: { + simpleObject: { + value4: 8, + }, + scalarList: [1, 4, 6, 9, 10], + mixedScalarList: [1, false, 1], + simpleList: [ + { + id: "1", + value4: 5, + }, + { + value4: 6, + }, + ], + customList: [], + }, + }; - const newData: FullData = { - id: id.toString(), - name: 'new name', - simpleObject: { - value6: true - }, - complexObject: { - simpleObject: { - value4: 8 - }, - scalarList: [1, 4, 6, 9, 10], - mixedScalarList: [1, false, 1], - 
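
The MongooseUtil.merge test being reformatted here pins down the merge semantics the implementation (patched later in this series) must honor: nested objects are merged key by key, scalar arrays are replaced wholesale rather than element-merged, and created/modified timestamps are never overwritten by the incoming object. A toy illustration under those assumptions, with made-up data:

    import { MongooseUtil } from "../../../src/util/MongooseUtil";

    const oldData = {
      name: "old",
      nested: { a: 1, b: 2 },
      scalarList: [1, 2, 3],
      created: "2022-05-02T22:02:46.000Z",
    };
    const newData = { name: "new", nested: { b: 9 }, scalarList: [7] };

    const merged = MongooseUtil.merge(oldData, newData);
    // merged.name       -> "new"
    // merged.nested     -> { a: 1, b: 9 }  (deep merge, key by key)
    // merged.scalarList -> [7]             (scalar arrays replaced outright)
    // merged.created    -> unchanged       (created/modified are skipped)
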
simpleList: [{ - id: "1", - value4: 5 - }, { - value4: 6 - }], - customList: [] - } - }; + const result: FullData = MongooseUtil.merge(oldData, newData); - const result: FullData = MongooseUtil.merge(oldData, newData); + const simpleItem1 = { + ...simpleObject, + value4: 5, + }; + delete simpleItem1.id; - const simpleItem1 = { - ...simpleObject, - value4: 5 - }; + const expectedResult = { + ...oldData, + _id: id.toString(), + name: "new name", + simpleObject: { + ...simpleObject, + value6: true, + }, + complexObject: { + ...oldData.complexObject, + simpleObject: { ...simpleObject, value4: 8 }, + scalarList: [1, 4, 6, 9, 10], + mixedScalarList: [1, false, 1], + simpleList: [simpleItem1, { value4: 6 }], + customList: [], + }, + }; - delete simpleItem1.id; + expect(result.id).toEqual(expectedResult.id); + expect(result.name).toEqual(expectedResult.name); + expect(result.simpleObject).toEqual(expectedResult.simpleObject); - const expectedResult = { - ...oldData, - _id: id.toString(), - name: 'new name', - simpleObject: { - ...simpleObject, - value6: true - }, - complexObject: { - ...oldData.complexObject, - simpleObject: { ...simpleObject, value4: 8 }, - scalarList: [1, 4, 6, 9, 10], - mixedScalarList: [1, false, 1], - simpleList: [simpleItem1, { value4: 6 }], - customList: [] - } - } + expect(result.complexObject.simpleObject).toEqual( + expectedResult.complexObject.simpleObject + ); + expect(result.complexObject.scalarList).toEqual( + expectedResult.complexObject.scalarList + ); + expect(result.complexObject.mixedScalarList).toEqual( + expectedResult.complexObject.mixedScalarList + ); - expect(result.id).toEqual(expectedResult.id); - expect(result.name).toEqual(expectedResult.name); - expect(result.simpleObject).toEqual(expectedResult.simpleObject); + expectedResult.complexObject.simpleList[0]._id = + result.complexObject.simpleList[0]["_id"]; + expect(result.complexObject.simpleList).toEqual( + expectedResult.complexObject.simpleList + ); - expect(result.complexObject.simpleObject).toEqual( - expectedResult.complexObject.simpleObject - ); - expect(result.complexObject.scalarList).toEqual( - expectedResult.complexObject.scalarList - ); - expect(result.complexObject.mixedScalarList).toEqual( - expectedResult.complexObject.mixedScalarList - ); - - expectedResult.complexObject.simpleList[0]._id = result.complexObject.simpleList[0]['_id']; - expect(result.complexObject.simpleList).toEqual( - expectedResult.complexObject.simpleList - ); - - expect(JSON.stringify(result)).toEqual(JSON.stringify(expectedResult)); - - }); -}); \ No newline at end of file + expect(JSON.stringify(result)).toEqual(JSON.stringify(expectedResult)); + }); +}); From 235007e7e0c670588b089030224054c5c1e552f2 Mon Sep 17 00:00:00 2001 From: Roman Perera Date: Sat, 19 Apr 2025 14:40:39 +0200 Subject: [PATCH 8/9] updated containers node versions, mongodb & mongoose to latest, REACTIVATED substrate api --- api/Dockerfile | 6 +- api/package-lock.json | 581 +-- api/package.json | 5 +- api/run_server.sh | 2 +- api/src/server.ts | 6 +- api/src/server/mongo.ts | 92 +- api/src/util/MongooseUtil.ts | 403 +- api/src/v1/builder/BaseChainModelBuilder.ts | 117 +- api/src/v1/builder/ConfigModelBuilder.ts | 259 +- api/src/v1/builder/ConfigOldModelBuilder.ts | 70 +- api/src/v1/builder/GenericModelBuilder.ts | 91 +- .../SeverityAlertSubconfigModelBuilder.ts | 107 +- .../ThresholdAlertSubconfigModelBuilder.ts | 113 +- .../TimeWindowAlertSubconfigModelBuilder.ts | 117 +- .../v1/builder/channels/EmailModelBuilder.ts | 201 +- 
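
The commit message above is terse, but the substance of this patch is the jump from the 3.x MongoDB driver and mongoose 6 to driver 6.x and mongoose 8. One visible casualty: the useNewUrlParser and useUnifiedTopology flags were removed in driver v4 and have no effect anymore, which is why the server.ts and mongo.ts hunks further below drop them and pass topology options explicitly. A hedged sketch of the v6-style connection; the host names and replica-set name are borrowed from mongo.ts below, and the function name is invented:

    import { MongoClient, MongoClientOptions } from "mongodb";

    const options: MongoClientOptions = {
      socketTimeoutMS: 10000,
      connectTimeoutMS: 10000,
      serverSelectionTimeoutMS: 5000,
      replicaSet: "replica-set",
      readPreference: "primaryPreferred",
    };

    // In driver v4+, connect() resolves with an already-connected client;
    // there is no separate callback or topology flag to manage.
    async function connectPanicDb(): Promise<MongoClient> {
      return MongoClient.connect(
        "mongodb://rs1:27017,rs2:27017,rs3:27017",
        options
      );
    }
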
.../builder/channels/EmailOldModelBuilder.ts | 58 +- .../builder/channels/OpsgenieModelBuilder.ts | 167 +- .../channels/OpsgenieOldModelBuilder.ts | 58 +- .../builder/channels/PagerDutyModelBuilder.ts | 161 +- .../channels/PagerDutyOldModelBuilder.ts | 58 +- .../v1/builder/channels/SlackModelBuilder.ts | 197 +- .../builder/channels/SlackOldModelBuilder.ts | 58 +- .../builder/channels/TelegramModelBuilder.ts | 187 +- .../channels/TelegramOldModelBuilder.ts | 58 +- .../v1/builder/channels/TwilioModelBuilder.ts | 165 +- .../builder/channels/TwilioOldModelBuilder.ts | 58 +- .../entity/repository/AbstractRepository.ts | 167 +- .../entity/repository/BaseChainRepository.ts | 57 +- .../v1/entity/repository/ChannelRepository.ts | 674 +-- .../v1/entity/repository/ConfigRepository.ts | 131 +- .../v1/entity/repository/GenericRepository.ts | 103 +- api/src/v1/rest/channel/ChannelResource.ts | 1244 +++--- api/src/v1/rest/config/ConfigResource.ts | 2 +- api/tests/server.test.ts | 3704 ++++++++++------- docker-compose.yml | 17 +- substrate-api/Dockerfile | 7 +- substrate-api/run_server.sh | 2 +- ui/Dockerfile | 10 +- web-installer/Dockerfile | 6 +- 39 files changed, 5055 insertions(+), 4464 deletions(-) diff --git a/api/Dockerfile b/api/Dockerfile index f7266ddf..edaab4b9 100644 --- a/api/Dockerfile +++ b/api/Dockerfile @@ -1,4 +1,6 @@ -FROM node:14 +FROM node:20-alpine + +RUN apk add bash # Create app directory WORKDIR /opt/panic @@ -8,7 +10,7 @@ COPY ./entities ./entities # Change directory, and copy all installer contents from the host to the # container. -WORKDIR ./api +WORKDIR /opt/panic/api COPY ./api ./ # RUN npm install (ci) diff --git a/api/package-lock.json b/api/package-lock.json index 24c14465..a6e6c542 100644 --- a/api/package-lock.json +++ b/api/package-lock.json @@ -17,8 +17,8 @@ "dotenv": "^10.0.0", "express": "^4.17.1", "mockingoose": "^2.15.2", - "mongodb": "^3.6.9", - "mongoose": "^6.4.6", + "mongodb": "^6.15.0", + "mongoose": "^8.13.2", "nodemailer": "^6.7.5", "opsgenie-sdk": "^0.5.1", "redis": "^3.1.2", @@ -33,7 +33,6 @@ "@types/express": "^4.17.12", "@types/jest": "^27.0.2", "@types/mongodb": "^3.6.17", - "@types/mongoose": "^5.11.97", "@types/node": "^15.12.2", "@types/redis": "^2.8.29", "@types/supertest": "^2.0.11", @@ -1379,6 +1378,15 @@ "@jridgewell/sourcemap-codec": "^1.4.10" } }, + "node_modules/@mongodb-js/saslprep": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/@mongodb-js/saslprep/-/saslprep-1.2.2.tgz", + "integrity": "sha512-EB0O3SCSNRUFk66iRCpI+cXzIjdswfCs7F6nOC3RAGJ7xr5YhaicvsRwJ9eyzYvYRlCSDUO/c7g4yNulxKC1WA==", + "license": "MIT", + "dependencies": { + "sparse-bitfield": "^3.0.3" + } + }, "node_modules/@pagerduty/pdjs": { "version": "2.2.4", "resolved": "https://registry.npmjs.org/@pagerduty/pdjs/-/pdjs-2.2.4.tgz", @@ -1725,16 +1733,6 @@ "@types/node": "*" } }, - "node_modules/@types/mongoose": { - "version": "5.11.97", - "resolved": "https://registry.npmjs.org/@types/mongoose/-/mongoose-5.11.97.tgz", - "integrity": "sha512-cqwOVYT3qXyLiGw7ueU2kX9noE8DPGRY6z8eUxudhXY8NZ7DMKYAxyZkLSevGfhCX3dO/AoX5/SO9lAzfjon0Q==", - "deprecated": "Mongoose publishes its own types, so you do not need to install this package.", - "dev": true, - "dependencies": { - "mongoose": "*" - } - }, "node_modules/@types/node": { "version": "15.14.9", "resolved": "https://registry.npmjs.org/@types/node/-/node-15.14.9.tgz", @@ -1832,16 +1830,17 @@ } }, "node_modules/@types/webidl-conversions": { - "version": "6.1.1", - "resolved": 
"https://registry.npmjs.org/@types/webidl-conversions/-/webidl-conversions-6.1.1.tgz", - "integrity": "sha512-XAahCdThVuCFDQLT7R7Pk/vqeObFNL3YqRyFZg+AqAP/W1/w3xHaIxuW7WszQqTbIBOPRcItYJIou3i/mppu3Q==" + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/@types/webidl-conversions/-/webidl-conversions-7.0.3.tgz", + "integrity": "sha512-CiJJvcRtIgzadHCYXw7dqEnMNRjhGZlYK05Mj9OyktqV8uVT8fD2BFOB7S1uwBE3Kj2Z+4UyPmFw/Ixgw/LAlA==", + "license": "MIT" }, "node_modules/@types/whatwg-url": { - "version": "8.2.2", - "resolved": "https://registry.npmjs.org/@types/whatwg-url/-/whatwg-url-8.2.2.tgz", - "integrity": "sha512-FtQu10RWgn3D9U4aazdwIE2yzphmTJREDqNdODHrbrZmmMqI0vMheC/6NE/J1Yveaj8H+ela+YwWTjq5PGmuhA==", + "version": "11.0.5", + "resolved": "https://registry.npmjs.org/@types/whatwg-url/-/whatwg-url-11.0.5.tgz", + "integrity": "sha512-coYR071JRaHa+xoEvvYqvnIHaVqaYrLPbsufM9BF63HkwI5Lgmy2QR8Q5K/lYDYo5AK82wOvSOS0UsLTpTG7uQ==", + "license": "MIT", "dependencies": { - "@types/node": "*", "@types/webidl-conversions": "*" } }, @@ -2274,15 +2273,6 @@ "node": "*" } }, - "node_modules/bl": { - "version": "2.2.1", - "resolved": "https://registry.npmjs.org/bl/-/bl-2.2.1.tgz", - "integrity": "sha512-6Pesp1w0DEX1N550i/uGV/TqucVL4AM/pgThFSN/Qq9si1/DF9aIHs1BxD8V/QU0HoeHO6cQRTAuYnLPKq1e4g==", - "dependencies": { - "readable-stream": "^2.3.5", - "safe-buffer": "^5.1.1" - } - }, "node_modules/blakejs": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/blakejs/-/blakejs-1.2.1.tgz", @@ -2509,14 +2499,12 @@ } }, "node_modules/bson": { - "version": "4.6.5", - "resolved": "https://registry.npmjs.org/bson/-/bson-4.6.5.tgz", - "integrity": "sha512-uqrgcjyOaZsHfz7ea8zLRCLe1u+QGUSzMZmvXqO24CDW7DWoW1qiN9folSwa7hSneTSgM2ykDIzF5kcQQ8cwNw==", - "dependencies": { - "buffer": "^5.6.0" - }, + "version": "6.10.3", + "resolved": "https://registry.npmjs.org/bson/-/bson-6.10.3.tgz", + "integrity": "sha512-MTxGsqgYTwfshYWTRdmZRC+M7FnG1b4y7RO7p2k3X24Wq0yv1m77Wsj0BzlPzd/IowgESfsruQCUToa7vbOpPQ==", + "license": "Apache-2.0", "engines": { - "node": ">=6.9.0" + "node": ">=16.20.1" } }, "node_modules/buffer": { @@ -2930,11 +2918,6 @@ "integrity": "sha512-JxbCBUdrfr6AQjOXrxoTvAMJO4HBTUIlBzslcJPAz+/KT8yk53fXun51u+RenNYvad/+Vc2DIz5o9UxlCDymFQ==", "dev": true }, - "node_modules/core-util-is": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.3.tgz", - "integrity": "sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==" - }, "node_modules/cors": { "version": "2.8.5", "resolved": "https://registry.npmjs.org/cors/-/cors-2.8.5.tgz", @@ -4631,11 +4614,6 @@ "node": ">= 0.4" } }, - "node_modules/ip": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ip/-/ip-2.0.0.tgz", - "integrity": "sha512-WKa+XuLG1A1R0UWhl2+1XQSi+fZWMsYKffMZTTYsiZaUD8k2yDAj5atimTUD2TZkyCkNEeYE5NhFZmupOGtjYQ==" - }, "node_modules/ipaddr.js": { "version": "1.9.1", "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.9.1.tgz", @@ -4939,11 +4917,6 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/isarray": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", - "integrity": "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==" - }, "node_modules/isexe": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", @@ -5899,9 +5872,13 @@ } }, "node_modules/kareem": { - "version": "2.4.1", - "resolved": 
"https://registry.npmjs.org/kareem/-/kareem-2.4.1.tgz", - "integrity": "sha512-aJ9opVoXroQUPfovYP5kaj2lM7Jn02Gw13bL0lg9v0V7SaUc0qavPs0Eue7d2DcC3NjqI6QAUElXNsuZSeM+EA==" + "version": "2.6.3", + "resolved": "https://registry.npmjs.org/kareem/-/kareem-2.6.3.tgz", + "integrity": "sha512-C3iHfuGUXK2u8/ipq9LfjFfXFxAZMQJJq7vLS45r3D9Y2xQ/m4S8zaR4zMLFWh9AsNPXmcFfUDhTEO8UIC/V6Q==", + "license": "Apache-2.0", + "engines": { + "node": ">=12.0.0" + } }, "node_modules/keccak": { "version": "3.0.2", @@ -6114,8 +6091,7 @@ "node_modules/memory-pager": { "version": "1.5.0", "resolved": "https://registry.npmjs.org/memory-pager/-/memory-pager-1.5.0.tgz", - "integrity": "sha512-ZS4Bp4r/Zoeq6+NLJpP+0Zzm0pR8whtGPf1XExKLJBAczGMnSi3It14OiNCStjQjM6NU1okjQGSxgEZN8eBYKg==", - "optional": true + "integrity": "sha512-ZS4Bp4r/Zoeq6+NLJpP+0Zzm0pR8whtGPf1XExKLJBAczGMnSi3It14OiNCStjQjM6NU1okjQGSxgEZN8eBYKg==" }, "node_modules/merge-descriptors": { "version": "1.0.1", @@ -6300,27 +6276,35 @@ } }, "node_modules/mongodb": { - "version": "3.7.3", - "resolved": "https://registry.npmjs.org/mongodb/-/mongodb-3.7.3.tgz", - "integrity": "sha512-Psm+g3/wHXhjBEktkxXsFMZvd3nemI0r3IPsE0bU+4//PnvNWKkzhZcEsbPcYiWqe8XqXJJEg4Tgtr7Raw67Yw==", - "dependencies": { - "bl": "^2.2.1", - "bson": "^1.1.4", - "denque": "^1.4.1", - "optional-require": "^1.1.8", - "safe-buffer": "^5.1.2" + "version": "6.15.0", + "resolved": "https://registry.npmjs.org/mongodb/-/mongodb-6.15.0.tgz", + "integrity": "sha512-ifBhQ0rRzHDzqp9jAQP6OwHSH7dbYIQjD3SbJs9YYk9AikKEettW/9s/tbSFDTpXcRbF+u1aLrhHxDFaYtZpFQ==", + "license": "Apache-2.0", + "dependencies": { + "@mongodb-js/saslprep": "^1.1.9", + "bson": "^6.10.3", + "mongodb-connection-string-url": "^3.0.0" }, "engines": { - "node": ">=4" + "node": ">=16.20.1" }, - "optionalDependencies": { - "saslprep": "^1.0.0" + "peerDependencies": { + "@aws-sdk/credential-providers": "^3.188.0", + "@mongodb-js/zstd": "^1.1.0 || ^2.0.0", + "gcp-metadata": "^5.2.0", + "kerberos": "^2.0.1", + "mongodb-client-encryption": ">=6.0.0 <7", + "snappy": "^7.2.2", + "socks": "^2.7.1" }, "peerDependenciesMeta": { - "aws4": { + "@aws-sdk/credential-providers": { "optional": true }, - "bson-ext": { + "@mongodb-js/zstd": { + "optional": true + }, + "gcp-metadata": { "optional": true }, "kerberos": { @@ -6329,108 +6313,80 @@ "mongodb-client-encryption": { "optional": true }, - "mongodb-extjson": { + "snappy": { "optional": true }, - "snappy": { + "socks": { "optional": true } } }, "node_modules/mongodb-connection-string-url": { - "version": "2.5.3", - "resolved": "https://registry.npmjs.org/mongodb-connection-string-url/-/mongodb-connection-string-url-2.5.3.tgz", - "integrity": "sha512-f+/WsED+xF4B74l3k9V/XkTVj5/fxFH2o5ToKXd8Iyi5UhM+sO9u0Ape17Mvl/GkZaFtM0HQnzAG5OTmhKw+tQ==", + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/mongodb-connection-string-url/-/mongodb-connection-string-url-3.0.2.tgz", + "integrity": "sha512-rMO7CGo/9BFwyZABcKAWL8UJwH/Kc2x0g72uhDWzG48URRax5TCIcJ7Rc3RZqffZzO/Gwff/jyKwCU9TN8gehA==", + "license": "Apache-2.0", "dependencies": { - "@types/whatwg-url": "^8.2.1", - "whatwg-url": "^11.0.0" + "@types/whatwg-url": "^11.0.2", + "whatwg-url": "^14.1.0 || ^13.0.0" } }, "node_modules/mongodb-connection-string-url/node_modules/tr46": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/tr46/-/tr46-3.0.0.tgz", - "integrity": "sha512-l7FvfAHlcmulp8kr+flpQZmVwtu7nfRV7NZujtN0OqES8EL4O4e0qqzL0DC5gAvx/ZC/9lk6rhcUwYvkBnBnYA==", + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/tr46/-/tr46-5.1.1.tgz", 
+ "integrity": "sha512-hdF5ZgjTqgAntKkklYw0R03MG2x/bSzTtkxmIRw/sTNV8YXsCJ1tfLAX23lhxhHJlEf3CRCOCGGWw3vI3GaSPw==", + "license": "MIT", "dependencies": { - "punycode": "^2.1.1" + "punycode": "^2.3.1" }, "engines": { - "node": ">=12" + "node": ">=18" } }, "node_modules/mongodb-connection-string-url/node_modules/webidl-conversions": { "version": "7.0.0", "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-7.0.0.tgz", "integrity": "sha512-VwddBukDzu71offAQR975unBIGqfKZpM+8ZX6ySk8nYhVoo5CYaZyzt3YBvYtRtO+aoGlqxPg/B87NGVZ/fu6g==", + "license": "BSD-2-Clause", "engines": { "node": ">=12" } }, "node_modules/mongodb-connection-string-url/node_modules/whatwg-url": { - "version": "11.0.0", - "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-11.0.0.tgz", - "integrity": "sha512-RKT8HExMpoYx4igMiVMY83lN6UeITKJlBQ+vR/8ZJ8OCdSiN3RwCq+9gH0+Xzj0+5IrM6i4j/6LuvzbZIQgEcQ==", + "version": "14.2.0", + "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-14.2.0.tgz", + "integrity": "sha512-De72GdQZzNTUBBChsXueQUnPKDkg/5A5zp7pFDuQAj5UFoENpiACU0wlCvzpAGnTkj++ihpKwKyYewn/XNUbKw==", + "license": "MIT", "dependencies": { - "tr46": "^3.0.0", + "tr46": "^5.1.0", "webidl-conversions": "^7.0.0" }, "engines": { - "node": ">=12" - } - }, - "node_modules/mongodb/node_modules/bson": { - "version": "1.1.6", - "resolved": "https://registry.npmjs.org/bson/-/bson-1.1.6.tgz", - "integrity": "sha512-EvVNVeGo4tHxwi8L6bPj3y3itEvStdwvvlojVxxbyYfoaxJ6keLgrTuKdyfEAszFK+H3olzBuafE0yoh0D1gdg==", - "engines": { - "node": ">=0.6.19" + "node": ">=18" } }, "node_modules/mongoose": { - "version": "6.5.2", - "resolved": "https://registry.npmjs.org/mongoose/-/mongoose-6.5.2.tgz", - "integrity": "sha512-3CFDrSLtK2qjM1pZeZpLTUyqPRkc11Iuh74ZrwS4IwEJ3K2PqGnmyPLw7ex4Kzu37ujIMp3MAuiBlUjfrcb6hw==", + "version": "8.13.2", + "resolved": "https://registry.npmjs.org/mongoose/-/mongoose-8.13.2.tgz", + "integrity": "sha512-riCBqZmNkYBWjXpM3qWLDQw7QmTKsVZDPhLXFJqC87+OjocEVpvS3dA2BPPUiLAu+m0/QmEj5pSXKhH+/DgerQ==", + "license": "MIT", "dependencies": { - "bson": "^4.6.5", - "kareem": "2.4.1", - "mongodb": "4.8.1", + "bson": "^6.10.3", + "kareem": "2.6.3", + "mongodb": "~6.15.0", "mpath": "0.9.0", - "mquery": "4.0.3", + "mquery": "5.0.0", "ms": "2.1.3", - "sift": "16.0.0" + "sift": "17.1.3" }, "engines": { - "node": ">=12.0.0" + "node": ">=16.20.1" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/mongoose" } }, - "node_modules/mongoose/node_modules/denque": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/denque/-/denque-2.1.0.tgz", - "integrity": "sha512-HVQE3AAb/pxF8fQAoiqpvg9i3evqug3hoiwakOyZAwJm+6vZehbkYXZ0l4JxS+I3QxM97v5aaRNhj8v5oBhekw==", - "engines": { - "node": ">=0.10" - } - }, - "node_modules/mongoose/node_modules/mongodb": { - "version": "4.8.1", - "resolved": "https://registry.npmjs.org/mongodb/-/mongodb-4.8.1.tgz", - "integrity": "sha512-/NyiM3Ox9AwP5zrfT9TXjRKDJbXlLaUDQ9Rg//2lbg8D2A8GXV0VidYYnA/gfdK6uwbnL4FnAflH7FbGw3TS7w==", - "dependencies": { - "bson": "^4.6.5", - "denque": "^2.0.1", - "mongodb-connection-string-url": "^2.5.2", - "socks": "^2.6.2" - }, - "engines": { - "node": ">=12.9.0" - }, - "optionalDependencies": { - "saslprep": "^1.0.3" - } - }, "node_modules/mongoose/node_modules/ms": { "version": "2.1.3", "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", @@ -6445,22 +6401,24 @@ } }, "node_modules/mquery": { - "version": "4.0.3", - "resolved": "https://registry.npmjs.org/mquery/-/mquery-4.0.3.tgz", - "integrity": 
"sha512-J5heI+P08I6VJ2Ky3+33IpCdAvlYGTSUjwTPxkAr8i8EoduPMBX2OY/wa3IKZIQl7MU4SbFk8ndgSKyB/cl1zA==", + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/mquery/-/mquery-5.0.0.tgz", + "integrity": "sha512-iQMncpmEK8R8ncT8HJGsGc9Dsp8xcgYMVSbs5jgnm1lFHTZqMJTUWTDx1LBO8+mK3tPNZWFLBghQEIOULSTHZg==", + "license": "MIT", "dependencies": { "debug": "4.x" }, "engines": { - "node": ">=12.0.0" + "node": ">=14.0.0" } }, "node_modules/mquery/node_modules/debug": { - "version": "4.3.4", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", - "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", + "version": "4.4.0", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.0.tgz", + "integrity": "sha512-6WTZ/IxCY/T6BALoZHaE4ctp9xm+Z5kY/pzYaCHRFeyVhojxlrm+46y68HA6hr0TcwEssoxNiDEUJQjfPZ/RYA==", + "license": "MIT", "dependencies": { - "ms": "2.1.2" + "ms": "^2.1.3" }, "engines": { "node": ">=6.0" @@ -6472,9 +6430,10 @@ } }, "node_modules/mquery/node_modules/ms": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", - "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "license": "MIT" }, "node_modules/ms": { "version": "2.0.0", @@ -6775,17 +6734,6 @@ "node": ">= v0.6.0" } }, - "node_modules/optional-require": { - "version": "1.1.8", - "resolved": "https://registry.npmjs.org/optional-require/-/optional-require-1.1.8.tgz", - "integrity": "sha512-jq83qaUb0wNg9Krv1c5OQ+58EK+vHde6aBPzLvPPqJm89UQWsvSuFy9X/OSNJnFeSOKo7btE0n8Nl2+nE+z5nA==", - "dependencies": { - "require-at": "^1.0.6" - }, - "engines": { - "node": ">=4" - } - }, "node_modules/optionator": { "version": "0.8.3", "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.8.3.tgz", @@ -7100,11 +7048,6 @@ "node": ">= 0.6.0" } }, - "node_modules/process-nextick-args": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz", - "integrity": "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==" - }, "node_modules/prompts": { "version": "2.4.2", "resolved": "https://registry.npmjs.org/prompts/-/prompts-2.4.2.tgz", @@ -7158,9 +7101,10 @@ } }, "node_modules/punycode": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz", - "integrity": "sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==", + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz", + "integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==", + "license": "MIT", "engines": { "node": ">=6" } @@ -7263,25 +7207,6 @@ "integrity": "sha512-w2GsyukL62IJnlaff/nRegPQR94C/XXamvMWmSHRJ4y7Ts/4ocGRmTHvOs8PSE6pB3dWOrD/nueuU5sduBsQ4w==", "dev": true }, - "node_modules/readable-stream": { - "version": "2.3.7", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz", - "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==", - "dependencies": { - "core-util-is": "~1.0.0", - "inherits": "~2.0.3", - "isarray": "~1.0.0", - "process-nextick-args": "~2.0.0", - "safe-buffer": 
"~5.1.1", - "string_decoder": "~1.1.1", - "util-deprecate": "~1.0.1" - } - }, - "node_modules/readable-stream/node_modules/safe-buffer": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", - "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==" - }, "node_modules/redis": { "version": "3.1.2", "resolved": "https://registry.npmjs.org/redis/-/redis-3.1.2.tgz", @@ -7416,14 +7341,6 @@ "request": "2.*.*" } }, - "node_modules/require-at": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/require-at/-/require-at-1.0.6.tgz", - "integrity": "sha512-7i1auJbMUrXEAZCOQ0VNJgmcT2VOKPRl2YGJwgpHpC9CE91Mv4/4UYIUm4chGJaI381ZDq1JUicFii64Hapd8g==", - "engines": { - "node": ">=4" - } - }, "node_modules/require-directory": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", @@ -7586,18 +7503,6 @@ "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==" }, - "node_modules/saslprep": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/saslprep/-/saslprep-1.0.3.tgz", - "integrity": "sha512-/MY/PEMbk2SuY5sScONwhUDsV2p77Znkb/q3nSVstq/yQzYJOH/Azh29p9oJLsl3LnQwSvZDKagDGBsBwSooag==", - "optional": true, - "dependencies": { - "sparse-bitfield": "^3.0.3" - }, - "engines": { - "node": ">=6" - } - }, "node_modules/saxes": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/saxes/-/saxes-5.0.1.tgz", @@ -7757,9 +7662,10 @@ } }, "node_modules/sift": { - "version": "16.0.0", - "resolved": "https://registry.npmjs.org/sift/-/sift-16.0.0.tgz", - "integrity": "sha512-ILTjdP2Mv9V1kIxWMXeMTIRbOBrqKc4JAXmFMnFq3fKeyQ2Qwa3Dw1ubcye3vR+Y6ofA0b9gNDr/y2t6eUeIzQ==" + "version": "17.1.3", + "resolved": "https://registry.npmjs.org/sift/-/sift-17.1.3.tgz", + "integrity": "sha512-Rtlj66/b0ICeFzYTuNvX/EF1igRbbnGSvEyT79McoZa/DeGhMyC5pWKOEsZKnpkqtSeovd5FL/bjHWC3CIIvCQ==", + "license": "MIT" }, "node_modules/signal-exit": { "version": "3.0.7", @@ -7822,28 +7728,6 @@ "node": ">=8" } }, - "node_modules/smart-buffer": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/smart-buffer/-/smart-buffer-4.2.0.tgz", - "integrity": "sha512-94hK0Hh8rPqQl2xXc3HsaBoOXKV20MToPkcXvwbISWLEs+64sBq5kFgn2kJDHb1Pry9yrP0dxrCI9RRci7RXKg==", - "engines": { - "node": ">= 6.0.0", - "npm": ">= 3.0.0" - } - }, - "node_modules/socks": { - "version": "2.7.0", - "resolved": "https://registry.npmjs.org/socks/-/socks-2.7.0.tgz", - "integrity": "sha512-scnOe9y4VuiNUULJN72GrM26BNOjVsfPXI+j+98PkyEfsIXroa5ofyjT+FzGvn/xHs73U2JtoBYAVx9Hl4quSA==", - "dependencies": { - "ip": "^2.0.0", - "smart-buffer": "^4.2.0" - }, - "engines": { - "node": ">= 10.13.0", - "npm": ">= 3.0.0" - } - }, "node_modules/source-map": { "version": "0.6.1", "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", @@ -7867,7 +7751,6 @@ "version": "3.0.3", "resolved": "https://registry.npmjs.org/sparse-bitfield/-/sparse-bitfield-3.0.3.tgz", "integrity": "sha512-kvzhi7vqKTfkh0PZU+2D2PIllw2ymqJKujUcyPMd9Y75Nv4nPbGJZXNhxsgdQab2BmlDct1YnfQCguEvHr7VsQ==", - "optional": true, "dependencies": { "memory-pager": "^1.0.2" } @@ -10551,6 +10434,14 @@ "@jridgewell/sourcemap-codec": "^1.4.10" } }, + "@mongodb-js/saslprep": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/@mongodb-js/saslprep/-/saslprep-1.2.2.tgz", + "integrity": 
"sha512-EB0O3SCSNRUFk66iRCpI+cXzIjdswfCs7F6nOC3RAGJ7xr5YhaicvsRwJ9eyzYvYRlCSDUO/c7g4yNulxKC1WA==", + "requires": { + "sparse-bitfield": "^3.0.3" + } + }, "@pagerduty/pdjs": { "version": "2.2.4", "resolved": "https://registry.npmjs.org/@pagerduty/pdjs/-/pdjs-2.2.4.tgz", @@ -10869,15 +10760,6 @@ "@types/node": "*" } }, - "@types/mongoose": { - "version": "5.11.97", - "resolved": "https://registry.npmjs.org/@types/mongoose/-/mongoose-5.11.97.tgz", - "integrity": "sha512-cqwOVYT3qXyLiGw7ueU2kX9noE8DPGRY6z8eUxudhXY8NZ7DMKYAxyZkLSevGfhCX3dO/AoX5/SO9lAzfjon0Q==", - "dev": true, - "requires": { - "mongoose": "*" - } - }, "@types/node": { "version": "15.14.9", "resolved": "https://registry.npmjs.org/@types/node/-/node-15.14.9.tgz", @@ -10975,16 +10857,15 @@ } }, "@types/webidl-conversions": { - "version": "6.1.1", - "resolved": "https://registry.npmjs.org/@types/webidl-conversions/-/webidl-conversions-6.1.1.tgz", - "integrity": "sha512-XAahCdThVuCFDQLT7R7Pk/vqeObFNL3YqRyFZg+AqAP/W1/w3xHaIxuW7WszQqTbIBOPRcItYJIou3i/mppu3Q==" + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/@types/webidl-conversions/-/webidl-conversions-7.0.3.tgz", + "integrity": "sha512-CiJJvcRtIgzadHCYXw7dqEnMNRjhGZlYK05Mj9OyktqV8uVT8fD2BFOB7S1uwBE3Kj2Z+4UyPmFw/Ixgw/LAlA==" }, "@types/whatwg-url": { - "version": "8.2.2", - "resolved": "https://registry.npmjs.org/@types/whatwg-url/-/whatwg-url-8.2.2.tgz", - "integrity": "sha512-FtQu10RWgn3D9U4aazdwIE2yzphmTJREDqNdODHrbrZmmMqI0vMheC/6NE/J1Yveaj8H+ela+YwWTjq5PGmuhA==", + "version": "11.0.5", + "resolved": "https://registry.npmjs.org/@types/whatwg-url/-/whatwg-url-11.0.5.tgz", + "integrity": "sha512-coYR071JRaHa+xoEvvYqvnIHaVqaYrLPbsufM9BF63HkwI5Lgmy2QR8Q5K/lYDYo5AK82wOvSOS0UsLTpTG7uQ==", "requires": { - "@types/node": "*", "@types/webidl-conversions": "*" } }, @@ -11319,15 +11200,6 @@ "resolved": "https://registry.npmjs.org/bignumber.js/-/bignumber.js-9.1.0.tgz", "integrity": "sha512-4LwHK4nfDOraBCtst+wOWIHbu1vhvAPJK8g8nROd4iuc3PSEjWif/qwbkh8jwCJz6yDBvtU4KPynETgrfh7y3A==" }, - "bl": { - "version": "2.2.1", - "resolved": "https://registry.npmjs.org/bl/-/bl-2.2.1.tgz", - "integrity": "sha512-6Pesp1w0DEX1N550i/uGV/TqucVL4AM/pgThFSN/Qq9si1/DF9aIHs1BxD8V/QU0HoeHO6cQRTAuYnLPKq1e4g==", - "requires": { - "readable-stream": "^2.3.5", - "safe-buffer": "^5.1.1" - } - }, "blakejs": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/blakejs/-/blakejs-1.2.1.tgz", @@ -11529,12 +11401,9 @@ } }, "bson": { - "version": "4.6.5", - "resolved": "https://registry.npmjs.org/bson/-/bson-4.6.5.tgz", - "integrity": "sha512-uqrgcjyOaZsHfz7ea8zLRCLe1u+QGUSzMZmvXqO24CDW7DWoW1qiN9folSwa7hSneTSgM2ykDIzF5kcQQ8cwNw==", - "requires": { - "buffer": "^5.6.0" - } + "version": "6.10.3", + "resolved": "https://registry.npmjs.org/bson/-/bson-6.10.3.tgz", + "integrity": "sha512-MTxGsqgYTwfshYWTRdmZRC+M7FnG1b4y7RO7p2k3X24Wq0yv1m77Wsj0BzlPzd/IowgESfsruQCUToa7vbOpPQ==" }, "buffer": { "version": "5.7.1", @@ -11855,11 +11724,6 @@ "integrity": "sha512-JxbCBUdrfr6AQjOXrxoTvAMJO4HBTUIlBzslcJPAz+/KT8yk53fXun51u+RenNYvad/+Vc2DIz5o9UxlCDymFQ==", "dev": true }, - "core-util-is": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.3.tgz", - "integrity": "sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==" - }, "cors": { "version": "2.8.5", "resolved": "https://registry.npmjs.org/cors/-/cors-2.8.5.tgz", @@ -13193,11 +13057,6 @@ "side-channel": "^1.0.4" } }, - "ip": { - "version": "2.0.0", - "resolved": 
"https://registry.npmjs.org/ip/-/ip-2.0.0.tgz", - "integrity": "sha512-WKa+XuLG1A1R0UWhl2+1XQSi+fZWMsYKffMZTTYsiZaUD8k2yDAj5atimTUD2TZkyCkNEeYE5NhFZmupOGtjYQ==" - }, "ipaddr.js": { "version": "1.9.1", "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.9.1.tgz", @@ -13395,11 +13254,6 @@ "call-bind": "^1.0.2" } }, - "isarray": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", - "integrity": "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==" - }, "isexe": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", @@ -14158,9 +14012,9 @@ } }, "kareem": { - "version": "2.4.1", - "resolved": "https://registry.npmjs.org/kareem/-/kareem-2.4.1.tgz", - "integrity": "sha512-aJ9opVoXroQUPfovYP5kaj2lM7Jn02Gw13bL0lg9v0V7SaUc0qavPs0Eue7d2DcC3NjqI6QAUElXNsuZSeM+EA==" + "version": "2.6.3", + "resolved": "https://registry.npmjs.org/kareem/-/kareem-2.6.3.tgz", + "integrity": "sha512-C3iHfuGUXK2u8/ipq9LfjFfXFxAZMQJJq7vLS45r3D9Y2xQ/m4S8zaR4zMLFWh9AsNPXmcFfUDhTEO8UIC/V6Q==" }, "keccak": { "version": "3.0.2", @@ -14340,8 +14194,7 @@ "memory-pager": { "version": "1.5.0", "resolved": "https://registry.npmjs.org/memory-pager/-/memory-pager-1.5.0.tgz", - "integrity": "sha512-ZS4Bp4r/Zoeq6+NLJpP+0Zzm0pR8whtGPf1XExKLJBAczGMnSi3It14OiNCStjQjM6NU1okjQGSxgEZN8eBYKg==", - "optional": true + "integrity": "sha512-ZS4Bp4r/Zoeq6+NLJpP+0Zzm0pR8whtGPf1XExKLJBAczGMnSi3It14OiNCStjQjM6NU1okjQGSxgEZN8eBYKg==" }, "merge-descriptors": { "version": "1.0.1", @@ -14481,40 +14334,30 @@ "requires": {} }, "mongodb": { - "version": "3.7.3", - "resolved": "https://registry.npmjs.org/mongodb/-/mongodb-3.7.3.tgz", - "integrity": "sha512-Psm+g3/wHXhjBEktkxXsFMZvd3nemI0r3IPsE0bU+4//PnvNWKkzhZcEsbPcYiWqe8XqXJJEg4Tgtr7Raw67Yw==", - "requires": { - "bl": "^2.2.1", - "bson": "^1.1.4", - "denque": "^1.4.1", - "optional-require": "^1.1.8", - "safe-buffer": "^5.1.2", - "saslprep": "^1.0.0" - }, - "dependencies": { - "bson": { - "version": "1.1.6", - "resolved": "https://registry.npmjs.org/bson/-/bson-1.1.6.tgz", - "integrity": "sha512-EvVNVeGo4tHxwi8L6bPj3y3itEvStdwvvlojVxxbyYfoaxJ6keLgrTuKdyfEAszFK+H3olzBuafE0yoh0D1gdg==" - } + "version": "6.15.0", + "resolved": "https://registry.npmjs.org/mongodb/-/mongodb-6.15.0.tgz", + "integrity": "sha512-ifBhQ0rRzHDzqp9jAQP6OwHSH7dbYIQjD3SbJs9YYk9AikKEettW/9s/tbSFDTpXcRbF+u1aLrhHxDFaYtZpFQ==", + "requires": { + "@mongodb-js/saslprep": "^1.1.9", + "bson": "^6.10.3", + "mongodb-connection-string-url": "^3.0.0" } }, "mongodb-connection-string-url": { - "version": "2.5.3", - "resolved": "https://registry.npmjs.org/mongodb-connection-string-url/-/mongodb-connection-string-url-2.5.3.tgz", - "integrity": "sha512-f+/WsED+xF4B74l3k9V/XkTVj5/fxFH2o5ToKXd8Iyi5UhM+sO9u0Ape17Mvl/GkZaFtM0HQnzAG5OTmhKw+tQ==", + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/mongodb-connection-string-url/-/mongodb-connection-string-url-3.0.2.tgz", + "integrity": "sha512-rMO7CGo/9BFwyZABcKAWL8UJwH/Kc2x0g72uhDWzG48URRax5TCIcJ7Rc3RZqffZzO/Gwff/jyKwCU9TN8gehA==", "requires": { - "@types/whatwg-url": "^8.2.1", - "whatwg-url": "^11.0.0" + "@types/whatwg-url": "^11.0.2", + "whatwg-url": "^14.1.0 || ^13.0.0" }, "dependencies": { "tr46": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/tr46/-/tr46-3.0.0.tgz", - "integrity": "sha512-l7FvfAHlcmulp8kr+flpQZmVwtu7nfRV7NZujtN0OqES8EL4O4e0qqzL0DC5gAvx/ZC/9lk6rhcUwYvkBnBnYA==", + "version": "5.1.1", + "resolved": 
"https://registry.npmjs.org/tr46/-/tr46-5.1.1.tgz", + "integrity": "sha512-hdF5ZgjTqgAntKkklYw0R03MG2x/bSzTtkxmIRw/sTNV8YXsCJ1tfLAX23lhxhHJlEf3CRCOCGGWw3vI3GaSPw==", "requires": { - "punycode": "^2.1.1" + "punycode": "^2.3.1" } }, "webidl-conversions": { @@ -14523,47 +14366,30 @@ "integrity": "sha512-VwddBukDzu71offAQR975unBIGqfKZpM+8ZX6ySk8nYhVoo5CYaZyzt3YBvYtRtO+aoGlqxPg/B87NGVZ/fu6g==" }, "whatwg-url": { - "version": "11.0.0", - "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-11.0.0.tgz", - "integrity": "sha512-RKT8HExMpoYx4igMiVMY83lN6UeITKJlBQ+vR/8ZJ8OCdSiN3RwCq+9gH0+Xzj0+5IrM6i4j/6LuvzbZIQgEcQ==", + "version": "14.2.0", + "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-14.2.0.tgz", + "integrity": "sha512-De72GdQZzNTUBBChsXueQUnPKDkg/5A5zp7pFDuQAj5UFoENpiACU0wlCvzpAGnTkj++ihpKwKyYewn/XNUbKw==", "requires": { - "tr46": "^3.0.0", + "tr46": "^5.1.0", "webidl-conversions": "^7.0.0" } } } }, "mongoose": { - "version": "6.5.2", - "resolved": "https://registry.npmjs.org/mongoose/-/mongoose-6.5.2.tgz", - "integrity": "sha512-3CFDrSLtK2qjM1pZeZpLTUyqPRkc11Iuh74ZrwS4IwEJ3K2PqGnmyPLw7ex4Kzu37ujIMp3MAuiBlUjfrcb6hw==", + "version": "8.13.2", + "resolved": "https://registry.npmjs.org/mongoose/-/mongoose-8.13.2.tgz", + "integrity": "sha512-riCBqZmNkYBWjXpM3qWLDQw7QmTKsVZDPhLXFJqC87+OjocEVpvS3dA2BPPUiLAu+m0/QmEj5pSXKhH+/DgerQ==", "requires": { - "bson": "^4.6.5", - "kareem": "2.4.1", - "mongodb": "4.8.1", + "bson": "^6.10.3", + "kareem": "2.6.3", + "mongodb": "~6.15.0", "mpath": "0.9.0", - "mquery": "4.0.3", + "mquery": "5.0.0", "ms": "2.1.3", - "sift": "16.0.0" + "sift": "17.1.3" }, "dependencies": { - "denque": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/denque/-/denque-2.1.0.tgz", - "integrity": "sha512-HVQE3AAb/pxF8fQAoiqpvg9i3evqug3hoiwakOyZAwJm+6vZehbkYXZ0l4JxS+I3QxM97v5aaRNhj8v5oBhekw==" - }, - "mongodb": { - "version": "4.8.1", - "resolved": "https://registry.npmjs.org/mongodb/-/mongodb-4.8.1.tgz", - "integrity": "sha512-/NyiM3Ox9AwP5zrfT9TXjRKDJbXlLaUDQ9Rg//2lbg8D2A8GXV0VidYYnA/gfdK6uwbnL4FnAflH7FbGw3TS7w==", - "requires": { - "bson": "^4.6.5", - "denque": "^2.0.1", - "mongodb-connection-string-url": "^2.5.2", - "saslprep": "^1.0.3", - "socks": "^2.6.2" - } - }, "ms": { "version": "2.1.3", "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", @@ -14577,25 +14403,25 @@ "integrity": "sha512-ikJRQTk8hw5DEoFVxHG1Gn9T/xcjtdnOKIU1JTmGjZZlg9LST2mBLmcX3/ICIbgJydT2GOc15RnNy5mHmzfSew==" }, "mquery": { - "version": "4.0.3", - "resolved": "https://registry.npmjs.org/mquery/-/mquery-4.0.3.tgz", - "integrity": "sha512-J5heI+P08I6VJ2Ky3+33IpCdAvlYGTSUjwTPxkAr8i8EoduPMBX2OY/wa3IKZIQl7MU4SbFk8ndgSKyB/cl1zA==", + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/mquery/-/mquery-5.0.0.tgz", + "integrity": "sha512-iQMncpmEK8R8ncT8HJGsGc9Dsp8xcgYMVSbs5jgnm1lFHTZqMJTUWTDx1LBO8+mK3tPNZWFLBghQEIOULSTHZg==", "requires": { "debug": "4.x" }, "dependencies": { "debug": { - "version": "4.3.4", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", - "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", + "version": "4.4.0", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.0.tgz", + "integrity": "sha512-6WTZ/IxCY/T6BALoZHaE4ctp9xm+Z5kY/pzYaCHRFeyVhojxlrm+46y68HA6hr0TcwEssoxNiDEUJQjfPZ/RYA==", "requires": { - "ms": "2.1.2" + "ms": "^2.1.3" } }, "ms": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", - "integrity": 
"sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==" } } }, @@ -14833,14 +14659,6 @@ "requestretry": "^7.0.0" } }, - "optional-require": { - "version": "1.1.8", - "resolved": "https://registry.npmjs.org/optional-require/-/optional-require-1.1.8.tgz", - "integrity": "sha512-jq83qaUb0wNg9Krv1c5OQ+58EK+vHde6aBPzLvPPqJm89UQWsvSuFy9X/OSNJnFeSOKo7btE0n8Nl2+nE+z5nA==", - "requires": { - "require-at": "^1.0.6" - } - }, "optionator": { "version": "0.8.3", "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.8.3.tgz", @@ -15075,11 +14893,6 @@ "resolved": "https://registry.npmjs.org/process/-/process-0.11.10.tgz", "integrity": "sha512-cdGef/drWFoydD1JsMzuFf8100nZl+GT+yacc2bEced5f9Rjk4z+WtFUTBu9PhOi9j/jfmBPu0mMEY4wIdAF8A==" }, - "process-nextick-args": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz", - "integrity": "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==" - }, "prompts": { "version": "2.4.2", "resolved": "https://registry.npmjs.org/prompts/-/prompts-2.4.2.tgz", @@ -15127,9 +14940,9 @@ } }, "punycode": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz", - "integrity": "sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==" + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz", + "integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==" }, "q": { "version": "2.0.3", @@ -15208,27 +15021,6 @@ "integrity": "sha512-w2GsyukL62IJnlaff/nRegPQR94C/XXamvMWmSHRJ4y7Ts/4ocGRmTHvOs8PSE6pB3dWOrD/nueuU5sduBsQ4w==", "dev": true }, - "readable-stream": { - "version": "2.3.7", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz", - "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==", - "requires": { - "core-util-is": "~1.0.0", - "inherits": "~2.0.3", - "isarray": "~1.0.0", - "process-nextick-args": "~2.0.0", - "safe-buffer": "~5.1.1", - "string_decoder": "~1.1.1", - "util-deprecate": "~1.0.1" - }, - "dependencies": { - "safe-buffer": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", - "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==" - } - } - }, "redis": { "version": "3.1.2", "resolved": "https://registry.npmjs.org/redis/-/redis-3.1.2.tgz", @@ -15330,11 +15122,6 @@ "lodash": "^4.17.15" } }, - "require-at": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/require-at/-/require-at-1.0.6.tgz", - "integrity": "sha512-7i1auJbMUrXEAZCOQ0VNJgmcT2VOKPRl2YGJwgpHpC9CE91Mv4/4UYIUm4chGJaI381ZDq1JUicFii64Hapd8g==" - }, "require-directory": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", @@ -15451,15 +15238,6 @@ "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==" }, - "saslprep": { - "version": "1.0.3", - "resolved": 
"https://registry.npmjs.org/saslprep/-/saslprep-1.0.3.tgz", - "integrity": "sha512-/MY/PEMbk2SuY5sScONwhUDsV2p77Znkb/q3nSVstq/yQzYJOH/Azh29p9oJLsl3LnQwSvZDKagDGBsBwSooag==", - "optional": true, - "requires": { - "sparse-bitfield": "^3.0.3" - } - }, "saxes": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/saxes/-/saxes-5.0.1.tgz", @@ -15590,9 +15368,9 @@ } }, "sift": { - "version": "16.0.0", - "resolved": "https://registry.npmjs.org/sift/-/sift-16.0.0.tgz", - "integrity": "sha512-ILTjdP2Mv9V1kIxWMXeMTIRbOBrqKc4JAXmFMnFq3fKeyQ2Qwa3Dw1ubcye3vR+Y6ofA0b9gNDr/y2t6eUeIzQ==" + "version": "17.1.3", + "resolved": "https://registry.npmjs.org/sift/-/sift-17.1.3.tgz", + "integrity": "sha512-Rtlj66/b0ICeFzYTuNvX/EF1igRbbnGSvEyT79McoZa/DeGhMyC5pWKOEsZKnpkqtSeovd5FL/bjHWC3CIIvCQ==" }, "signal-exit": { "version": "3.0.7", @@ -15637,20 +15415,6 @@ "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", "dev": true }, - "smart-buffer": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/smart-buffer/-/smart-buffer-4.2.0.tgz", - "integrity": "sha512-94hK0Hh8rPqQl2xXc3HsaBoOXKV20MToPkcXvwbISWLEs+64sBq5kFgn2kJDHb1Pry9yrP0dxrCI9RRci7RXKg==" - }, - "socks": { - "version": "2.7.0", - "resolved": "https://registry.npmjs.org/socks/-/socks-2.7.0.tgz", - "integrity": "sha512-scnOe9y4VuiNUULJN72GrM26BNOjVsfPXI+j+98PkyEfsIXroa5ofyjT+FzGvn/xHs73U2JtoBYAVx9Hl4quSA==", - "requires": { - "ip": "^2.0.0", - "smart-buffer": "^4.2.0" - } - }, "source-map": { "version": "0.6.1", "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", @@ -15671,7 +15435,6 @@ "version": "3.0.3", "resolved": "https://registry.npmjs.org/sparse-bitfield/-/sparse-bitfield-3.0.3.tgz", "integrity": "sha512-kvzhi7vqKTfkh0PZU+2D2PIllw2ymqJKujUcyPMd9Y75Nv4nPbGJZXNhxsgdQab2BmlDct1YnfQCguEvHr7VsQ==", - "optional": true, "requires": { "memory-pager": "^1.0.2" } diff --git a/api/package.json b/api/package.json index cc0b5eb6..c390bcd0 100644 --- a/api/package.json +++ b/api/package.json @@ -15,7 +15,6 @@ "@types/express": "^4.17.12", "@types/jest": "^27.0.2", "@types/mongodb": "^3.6.17", - "@types/mongoose": "^5.11.97", "@types/node": "^15.12.2", "@types/redis": "^2.8.29", "@types/supertest": "^2.0.11", @@ -35,8 +34,8 @@ "dotenv": "^10.0.0", "express": "^4.17.1", "mockingoose": "^2.15.2", - "mongodb": "^3.6.9", - "mongoose": "^6.4.6", + "mongodb": "^6.15.0", + "mongoose": "^8.13.2", "nodemailer": "^6.7.5", "opsgenie-sdk": "^0.5.1", "redis": "^3.1.2", diff --git a/api/run_server.sh b/api/run_server.sh index ad73dd72..68e8b9cb 100755 --- a/api/run_server.sh +++ b/api/run_server.sh @@ -1,2 +1,2 @@ #!/bin/bash -node src/server.js \ No newline at end of file +node --trace-deprecation src/server.js \ No newline at end of file diff --git a/api/src/server.ts b/api/src/server.ts index 19ab8ced..2ecda227 100644 --- a/api/src/server.ts +++ b/api/src/server.ts @@ -212,11 +212,11 @@ const mongoHost = process.env.DB_IP || "localhost"; const mongoPort = parseInt(process.env.DB_PORT || "27017"); const mongoDB = process.env.DB_NAME || "panicdb"; const mongoOptions: MongoClientOptions = { - useNewUrlParser: true, - useUnifiedTopology: true, socketTimeoutMS: 10000, connectTimeoutMS: 10000, serverSelectionTimeoutMS: 5000, + replicaSet: process.env.REPLICA_SET_NAME || "rs1", + readPreference: "primaryPreferred", }; const mongoInterface = new MongoInterface(mongoOptions, mongoHost, mongoPort); @@ -304,7 +304,7 @@ app.post( const query = { _id: { $in: baseChainsInput } }; const 
docs = await collection.find(query).toArray(); for (const doc of docs) { - const baseChainData: any = result.result[doc._id]; + const baseChainData: any = result.result[doc._id.toString()]; delete doc._id; for (const parentID in doc) { const chain = doc[parentID]; diff --git a/api/src/server/mongo.ts b/api/src/server/mongo.ts index fd04bdf6..89ba8479 100644 --- a/api/src/server/mongo.ts +++ b/api/src/server/mongo.ts @@ -1,58 +1,60 @@ -import MongoClient from "mongodb"; -import {MongoClientNotInitialised} from "../constant/errors"; +import { MongoClient, MongoClientOptions } from "mongodb"; +import { MongoClientNotInitialised } from "../constant/errors"; import { - MSG_MONGO_CONNECTION_ESTABLISHED, - MSG_MONGO_COULD_NOT_DISCONNECT, - MSG_MONGO_COULD_NOT_ESTABLISH_CONNECTION, - MSG_MONGO_DISCONNECTED + MSG_MONGO_CONNECTION_ESTABLISHED, + MSG_MONGO_COULD_NOT_DISCONNECT, + MSG_MONGO_COULD_NOT_ESTABLISH_CONNECTION, + MSG_MONGO_DISCONNECTED, } from "../constant/msg"; -export const MonitorablesCollection = 'monitorables'; +export const MonitorablesCollection = "monitorables"; export class MongoInterface { - private readonly url: string; - private readonly options: MongoClient.MongoClientOptions; - private _client?: MongoClient.MongoClient; + private readonly url: string; + private readonly options: MongoClientOptions; + private _client?: MongoClient; - constructor(options: MongoClient.MongoClientOptions, - host: string = "localhost", port: number = 27017) { - - this.options = options; - this.options.readPreference = 'primaryPreferred'; - this.options.replicaSet = 'replica-set'; + constructor( + options: MongoClientOptions, + host: string = "localhost", + port: number = 27017 + ) { + this.options = options; + this.options.readPreference = "primaryPreferred"; + this.options.replicaSet = "replica-set"; - this.url = `mongodb://rs1:${port},rs2:${port},rs3:${port}`; - this._client = undefined; - } + this.url = `mongodb://rs1:${port},rs2:${port},rs3:${port}`; + this._client = undefined; + } - get client() { - return this._client - } + get client() { + return this._client; + } - async connect() { - if (this._client) { - return; - } - try { - this._client = await MongoClient.connect(this.url, this.options); - console.log(MSG_MONGO_CONNECTION_ESTABLISHED) - } catch (err) { - console.error(MSG_MONGO_COULD_NOT_ESTABLISH_CONNECTION); - console.error(err); - } + async connect() { + if (this._client) { + return; + } + try { + this._client = await MongoClient.connect(this.url, this.options); + console.log(MSG_MONGO_CONNECTION_ESTABLISHED); + } catch (err) { + console.error(MSG_MONGO_COULD_NOT_ESTABLISH_CONNECTION); + console.error(err); } + } - async disconnect() { - if (this._client) { - try { - await this._client.close(); - console.log(MSG_MONGO_DISCONNECTED) - } catch (err) { - console.error(MSG_MONGO_COULD_NOT_DISCONNECT); - console.error(err) - } - } else { - throw new MongoClientNotInitialised() - } + async disconnect() { + if (this._client) { + try { + await this._client.close(); + console.log(MSG_MONGO_DISCONNECTED); + } catch (err) { + console.error(MSG_MONGO_COULD_NOT_DISCONNECT); + console.error(err); + } + } else { + throw new MongoClientNotInitialised(); } + } } diff --git a/api/src/util/MongooseUtil.ts b/api/src/util/MongooseUtil.ts index e9213824..149da3cb 100644 --- a/api/src/util/MongooseUtil.ts +++ b/api/src/util/MongooseUtil.ts @@ -1,197 +1,256 @@ -import {ObjectID} from "mongodb"; -import mongoose, {Document,Model,Schema} from "mongoose"; -import {ObjectUtil} from "./ObjectUtil"; +import { 
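// --- Editorial aside (illustrative sketch, not part of this patch) --------
// The rewritten MongoInterface keeps the static MongoClient.connect(url,
// options) call, which in the v6 driver resolves directly to a connected
// client. A self-contained sketch of the same connection pattern; the
// hostnames and port are assumed for illustration:
import { MongoClient } from "mongodb";

async function connectExample(): Promise<MongoClient> {
  const url = "mongodb://rs1:27017,rs2:27017,rs3:27017";
  return MongoClient.connect(url, { replicaSet: "replica-set" });
}
// The equivalent instance form: new MongoClient(url, opts).connect()
// --------------------------------------------------------------------------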
ObjectId } from "mongodb"; +import mongoose, { Document, Model, Schema } from "mongoose"; +import { ObjectUtil } from "./ObjectUtil"; import fs from "fs"; -import {TypeUtil} from "./TypeUtil"; +import { TypeUtil } from "./TypeUtil"; export class MongooseUtil { - - /** - * Checks if the Mongoose Document is Valid - * - * @param doc Mongoose Document - * @returns {boolean} - */ - public static async isValid(doc: Document): Promise { - try { - await doc.validate(); - return true; - } catch (e) { - return false; - } + /** + * Checks if the Mongoose Document is Valid + * + * @param doc Mongoose Document + * @returns {boolean} + */ + public static async isValid(doc: Document): Promise { + try { + await doc.validate(); + return true; + } catch (e) { + return false; } - - /** - * Enable virtual mode on mongoose and enable alias name - * - * @param schema - */ - public static virtualize(schema: Schema): void { - schema.set('toJSON', { - virtuals: true, - transform: (doc, ret) => { - if (ret._id) delete ret._id; - if (ret.config_type) delete ret.config_type; - - ret.id = doc._id; - }, - }); + } + + /** + * Enable virtual mode on mongoose and enable alias name + * + * @param schema + */ + public static virtualize(schema: Schema): void { + schema.set("toJSON", { + virtuals: true, + transform: (doc, ret) => { + if (ret._id) delete ret._id; + if (ret.config_type) delete ret.config_type; + + ret.id = doc._id; + }, + }); + } + + /** + * Deep Merge two objects + * + * @param obj1 The target object + * @param obj2 The fulfilled object + * @param ignoreFields array of field names to ignore (not include) + * @param parent + * @returns The merged object + */ + public static merge( + obj1: T | any, + obj2: any, + ignoreFields: string[] = [], + parent: T = null + ): T { + if (TypeUtil.isScalarValue(obj2)) { + return obj2; } - /** - * Deep Merge two objects - * - * @param obj1 The target object - * @param obj2 The fulfilled object - * @param ignoreFields array of field names to ignore (not include) - * @param parent - * @returns The merged object - */ - public static merge(obj1: T | any, obj2: any, - ignoreFields: string[] = [], - parent: T = null): T { - - if (TypeUtil.isScalarValue(obj2)) { - return obj2; - } + const isMongooseDocument = parent + ? !!parent && typeof parent["$isNew"] === "boolean" + : !!obj1 && typeof obj1["$isNew"] === "boolean"; - const isMongooseDocument = parent ? - (!!parent && typeof parent['$isNew'] === 'boolean') : - (!!obj1 && typeof obj1['$isNew'] === 'boolean'); + ignoreFields.forEach((invalid_field) => { + if (invalid_field in obj2) { + delete obj2[invalid_field]; + } + }); - ignoreFields.forEach(invalid_field => { - if (invalid_field in obj2) { - delete obj2[invalid_field]; - } - }); + //automatic update when edit + if (obj1 && obj1["modified"]) { + obj1["modified"] = new Date(); + } - //automatic update when edit - if (obj1 && obj1['modified']) { - obj1['modified'] = new Date(); + const convertToObjID = (x) => { + return typeof x === "string" && x.length === 24 ? new ObjectId(x) : x; + }; + Object.keys(obj2).forEach((x) => { + //to avoid changes on created and modified properties + if (x === "created" || x === "modified") { + return; + } + + if (Array.isArray(obj2[x])) { + //for empty arrays + if (obj2[x].length === 0) { + obj1[x] = obj2[x]; + return; } - const convertToObjID = x => { - return typeof x === 'string' && x.length === 24 ? 
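// --- Editorial aside (illustrative sketch, not part of this patch) --------
// MongooseUtil.virtualize() above installs a toJSON transform that hides the
// raw `_id` key and re-exposes it as a public `id` field. A standalone
// sketch of that technique, with the model name invented for the demo:
import mongoose, { Schema } from "mongoose";

const virtualizeDemo = new Schema({ name: String }, { versionKey: false });
virtualizeDemo.set("toJSON", {
  virtuals: true,
  transform: (doc, ret) => {
    delete ret._id;   // drop the internal key from serialised output
    ret.id = doc._id; // expose it under the public name instead
  },
});
const Demo = mongoose.model("VirtualizeDemo", virtualizeDemo);
// new Demo({ name: "x" }).toJSON() -> { name: "x", id: <ObjectId> }
// --------------------------------------------------------------------------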
new ObjectID(x) : x; + //Considering Set A for obj2, and Set B for obj1 the target + if (Array.isArray(obj1[x])) { + //We get the intersection of Sets (A & B) + obj1[x] = obj1[x].filter((b) => + obj2[x].some((a) => ObjectUtil.isObject(b) && a.id === b.id) + ); + + //and merge + obj1[x].map((b) => + MongooseUtil.merge( + b, + obj2[x].find((a) => b.id === a.id) + ) + ); + } else { + obj1[x] = []; } - Object.keys(obj2).forEach(x => { - //to avoid changes on created and modified properties - if (x === 'created' || x === 'modified') { - return; - } - - if (Array.isArray(obj2[x])) { - //for empty arrays - if (obj2[x].length === 0) { - obj1[x] = obj2[x]; - return; - } - - //Considering Set A for obj2, and Set B for obj1 the target - if (Array.isArray(obj1[x])) { - //We get the intersection of Sets (A & B) - obj1[x] = obj1[x].filter( - b => obj2[x].some(a => ObjectUtil.isObject(b) && a.id === b.id) - ); - - //and merge - obj1[x].map( - b => MongooseUtil.merge(b, obj2[x].find(a => b.id === a.id)) - ); - } else { - obj1[x] = []; - } - - //add new items to set B - obj2[x].filter(x => !x.id) - .forEach((b: object, key: number) => { - obj1[x].push(b); - }); - - return; - } else { - - //avoid undefined property - if (!obj1) { - obj1 = {}; - } - - if (x === 'id') { - //make sure to fulfil with objectID instance - - //validate before assignment - if (mongoose.Types.ObjectId.isValid(obj2[x])) { - obj1['_id'] = convertToObjID(obj2[x]); - } else { - obj1['_id'] = new ObjectID(); - } - - if (!isMongooseDocument) { - delete obj1[x]; - } - - return; - } - } - - let isReference = false; - if (mongoose.Types.ObjectId.isValid(obj1[x])) { - isReference = true; - - //if mongoose check it's a collection - if (isMongooseDocument) { - isReference = obj1[x]['collection'] !== undefined; - } - } - // to edit pre-existing reference - if (isReference) { + //add new items to set B + obj2[x] + .filter((x) => !x.id) + .forEach((b: object, key: number) => { + obj1[x].push(b); + }); + + return; + } else { + //avoid undefined property + if (!obj1) { + obj1 = {}; + } - let objID = null; + if (x === "id") { + //make sure to fulfil with objectID instance - if (obj2[x] && obj2[x].id) { - objID = convertToObjID(obj2[x].id); - } else if (mongoose.Types.ObjectId.isValid(obj2[x])) { - objID = convertToObjID(obj2[x]); - } + //validate before assignment + if (mongoose.Types.ObjectId.isValid(obj2[x])) { + obj1["_id"] = convertToObjID(obj2[x]); + } else { + obj1["_id"] = new ObjectId(); + } - obj1[x] = objID; + if (!isMongooseDocument) { + delete obj1[x]; + } - return; - } + return; + } + } - if (ObjectUtil.isObject(obj2[x])) { - //recursive changes on each property of object - obj1[x] = MongooseUtil.merge(obj1[x], obj2[x], [], obj1); - return; - } + let isReference = false; + if (mongoose.Types.ObjectId.isValid(obj1[x])) { + isReference = true; - obj1[x] = obj2[x]; + //if mongoose check it's a collection + if (isMongooseDocument) { + isReference = obj1[x]["collection"] !== undefined; + } + } - }); + // to edit pre-existing reference + if (isReference) { + let objID = null; - return obj1; - } + if (obj2[x] && obj2[x].id) { + objID = convertToObjID(obj2[x].id); + } else if (mongoose.Types.ObjectId.isValid(obj2[x])) { + objID = convertToObjID(obj2[x]); + } - /** - * Populates a model/collection from a JSON document in the base/dump directory. 
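// --- Editorial aside (illustrative sketch, not part of this patch) --------
// convertToObjID() above assumes every 24-character string is an ObjectId.
// A stricter guard would also require hex characters, since the bson
// ObjectId constructor rejects 24-character non-hex input. Sketch of a
// safer converter (the length check stays because ObjectId.isValid also
// accepts any 12-character string as a 12-byte id):
import { ObjectId } from "mongodb";

const toObjectIdIfPossible = (x: unknown): unknown =>
  typeof x === "string" && x.length === 24 && ObjectId.isValid(x)
    ? new ObjectId(x)
    : x;
// ObjectId.isValid("caffee".repeat(4))         -> true  (24 hex chars)
// ObjectId.isValid("not-a-hex-string!!!!!!!!") -> false
// --------------------------------------------------------------------------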
- * - * @param model The model which corresponds to a collection to populate - * @param fileName The name of the JSON document to use to populate collection - */ - public static async populateModel(model: Model, fileName: string): Promise { - try { - const jsonDocument = JSON.parse(fs.readFileSync(`${__dirname}/../../base/dump/${fileName}.json`, 'utf-8')); - const ids = jsonDocument.map(x => new ObjectID(x._id)); - const total = await model.find({ '_id': { '$in': ids } }).countDocuments(); - - //avoid duplicate _id key - if(total === 0){ - await model.insertMany(jsonDocument); - console.log(`Successfully populated ${fileName}.json`); + obj1[x] = objID; + + return; + } + + if (ObjectUtil.isObject(obj2[x])) { + //recursive changes on each property of object + obj1[x] = MongooseUtil.merge(obj1[x], obj2[x], [], obj1); + return; + } + + obj1[x] = obj2[x]; + }); + + return obj1; + } + + /** + * Populates a model/collection from a JSON document in the base/dump directory. + * + * @param model The model which corresponds to a collection to populate + * @param fileName The name of the JSON document to use to populate collection + */ + public static async populateModel( + model: Model, + fileName: string + ): Promise { + try { + const filePath = `${__dirname}/../../base/dump/${fileName}.json`; + if (!fs.existsSync(filePath)) { + return; + } + const fileContent = fs.readFileSync(filePath, "utf-8"); + const jsonDocument: any[] = JSON.parse(fileContent); + + if (!jsonDocument || jsonDocument.length === 0) { + return; + } + + const idsToCheck = jsonDocument + .map((doc) => { + if ( + !doc._id || + typeof doc._id !== "string" || + !ObjectId.isValid(doc._id) + ) { + return null; + } + return ObjectId.createFromHexString(doc._id); + }) + .filter((id) => id !== null) as ObjectId[]; + + if (idsToCheck.length === 0) { + return; + } + + const existingDocs = await model + .find({ _id: { $in: idsToCheck } }, { _id: 1 }) + .lean(); + const existingIds = new Set( + existingDocs.map((doc) => doc._id.toString()) + ); + + const docsToInsert = jsonDocument.filter((doc) => { + return ( + doc._id && typeof doc._id === "string" && !existingIds.has(doc._id) + ); + }); + + if (docsToInsert.length > 0) { + const preparedDocs = docsToInsert.map((doc) => ({ + ...doc, + _id: ObjectId.createFromHexString(doc._id), + })); + await model + .insertMany(preparedDocs, { ordered: false }) + .catch((err) => { + if (err.code !== 11000) { + console.error( + `Non-duplicate error during insertMany for ${fileName}: ${err}` + ); + } else { + console.warn( + `Duplicate key error during insertMany for ${fileName}, despite pre-check. (Potential race condition?)` + ); } - - } catch (err) { - console.error(`Failed to populate ${fileName}: ${err}`); - } + }); + // Log success based on attempted insertion, acknowledge potential individual errors handled above. 
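// --- Editorial aside (illustrative sketch, not part of this patch) --------
// The populateModel() rewrite above leans on two MongoDB behaviours: with
// { ordered: false }, insertMany continues past individual failures instead
// of stopping at the first one, and duplicate keys surface as a bulk-write
// error whose code is 11000, exactly the code the catch above filters on.
// A hedged sketch of that shape, with DemoModel standing in for any model:
import mongoose from "mongoose";

async function seedIgnoringDuplicates(
  DemoModel: mongoose.Model<any>,
  docs: object[]
): Promise<void> {
  try {
    await DemoModel.insertMany(docs, { ordered: false });
  } catch (err: any) {
    if (err.code === 11000) {
      console.warn("duplicate keys skipped; non-duplicates were inserted");
    } else {
      throw err; // anything else is a real failure
    }
  }
}
// --------------------------------------------------------------------------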
+ console.log( + `Attempted to insert ${docsToInsert.length} new documents from ${fileName}.json` + ); + } + } catch (err) { + console.error(`Failed processing ${fileName}: ${err}`); } + } } diff --git a/api/src/v1/builder/BaseChainModelBuilder.ts b/api/src/v1/builder/BaseChainModelBuilder.ts index 007a14f8..1a445445 100644 --- a/api/src/v1/builder/BaseChainModelBuilder.ts +++ b/api/src/v1/builder/BaseChainModelBuilder.ts @@ -1,67 +1,74 @@ -import mongoose, {Model, Schema} from "mongoose"; -import {BaseChain} from "../../../../entities/ts/BaseChain"; -import {Collection, ModelName} from "../../constant/mongoose"; -import {MongooseUtil} from "../../util/MongooseUtil"; -import {ObjectUtil} from "../../util/ObjectUtil"; -import {BaseMongoose} from "../entity/model/BaseMongoose"; -import {IModelBuilder} from "./IModelBuilder"; +import mongoose, { Model, Schema } from "mongoose"; +import { BaseChain } from "../../../../entities/ts/BaseChain"; +import { Collection, ModelName } from "../../constant/mongoose"; +import { MongooseUtil } from "../../util/MongooseUtil"; +import { ObjectUtil } from "../../util/ObjectUtil"; +import { BaseMongoose } from "../entity/model/BaseMongoose"; +import { IModelBuilder } from "./IModelBuilder"; /** * Builder to create Mongoose Schema and Model of BaseChain Entity */ export class BaseChainModelBuilder implements IModelBuilder { + private _schema: Schema = null; + private _model: Model = null; - private _schema: Schema = null; - private _model: Model = null; + public produceSchema(): void { + const entity = {} as BaseChain; + entity.value = { + type: String, + required: [true, "Value is required!"], + default: null, + } as any; + entity.sources = [ + { + type: Schema.Types.ObjectId, + ref: ModelName.GENERIC, + } as any, + ]; + entity.thresholdAlerts = [ + { + type: Schema.Types.ObjectId, + ref: ModelName.GENERIC, + alias: "thresholdAlerts", + } as any, + ]; + entity.severityAlerts = [ + { + type: Schema.Types.ObjectId, + ref: ModelName.SEVERITY_ALERT_SUBCONFIG, + alias: "severityAlerts", + } as any, + ]; + entity.timeWindowAlerts = [ + { + type: Schema.Types.ObjectId, + ref: ModelName.GENERIC, + alias: "timeWindowAlerts", + } as any, + ]; - public produceSchema(): void { + const obj = Object.assign(entity, new BaseMongoose()); + this._schema = new Schema(ObjectUtil.camelToSnake(obj), { + versionKey: false, + }); - const entity = {} as BaseChain; - entity.value = { - type: String, - required: [true, 'Value is required!'], - default: null - } as any; - entity.sources = [{ - type: Schema.Types.ObjectId, - ref: ModelName.GENERIC - } as any]; - entity.thresholdAlerts = [{ - type: Schema.Types.ObjectId, - ref: ModelName.GENERIC, - alias: 'thresholdAlerts' - } as any]; - entity.severityAlerts = [{ - type: Schema.Types.ObjectId, - ref: ModelName.SEVERITY_ALERT_SUBCONFIG, - alias: 'severityAlerts' - } as any]; - entity.timeWindowAlerts = [{ - type: Schema.Types.ObjectId, - ref: ModelName.GENERIC, - alias: 'timeWindowAlerts' - } as any]; + MongooseUtil.virtualize(this._schema); + } - const obj = Object.assign(entity, new BaseMongoose()); - this._schema = new Schema(ObjectUtil.camelToSnake(obj), - {versionKey: false}); + public produceModel(): void { + this._model = mongoose.model( + ModelName.BASE_CHAIN, + this._schema, + Collection.BASE_CHAIN + ); + } - MongooseUtil.virtualize(this._schema); - } + public get model(): Model { + return this._model; + } - public produceModel(): void { - this._model = mongoose.model( - ModelName.BASE_CHAIN, - this._schema, - 
Collection.BASE_CHAIN - ) as Model; - } - - public get model(): Model { - return this._model; - } - - public get schema(): Schema { - return this._schema; - } + public get schema(): Schema { + return this._schema; + } } diff --git a/api/src/v1/builder/ConfigModelBuilder.ts b/api/src/v1/builder/ConfigModelBuilder.ts index d659445f..2a9ae6b6 100644 --- a/api/src/v1/builder/ConfigModelBuilder.ts +++ b/api/src/v1/builder/ConfigModelBuilder.ts @@ -1,146 +1,143 @@ -import mongoose, {Model, Schema} from 'mongoose'; -import {Config} from '../../../../entities/ts/Config'; -import {Collection, ModelName} from '../../constant/mongoose'; -import {MongooseUtil} from '../../util/MongooseUtil'; -import {ObjectUtil} from '../../util/ObjectUtil'; -import {BaseChainModel} from '../entity/model/BaseChainModel'; -import {ContractSubconfigSchema} from '../entity/model/ContractSubconfigSchema'; -import {EVMNodeSubconfigSchema} from '../entity/model/EVMNodeSubconfigSchema'; -import {NodeSubconfigSchema} from '../entity/model/NodeSubconfigSchema'; -import {RepositorySubconfigSchema} from '../entity/model/RepositorySubconfigSchema'; -import {SeverityAlertSubconfigSchema} from '../entity/model/SeverityAlertSubconfigSchema'; -import {SubChainSchema} from '../entity/model/SubChainSchema'; -import {SystemSubconfigSchema} from '../entity/model/SystemSubconfigSchema'; -import {ThresholdAlertSubconfigSchema} from '../entity/model/ThresholdAlertSubconfigSchema'; -import {IModelBuilder} from './IModelBuilder'; -import {TimeWindowAlertSubconfigSchema} from "../entity/model/TimeWindowAlertSubconfigSchema"; -import {GenericRepository} from '../entity/repository/GenericRepository'; -import {BaseChainRepository} from '../entity/repository/BaseChainRepository'; -import {Base} from '../../../../entities/ts/Base'; +import mongoose, { Model, Schema } from "mongoose"; +import { Config } from "../../../../entities/ts/Config"; +import { Collection, ModelName } from "../../constant/mongoose"; +import { MongooseUtil } from "../../util/MongooseUtil"; +import { ObjectUtil } from "../../util/ObjectUtil"; +import { BaseChainModel } from "../entity/model/BaseChainModel"; +import { ContractSubconfigSchema } from "../entity/model/ContractSubconfigSchema"; +import { EVMNodeSubconfigSchema } from "../entity/model/EVMNodeSubconfigSchema"; +import { NodeSubconfigSchema } from "../entity/model/NodeSubconfigSchema"; +import { RepositorySubconfigSchema } from "../entity/model/RepositorySubconfigSchema"; +import { SeverityAlertSubconfigSchema } from "../entity/model/SeverityAlertSubconfigSchema"; +import { SubChainSchema } from "../entity/model/SubChainSchema"; +import { SystemSubconfigSchema } from "../entity/model/SystemSubconfigSchema"; +import { ThresholdAlertSubconfigSchema } from "../entity/model/ThresholdAlertSubconfigSchema"; +import { IModelBuilder } from "./IModelBuilder"; +import { TimeWindowAlertSubconfigSchema } from "../entity/model/TimeWindowAlertSubconfigSchema"; +import { GenericRepository } from "../entity/repository/GenericRepository"; +import { BaseChainRepository } from "../entity/repository/BaseChainRepository"; +import { Base } from "../../../../entities/ts/Base"; /** * Builder to create Mongoose Schema and Model of Config Entity */ export class ConfigModelBuilder implements IModelBuilder { + private _schema: Schema = null; + private _model: Model = null; - private _schema: Schema = null; - private _model: Model = null; + public produceSchema(): void { + new BaseChainModel(); - public produceSchema(): void { + const config = {} 
as Config; - new BaseChainModel(); + config["configType"] = { + type: Schema.Types.ObjectId, + ref: ModelName.GENERIC, + validate: { + validator: async (id: string) => { + const repo = new GenericRepository(); + return await repo.exists(id); + }, + message: (props) => `Reference with id ${props.value} doesn't exists`, + }, + } as any; + config.status = Boolean as any; + config.created = { + type: Date, + default: null, + } as any; + config.modified = { + type: Date, + default: null, + } as any; + config.ready = Boolean as any; + config.baseChain = { + type: Schema.Types.ObjectId, + ref: ModelName.BASE_CHAIN, + required: [true, "Base chain is required!"], + alias: "baseChain", + default: null, + validate: { + validator: async (id: string) => { + const repo = new BaseChainRepository(); + return await repo.exists(id); + }, + message: (props) => `Reference with id ${props.value} doesn't exists`, + }, + } as any; + config.subChain = { + type: SubChainSchema, + required: [true, "Sub chain is required!"], + alias: "subChain", + default: null, + } as any; + config.contract = { + type: ContractSubconfigSchema, + default: null, + } as any; + config.nodes = { + type: [NodeSubconfigSchema], + validate: this.nameDuplicateValidator(), + } as any; + config.evm_nodes = { + type: [EVMNodeSubconfigSchema], + validate: this.nameDuplicateValidator(), + } as any; + config.systems = { + type: [SystemSubconfigSchema], + validate: this.nameDuplicateValidator(), + } as any; + config.repositories = { + type: [RepositorySubconfigSchema], + validate: this.nameDuplicateValidator(), + } as any; + config.thresholdAlerts = { + type: [ThresholdAlertSubconfigSchema], + alias: "thresholdAlerts", + } as any; + config.severityAlerts = { + type: [SeverityAlertSubconfigSchema], + alias: "severityAlerts", + } as any; + config.timeWindowAlerts = { + type: [TimeWindowAlertSubconfigSchema], + alias: "timeWindowAlerts", + } as any; - const config = {} as Config; + this._schema = new Schema(ObjectUtil.camelToSnake(config), { + versionKey: false, + }); - config['configType'] = { - type: Schema.Types.ObjectId, - ref: ModelName.GENERIC, - validate: { - validator: async (id: string) => { - const repo = new GenericRepository(); - return await repo.exists(id); - }, - message: props => `Reference with id ${props.value} doesn't exists` - } - } as any; - config.status = Boolean as any; - config.created = { - type: Date, - default: null - } as any; - config.modified = { - type: Date, - default: null - } as any; - config.ready = Boolean as any; - config.baseChain = { - type: Schema.Types.ObjectId, - ref: ModelName.BASE_CHAIN, - required: [true, 'Base chain is required!'], - alias: 'baseChain', - default: null, - validate: { - validator: async (id: string) => { - const repo = new BaseChainRepository(); - return await repo.exists(id); - }, - message: props => `Reference with id ${props.value} doesn't exists` - } - } as any; - config.subChain = { - type: SubChainSchema, - required: [true, 'Sub chain is required!'], - alias: 'subChain', - default: null - } as any; - config.contract = { - type: ContractSubconfigSchema, - default: null - } as any; - config.nodes = { - type: [NodeSubconfigSchema], - validate: this.nameDuplicateValidator(), - } as any; - config.evm_nodes = { - type: [EVMNodeSubconfigSchema], - validate: this.nameDuplicateValidator(), - } as any; - config.systems = { - type: [SystemSubconfigSchema], - validate: this.nameDuplicateValidator(), - } as any; - config.repositories = { - type: [RepositorySubconfigSchema], - validate: 
this.nameDuplicateValidator(), - } as any; - config.thresholdAlerts = { - type: [ThresholdAlertSubconfigSchema], - alias: 'thresholdAlerts', - } as any; - config.severityAlerts = { - type: [SeverityAlertSubconfigSchema], - alias: 'severityAlerts', - } as any; - config.timeWindowAlerts = { - type: [TimeWindowAlertSubconfigSchema], - alias: 'timeWindowAlerts', - } as any; + MongooseUtil.virtualize(this._schema); + } - this._schema = new Schema( - ObjectUtil.camelToSnake(config), - {versionKey: false} - ); + public produceModel(): void { + this._model = mongoose.model( + ModelName.CONFIG, + this._schema, + Collection.CONFIG + ); + } - MongooseUtil.virtualize(this._schema); - } + public get model(): Model { + return this._model; + } - public produceModel(): void { - this._model = mongoose.model( - ModelName.CONFIG, - this._schema, - Collection.CONFIG - ) as Model; - } + public get schema(): Schema { + return this._schema; + } - public get model(): Model { - return this._model; - } - - public get schema(): Schema { - return this._schema; - } - - /** - * Custom validator for arrays/lists by property name - * @returns - */ - private nameDuplicateValidator(): object { - return { - validator: async (list: Base[]) => { - const uniques = new Set(list.map(item => item.name)); - return list.length === uniques.size; - }, - message: 'Name duplicated' - } - } + /** + * Custom validator for arrays/lists by property name + * @returns + */ + private nameDuplicateValidator(): object { + return { + validator: async (list: Base[]) => { + const uniques = new Set(list.map((item) => item.name)); + return list.length === uniques.size; + }, + message: "Name duplicated", + }; + } } diff --git a/api/src/v1/builder/ConfigOldModelBuilder.ts b/api/src/v1/builder/ConfigOldModelBuilder.ts index 636783e6..1fafeac3 100644 --- a/api/src/v1/builder/ConfigOldModelBuilder.ts +++ b/api/src/v1/builder/ConfigOldModelBuilder.ts @@ -1,42 +1,40 @@ -import mongoose, { Model, Schema } from 'mongoose'; -import { Config } from '../../../../entities/ts/Config'; -import { Collection, ModelName } from '../../constant/mongoose'; -import { MongooseUtil } from '../../util/MongooseUtil'; -import { IModelBuilder } from './IModelBuilder'; -import { ConfigModelBuilder } from './ConfigModelBuilder'; +import mongoose, { Model, Schema } from "mongoose"; +import { Config } from "../../../../entities/ts/Config"; +import { Collection, ModelName } from "../../constant/mongoose"; +import { MongooseUtil } from "../../util/MongooseUtil"; +import { IModelBuilder } from "./IModelBuilder"; +import { ConfigModelBuilder } from "./ConfigModelBuilder"; /** * Builder to create Mongoose Schema and Model of Config Entity */ export class ConfigOldModelBuilder implements IModelBuilder { - - private _schema: Schema = null; - private _model: Model = null; - - public produceSchema(): void { - - //make copy - const builder = new ConfigModelBuilder() - builder.produceSchema(); - - this._schema = builder.schema; - - MongooseUtil.virtualize(this._schema); - } - - public produceModel(): void { - this._model = mongoose.model( - ModelName.CONFIG_OLD, - this._schema, - Collection.CONFIG_OLD - ) as Model; - } - - public get model(): Model { - return this._model; - } - - public get schema(): Schema { - return this._schema; - } + private _schema: Schema = null; + private _model: Model = null; + + public produceSchema(): void { + //make copy + const builder = new ConfigModelBuilder(); + builder.produceSchema(); + + this._schema = builder.schema; + + 
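// --- Editorial aside (illustrative sketch, not part of this patch) --------
// Two custom-validator patterns recur in ConfigModelBuilder above: an async
// validator that checks a referenced id through a repository exists() call,
// and the Set-cardinality trick in nameDuplicateValidator(): if any two
// items share a name, the Set comes out smaller than the array. The latter
// in isolation:
const hasUniqueNames = (items: { name: string }[]): boolean =>
  items.length === new Set(items.map((item) => item.name)).size;

// hasUniqueNames([{ name: "a" }, { name: "b" }]) -> true
// hasUniqueNames([{ name: "a" }, { name: "a" }]) -> false
// --------------------------------------------------------------------------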
MongooseUtil.virtualize(this._schema); + } + + public produceModel(): void { + this._model = mongoose.model( + ModelName.CONFIG_OLD, + this._schema, + Collection.CONFIG_OLD + ); + } + + public get model(): Model { + return this._model; + } + + public get schema(): Schema { + return this._schema; + } } diff --git a/api/src/v1/builder/GenericModelBuilder.ts b/api/src/v1/builder/GenericModelBuilder.ts index 70420805..183b7b1d 100644 --- a/api/src/v1/builder/GenericModelBuilder.ts +++ b/api/src/v1/builder/GenericModelBuilder.ts @@ -1,53 +1,50 @@ -import mongoose, {Model, Schema} from 'mongoose'; -import {Generic} from '../../../../entities/ts/Generic'; -import {BaseMongoose} from '../entity/model/BaseMongoose'; -import {IModelBuilder} from './IModelBuilder'; -import {Collection, ModelName} from '../../constant/mongoose'; -import {MongooseUtil} from "../../util/MongooseUtil"; +import mongoose, { Model, Schema } from "mongoose"; +import { Generic } from "../../../../entities/ts/Generic"; +import { BaseMongoose } from "../entity/model/BaseMongoose"; +import { IModelBuilder } from "./IModelBuilder"; +import { Collection, ModelName } from "../../constant/mongoose"; +import { MongooseUtil } from "../../util/MongooseUtil"; /** * Builder to create Mongoose Schema and Model of Generic Entity */ export class GenericModelBuilder implements IModelBuilder { - - private _schema: Schema = null; - private _model: Model = null; - - public produceSchema(): void { - - const entity = {} as Generic; - entity.description = {type: String, default: null} as any; - entity.group = { - type: String, - required: [true, 'Group is required!'], - default: null - } as any; - entity.value = { - type: String, - required: [true, 'Value is required!'], - default: null - } as any; - - const obj = Object.assign(entity, new BaseMongoose()); - this._schema = new Schema(obj as object, - {versionKey: false}); - - MongooseUtil.virtualize(this._schema); - } - - public produceModel(): void { - this._model = mongoose.model( - ModelName.GENERIC, - this._schema, - Collection.GENERIC - ) as Model; - } - - public get model(): Model { - return this._model; - } - - public get schema(): Schema { - return this._schema; - } + private _schema: Schema = null; + private _model: Model = null; + + public produceSchema(): void { + const entity = {} as Generic; + entity.description = { type: String, default: null } as any; + entity.group = { + type: String, + required: [true, "Group is required!"], + default: null, + } as any; + entity.value = { + type: String, + required: [true, "Value is required!"], + default: null, + } as any; + + const obj = Object.assign(entity, new BaseMongoose()); + this._schema = new Schema(obj as object, { versionKey: false }); + + MongooseUtil.virtualize(this._schema); + } + + public produceModel(): void { + this._model = mongoose.model( + ModelName.GENERIC, + this._schema, + Collection.GENERIC + ); + } + + public get model(): Model { + return this._model; + } + + public get schema(): Schema { + return this._schema; + } } diff --git a/api/src/v1/builder/SeverityAlertSubconfigModelBuilder.ts b/api/src/v1/builder/SeverityAlertSubconfigModelBuilder.ts index 21ab0ae1..2f65c2f0 100644 --- a/api/src/v1/builder/SeverityAlertSubconfigModelBuilder.ts +++ b/api/src/v1/builder/SeverityAlertSubconfigModelBuilder.ts @@ -1,58 +1,59 @@ -import mongoose, {Model, Schema} from "mongoose"; -import {SeverityAlertSubconfig} from "../../../../entities/ts/SeverityAlertSubconfig"; -import {ObjectUtil} from "../../util/ObjectUtil"; -import 
{IModelBuilder} from "./IModelBuilder"; -import {Collection, ModelName} from "../../constant/mongoose"; -import {MongooseUtil} from "../../util/MongooseUtil"; -import {StringUtil} from "../../util/StringUtil"; +import mongoose, { Model, Schema } from "mongoose"; +import { SeverityAlertSubconfig } from "../../../../entities/ts/SeverityAlertSubconfig"; +import { ObjectUtil } from "../../util/ObjectUtil"; +import { IModelBuilder } from "./IModelBuilder"; +import { Collection, ModelName } from "../../constant/mongoose"; +import { MongooseUtil } from "../../util/MongooseUtil"; +import { StringUtil } from "../../util/StringUtil"; /** * Builder to create Mongoose Schema of SeverityAlertSubconfig Entity */ -export class SeverityAlertSubconfigModelBuilder implements IModelBuilder { - - private _schema: Schema = null; - private _model: Model = null; - - public produceSchema(): void { - - const entity = {} as SeverityAlertSubconfig; - entity.status = {type: Boolean, default: true} as any; - entity.created = {type: Date, default: Date.now} as any; - entity.modified = {type: Date, default: null} as any; - entity.name = { - type: String, - default: null, - set: StringUtil.trim - } as any; - entity.value = {type: String, default: null} as any; - entity.description = {type: String, default: null} as any; - entity.group = {type: String, default: null} as any; - entity.type = { - type: Schema.Types.ObjectId, - ref: ModelName.GENERIC, - required: [true, 'Severity type is required!'], - } as any; - entity.enabled = {type: Boolean, default: true} as any; - - this._schema = new Schema(ObjectUtil.camelToSnake(entity), - {versionKey: false}); - MongooseUtil.virtualize(this._schema); - } - - public produceModel(): void { - this._model = mongoose.model( - ModelName.SEVERITY_ALERT_SUBCONFIG, - this._schema, - Collection.GENERIC - ) as Model; - } - - public get model(): Model { - return this._model; - } - - public get schema(): Schema { - return this._schema; - } +export class SeverityAlertSubconfigModelBuilder + implements IModelBuilder +{ + private _schema: Schema = null; + private _model: Model = null; + + public produceSchema(): void { + const entity = {} as SeverityAlertSubconfig; + entity.status = { type: Boolean, default: true } as any; + entity.created = { type: Date, default: Date.now } as any; + entity.modified = { type: Date, default: null } as any; + entity.name = { + type: String, + default: null, + set: StringUtil.trim, + } as any; + entity.value = { type: String, default: null } as any; + entity.description = { type: String, default: null } as any; + entity.group = { type: String, default: null } as any; + entity.type = { + type: Schema.Types.ObjectId, + ref: ModelName.GENERIC, + required: [true, "Severity type is required!"], + } as any; + entity.enabled = { type: Boolean, default: true } as any; + + this._schema = new Schema(ObjectUtil.camelToSnake(entity), { + versionKey: false, + }); + MongooseUtil.virtualize(this._schema); + } + + public produceModel(): void { + this._model = mongoose.model( + ModelName.SEVERITY_ALERT_SUBCONFIG, + this._schema, + Collection.GENERIC + ); + } + + public get model(): Model { + return this._model; + } + + public get schema(): Schema { + return this._schema; + } } diff --git a/api/src/v1/builder/ThresholdAlertSubconfigModelBuilder.ts b/api/src/v1/builder/ThresholdAlertSubconfigModelBuilder.ts index 18597515..fc54a3f7 100644 --- a/api/src/v1/builder/ThresholdAlertSubconfigModelBuilder.ts +++ b/api/src/v1/builder/ThresholdAlertSubconfigModelBuilder.ts @@ -1,64 +1,69 @@ 
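// --- Editorial aside (illustrative sketch, not part of this patch) --------
// The alert-subconfig builders below persist snake_case keys (the property
// names pass through ObjectUtil.camelToSnake) while exposing camelCase
// accessors via Mongoose's `alias` option. A minimal sketch of the alias
// mechanism, mirroring the repeat_enabled/repeatEnabled pair; the model
// name here is invented for the demo:
import mongoose, { Schema } from "mongoose";

const aliasDemo = new Schema(
  { repeat_enabled: { type: Boolean, default: false, alias: "repeatEnabled" } },
  { versionKey: false }
);
const AliasDemo = mongoose.model("AliasDemo", aliasDemo);

const d = new AliasDemo({ repeatEnabled: true }); // write through the alias
// d.repeat_enabled === true                      // stored under snake_case
// --------------------------------------------------------------------------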
-import mongoose, {Model, Schema} from "mongoose"; -import {ThresholdAlertSubconfig} from "../../../../entities/ts/ThresholdAlertSubconfig"; -import {ObjectUtil} from "../../util/ObjectUtil"; -import {IModelBuilder} from "./IModelBuilder"; -import {MongooseUtil} from "../../util/MongooseUtil"; -import {StringUtil} from "../../util/StringUtil"; -import {Collection, ModelName} from "../../constant/mongoose"; +import mongoose, { Model, Schema } from "mongoose"; +import { ThresholdAlertSubconfig } from "../../../../entities/ts/ThresholdAlertSubconfig"; +import { ObjectUtil } from "../../util/ObjectUtil"; +import { IModelBuilder } from "./IModelBuilder"; +import { MongooseUtil } from "../../util/MongooseUtil"; +import { StringUtil } from "../../util/StringUtil"; +import { Collection, ModelName } from "../../constant/mongoose"; /** * Builder to create Mongoose Schema of ThresholdAlertSubconfig Entity */ -export class ThresholdAlertSubconfigModelBuilder implements IModelBuilder { +export class ThresholdAlertSubconfigModelBuilder + implements IModelBuilder +{ + private _schema: Schema = null; + private _model: Model = null; - private _schema: Schema = null; - private _model: Model = null; + public produceSchema(): void { + const entity = {} as ThresholdAlertSubconfig; + entity.status = { type: Boolean, default: true } as any; + entity.created = { type: Date, default: Date.now } as any; + entity.modified = { type: Date, default: null } as any; + entity.name = { + type: String, + default: null, + set: StringUtil.trim, + } as any; + entity.value = { type: String, default: null } as any; + entity.description = { type: String, default: null } as any; + entity.group = { type: String, default: null } as any; + entity.warning = { + enabled: { type: Boolean, default: false } as any, + threshold: { type: Number, default: 0 } as any, + } as any; + entity.critical = { + enabled: { type: Boolean, default: false } as any, + repeat_enabled: { + type: Boolean, + default: false, + alias: "repeatEnabled", + } as any, + threshold: { type: Number, default: 0 } as any, + repeat: { type: Number, default: 0 } as any, + } as any; + entity.adornment = { type: String, default: null } as any; + entity.enabled = { type: Boolean, default: true } as any; - public produceSchema(): void { + this._schema = new Schema(ObjectUtil.camelToSnake(entity), { + versionKey: false, + }); + MongooseUtil.virtualize(this._schema); + } - const entity = {} as ThresholdAlertSubconfig; - entity.status = {type: Boolean, default: true} as any; - entity.created = {type: Date, default: Date.now} as any; - entity.modified = {type: Date, default: null} as any; - entity.name = { - type: String, - default: null, - set: StringUtil.trim - } as any; - entity.value = {type: String, default: null} as any; - entity.description = {type: String, default: null} as any; - entity.group = {type: String, default: null} as any; - entity.warning = { - enabled: {type: Boolean, default: false} as any, - threshold: {type: Number, default: 0} as any, - } as any; - entity.critical = { - enabled: {type: Boolean, default: false} as any, - repeat_enabled: {type: Boolean, default: false, alias: 'repeatEnabled'} as any, - threshold: {type: Number, default: 0} as any, - repeat: {type: Number, default: 0} as any, - } as any; - entity.adornment = {type: String, default: null} as any; - entity.enabled = {type: Boolean, default: true} as any; + public produceModel(): void { + this._model = mongoose.model( + ModelName.THRESHOLD_ALERT_SUBCONFIG, + this._schema, + Collection.GENERIC + 
); + } - this._schema = new Schema(ObjectUtil.camelToSnake(entity), - {versionKey: false}); - MongooseUtil.virtualize(this._schema); - } + public get model(): Model { + return this._model; + } - public produceModel(): void { - this._model = mongoose.model( - ModelName.THRESHOLD_ALERT_SUBCONFIG, - this._schema, - Collection.GENERIC - ) as Model; - } - - public get model(): Model { - return this._model; - } - - public get schema(): Schema { - return this._schema; - } + public get schema(): Schema { + return this._schema; + } } diff --git a/api/src/v1/builder/TimeWindowAlertSubconfigModelBuilder.ts b/api/src/v1/builder/TimeWindowAlertSubconfigModelBuilder.ts index b138b503..7f62a62d 100644 --- a/api/src/v1/builder/TimeWindowAlertSubconfigModelBuilder.ts +++ b/api/src/v1/builder/TimeWindowAlertSubconfigModelBuilder.ts @@ -1,66 +1,71 @@ -import mongoose, {Model, Schema} from "mongoose"; -import {TimeWindowAlertSubconfig} from "../../../../entities/ts/TimeWindowAlertSubconfig"; -import {ObjectUtil} from "../../util/ObjectUtil"; -import {IModelBuilder} from "./IModelBuilder"; -import {MongooseUtil} from "../../util/MongooseUtil"; -import {StringUtil} from "../../util/StringUtil"; -import {Collection, ModelName} from "../../constant/mongoose"; +import mongoose, { Model, Schema } from "mongoose"; +import { TimeWindowAlertSubconfig } from "../../../../entities/ts/TimeWindowAlertSubconfig"; +import { ObjectUtil } from "../../util/ObjectUtil"; +import { IModelBuilder } from "./IModelBuilder"; +import { MongooseUtil } from "../../util/MongooseUtil"; +import { StringUtil } from "../../util/StringUtil"; +import { Collection, ModelName } from "../../constant/mongoose"; /** * Builder to create Mongoose Schema of TimeWindowAlertSubconfig Entity */ -export class TimeWindowAlertSubconfigModelBuilder implements IModelBuilder { +export class TimeWindowAlertSubconfigModelBuilder + implements IModelBuilder +{ + private _schema: Schema = null; + private _model: Model = null; - private _schema: Schema = null; - private _model: Model = null; + public produceSchema(): void { + const entity = {} as TimeWindowAlertSubconfig; + entity.status = { type: Boolean, default: true } as any; + entity.created = { type: Date, default: Date.now } as any; + entity.modified = { type: Date, default: null } as any; + entity.name = { + type: String, + default: null, + set: StringUtil.trim, + } as any; + entity.value = { type: String, default: null } as any; + entity.description = { type: String, default: null } as any; + entity.group = { type: String, default: null } as any; + entity.warning = { + enabled: { type: Boolean, default: false } as any, + threshold: { type: Number, default: 0 } as any, + time_window: { type: Number, default: 0, alias: "timeWindow" } as any, + } as any; + entity.critical = { + enabled: { type: Boolean, default: false } as any, + repeat_enabled: { + type: Boolean, + default: false, + alias: "repeatEnabled", + } as any, + threshold: { type: Number, default: 0 } as any, + repeat: { type: Number, default: 0 } as any, + time_window: { type: Number, default: 0, alias: "timeWindow" } as any, + } as any; + entity.adornment = { type: String, default: null } as any; + entity.enabled = { type: Boolean, default: true } as any; - public produceSchema(): void { + this._schema = new Schema(ObjectUtil.camelToSnake(entity), { + versionKey: false, + }); + MongooseUtil.virtualize(this._schema); + } - const entity = {} as TimeWindowAlertSubconfig; - entity.status = {type: Boolean, default: true} as any; - entity.created = {type: 
Date, default: Date.now} as any; - entity.modified = {type: Date, default: null} as any; - entity.name = { - type: String, - default: null, - set: StringUtil.trim - } as any; - entity.value = {type: String, default: null} as any; - entity.description = {type: String, default: null} as any; - entity.group = {type: String, default: null} as any; - entity.warning = { - enabled: {type: Boolean, default: false} as any, - threshold: {type: Number, default: 0} as any, - time_window: {type: Number, default: 0, alias: 'timeWindow'} as any, - } as any; - entity.critical = { - enabled: {type: Boolean, default: false} as any, - repeat_enabled: {type: Boolean, default: false, alias: 'repeatEnabled'} as any, - threshold: {type: Number, default: 0} as any, - repeat: {type: Number, default: 0} as any, - time_window: {type: Number, default: 0, alias: 'timeWindow'} as any, - } as any; - entity.adornment = {type: String, default: null} as any; - entity.enabled = {type: Boolean, default: true} as any; + public produceModel(): void { + this._model = mongoose.model( + ModelName.TIME_WINDOW_ALERT_SUBCONFIG, + this._schema, + Collection.GENERIC + ); + } - this._schema = new Schema(ObjectUtil.camelToSnake(entity), - {versionKey: false}); - MongooseUtil.virtualize(this._schema); - } + public get model(): Model { + return this._model; + } - public produceModel(): void { - this._model = mongoose.model( - ModelName.TIME_WINDOW_ALERT_SUBCONFIG, - this._schema, - Collection.GENERIC - ) as Model; - } - - public get model(): Model { - return this._model; - } - - public get schema(): Schema { - return this._schema; - } + public get schema(): Schema { + return this._schema; + } } diff --git a/api/src/v1/builder/channels/EmailModelBuilder.ts b/api/src/v1/builder/channels/EmailModelBuilder.ts index 462a14e9..726e3dff 100644 --- a/api/src/v1/builder/channels/EmailModelBuilder.ts +++ b/api/src/v1/builder/channels/EmailModelBuilder.ts @@ -1,114 +1,113 @@ -import mongoose, {Model, Schema} from 'mongoose'; -import {Collection, ModelName} from '../../../constant/mongoose'; -import {IModelBuilder} from '../IModelBuilder'; -import {ObjectUtil} from '../../../util/ObjectUtil'; -import {EmailChannel} from "../../../../../entities/ts/channels/EmailChannel"; -import {MongooseUtil} from "../../../util/MongooseUtil"; -import {StringUtil} from '../../../util/StringUtil'; +import mongoose, { Model, Schema } from "mongoose"; +import { Collection, ModelName } from "../../../constant/mongoose"; +import { IModelBuilder } from "../IModelBuilder"; +import { ObjectUtil } from "../../../util/ObjectUtil"; +import { EmailChannel } from "../../../../../entities/ts/channels/EmailChannel"; +import { MongooseUtil } from "../../../util/MongooseUtil"; +import { StringUtil } from "../../../util/StringUtil"; /** * Builder to create Mongoose Schema and Model of Email Entity */ export class EmailModelBuilder implements IModelBuilder { + private _schema: Schema = null; + private _model: Model = null; - private _schema: Schema = null; - private _model: Model = null; + public produceSchema(): void { + const entity = {} as EmailChannel; - public produceSchema(): void { + entity.created = { + type: Date, + default: null, + } as any; + entity.modified = { + type: Date, + default: null, + } as any; + entity.name = { + type: String, + required: [true, "Name is required!"], + default: null, + set: StringUtil.trim, + } as any; + entity.type = { + type: Schema.Types.ObjectId, + ref: ModelName.GENERIC, + required: [true, "Type is required!"], + default: null, + } as any; + 
entity.configs = [ + { + type: Schema.Types.ObjectId, + ref: ModelName.CONFIG, + } as any, + ]; + entity.smtp = { + type: String, + default: null, + } as any; + entity.port = { + type: Number, + default: null, + } as any; + entity.emailFrom = { + type: String, + alias: "emailFrom", + default: null, + } as any; + entity.emailsTo = { + type: [String], + alias: "emailsTo", + } as any; + entity.username = { + type: String, + default: null, + } as any; + entity.password = { + type: String, + default: null, + } as any; + entity.info = { + type: Boolean, + default: false, + } as any; + entity.warning = { + type: Boolean, + default: false, + } as any; + entity.critical = { + type: Boolean, + default: false, + } as any; + entity.error = { + type: Boolean, + default: false, + } as any; + entity["configType"] = { + type: Schema.Types.ObjectId, + ref: ModelName.GENERIC, + } as any; - const entity = {} as EmailChannel; + this._schema = new Schema(ObjectUtil.camelToSnake(entity), { + versionKey: false, + }); - entity.created = { - type: Date, - default: null - } as any; - entity.modified = { - type: Date, - default: null - } as any; - entity.name = { - type: String, - required: [true, 'Name is required!'], - default: null, - set: StringUtil.trim - } as any; - entity.type = { - type: Schema.Types.ObjectId, - ref: ModelName.GENERIC, - required: [true, 'Type is required!'], - default: null - } as any; - entity.configs = [{ - type: Schema.Types.ObjectId, - ref: ModelName.CONFIG - } as any]; - entity.smtp = { - type: String, - default: null - } as any; - entity.port = { - type: Number, - default: null - } as any; - entity.emailFrom = { - type: String, - alias: 'emailFrom', - default: null - } as any; - entity.emailsTo = { - type: [String], - alias: 'emailsTo' - } as any; - entity.username = { - type: String, - default: null - } as any; - entity.password = { - type: String, - default: null - } as any; - entity.info = { - type: Boolean, - default: false - } as any; - entity.warning = { - type: Boolean, - default: false - } as any; - entity.critical = { - type: Boolean, - default: false - } as any; - entity.error = { - type: Boolean, - default: false - } as any; - entity['configType'] = { - type: Schema.Types.ObjectId, - ref: ModelName.GENERIC - } as any; + MongooseUtil.virtualize(this._schema); + } - this._schema = new Schema( - ObjectUtil.camelToSnake(entity), - {versionKey: false} - ); + public produceModel(): void { + this._model = mongoose.model( + ModelName.EMAIL, + this._schema, + Collection.CONFIG + ); + } - MongooseUtil.virtualize(this._schema); - } + public get model(): Model { + return this._model; + } - public produceModel(): void { - this._model = mongoose.model( - ModelName.EMAIL, - this._schema, - Collection.CONFIG - ) as Model; - } - - public get model(): Model { - return this._model; - } - - public get schema(): Schema { - return this._schema; - } + public get schema(): Schema { + return this._schema; + } } diff --git a/api/src/v1/builder/channels/EmailOldModelBuilder.ts b/api/src/v1/builder/channels/EmailOldModelBuilder.ts index 015ee8d5..86c2bfaf 100644 --- a/api/src/v1/builder/channels/EmailOldModelBuilder.ts +++ b/api/src/v1/builder/channels/EmailOldModelBuilder.ts @@ -9,34 +9,32 @@ import { EmailModelBuilder } from "./EmailModelBuilder"; * Builder to create Mongoose Schema and Model of Config Entity */ export class EmailOldModelBuilder implements IModelBuilder { - - private _schema: Schema = null; - private _model: Model = null; - - public produceSchema(): void { - - //make copy - const 
builder = new EmailModelBuilder() - builder.produceSchema(); - - this._schema = builder.schema; - - MongooseUtil.virtualize(this._schema); - } - - public produceModel(): void { - this._model = mongoose.model( - ModelName.EMAIL_OLD, - this._schema, - Collection.CONFIG_OLD - ) as Model; - } - - public get model(): Model { - return this._model; - } - - public get schema(): Schema { - return this._schema; - } + private _schema: Schema = null; + private _model: Model = null; + + public produceSchema(): void { + //make copy + const builder = new EmailModelBuilder(); + builder.produceSchema(); + + this._schema = builder.schema; + + MongooseUtil.virtualize(this._schema); + } + + public produceModel(): void { + this._model = mongoose.model( + ModelName.EMAIL_OLD, + this._schema, + Collection.CONFIG_OLD + ); + } + + public get model(): Model { + return this._model; + } + + public get schema(): Schema { + return this._schema; + } } diff --git a/api/src/v1/builder/channels/OpsgenieModelBuilder.ts b/api/src/v1/builder/channels/OpsgenieModelBuilder.ts index 99aeb922..e49a1872 100644 --- a/api/src/v1/builder/channels/OpsgenieModelBuilder.ts +++ b/api/src/v1/builder/channels/OpsgenieModelBuilder.ts @@ -1,97 +1,96 @@ -import mongoose, {Model, Schema} from 'mongoose'; -import {Collection, ModelName} from '../../../constant/mongoose'; -import {IModelBuilder} from '../IModelBuilder'; -import {ObjectUtil} from '../../../util/ObjectUtil'; -import {OpsgenieChannel} from "../../../../../entities/ts/channels/OpsgenieChannel"; -import {MongooseUtil} from "../../../util/MongooseUtil"; -import {StringUtil} from '../../../util/StringUtil'; +import mongoose, { Model, Schema } from "mongoose"; +import { Collection, ModelName } from "../../../constant/mongoose"; +import { IModelBuilder } from "../IModelBuilder"; +import { ObjectUtil } from "../../../util/ObjectUtil"; +import { OpsgenieChannel } from "../../../../../entities/ts/channels/OpsgenieChannel"; +import { MongooseUtil } from "../../../util/MongooseUtil"; +import { StringUtil } from "../../../util/StringUtil"; /** * Builder to create Mongoose Schema and Model of Opsgenie Entity */ export class OpsgenieModelBuilder implements IModelBuilder { + private _schema: Schema = null; + private _model: Model = null; - private _schema: Schema = null; - private _model: Model = null; + public produceSchema(): void { + const entity = {} as OpsgenieChannel; - public produceSchema(): void { + entity.created = { + type: Date, + default: null, + } as any; + entity.modified = { + type: Date, + default: null, + } as any; + entity.name = { + type: String, + required: [true, "Name is required!"], + default: null, + set: StringUtil.trim, + } as any; + entity.type = { + type: Schema.Types.ObjectId, + ref: ModelName.GENERIC, + required: [true, "Type is required!"], + default: null, + } as any; + entity.configs = [ + { + type: Schema.Types.ObjectId, + ref: ModelName.CONFIG, + } as any, + ]; + entity.apiToken = { + type: String, + alias: "apiToken", + } as any; + entity.eu = { + type: Boolean, + default: false, + } as any; + entity.info = { + type: Boolean, + default: false, + } as any; + entity.warning = { + type: Boolean, + default: false, + } as any; + entity.critical = { + type: Boolean, + default: false, + } as any; + entity.error = { + type: Boolean, + default: false, + } as any; + entity["configType"] = { + type: Schema.Types.ObjectId, + ref: ModelName.GENERIC, + } as any; - const entity = {} as OpsgenieChannel; + this._schema = new Schema(ObjectUtil.camelToSnake(entity), { + 
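// --- Editorial aside (illustrative sketch, not part of this patch) --------
// The *OldModelBuilder classes in this patch reuse the Schema produced by
// their non-"Old" counterparts and register it under a different model name
// and collection: mongoose.model(name, schema, collection) takes the
// collection explicitly, so one Schema instance can back several
// collections. Sketch with names assumed for illustration:
import mongoose, { Schema } from "mongoose";

const sharedSchema = new Schema({ name: String }, { versionKey: false });
const Current = mongoose.model("DemoCurrent", sharedSchema, "configs");
const Legacy = mongoose.model("DemoLegacy", sharedSchema, "configs_old");
// Same shape and validators; the documents live in different collections.
// --------------------------------------------------------------------------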
versionKey: false, + }); - entity.created = { - type: Date, - default: null - } as any; - entity.modified = { - type: Date, - default: null - } as any; - entity.name = { - type: String, - required: [true, 'Name is required!'], - default: null, - set: StringUtil.trim - } as any; - entity.type = { - type: Schema.Types.ObjectId, - ref: ModelName.GENERIC, - required: [true, 'Type is required!'], - default: null - } as any; - entity.configs = [{ - type: Schema.Types.ObjectId, - ref: ModelName.CONFIG - } as any]; - entity.apiToken = { - type: String, - alias: 'apiToken' - } as any; - entity.eu = { - type: Boolean, - default: false - } as any; - entity.info = { - type: Boolean, - default: false - } as any; - entity.warning = { - type: Boolean, - default: false - } as any; - entity.critical = { - type: Boolean, - default: false - } as any; - entity.error = { - type: Boolean, - default: false - } as any; - entity['configType'] = { - type: Schema.Types.ObjectId, - ref: ModelName.GENERIC - } as any; + MongooseUtil.virtualize(this._schema); + } - this._schema = new Schema( - ObjectUtil.camelToSnake(entity), - {versionKey: false} - ); + public produceModel(): void { + this._model = mongoose.model( + ModelName.OPSGENIE, + this._schema, + Collection.CONFIG + ); + } - MongooseUtil.virtualize(this._schema); - } + public get model(): Model { + return this._model; + } - public produceModel(): void { - this._model = mongoose.model( - ModelName.OPSGENIE, - this._schema, - Collection.CONFIG - ) as Model; - } - - public get model(): Model { - return this._model; - } - - public get schema(): Schema { - return this._schema; - } + public get schema(): Schema { + return this._schema; + } } diff --git a/api/src/v1/builder/channels/OpsgenieOldModelBuilder.ts b/api/src/v1/builder/channels/OpsgenieOldModelBuilder.ts index 2854083b..685cac23 100644 --- a/api/src/v1/builder/channels/OpsgenieOldModelBuilder.ts +++ b/api/src/v1/builder/channels/OpsgenieOldModelBuilder.ts @@ -9,34 +9,32 @@ import { OpsgenieModelBuilder } from "./OpsgenieModelBuilder"; * Builder to create Mongoose Schema and Model of Config Entity */ export class OpsgenieOldModelBuilder implements IModelBuilder { - - private _schema: Schema = null; - private _model: Model = null; - - public produceSchema(): void { - - //make copy - const builder = new OpsgenieModelBuilder() - builder.produceSchema(); - - this._schema = builder.schema; - - MongooseUtil.virtualize(this._schema); - } - - public produceModel(): void { - this._model = mongoose.model( - ModelName.OPSGENIE_OLD, - this._schema, - Collection.CONFIG_OLD - ) as Model; - } - - public get model(): Model { - return this._model; - } - - public get schema(): Schema { - return this._schema; - } + private _schema: Schema = null; + private _model: Model = null; + + public produceSchema(): void { + //make copy + const builder = new OpsgenieModelBuilder(); + builder.produceSchema(); + + this._schema = builder.schema; + + MongooseUtil.virtualize(this._schema); + } + + public produceModel(): void { + this._model = mongoose.model( + ModelName.OPSGENIE_OLD, + this._schema, + Collection.CONFIG_OLD + ); + } + + public get model(): Model { + return this._model; + } + + public get schema(): Schema { + return this._schema; + } } diff --git a/api/src/v1/builder/channels/PagerDutyModelBuilder.ts b/api/src/v1/builder/channels/PagerDutyModelBuilder.ts index 79959efa..d65f58ae 100644 --- a/api/src/v1/builder/channels/PagerDutyModelBuilder.ts +++ b/api/src/v1/builder/channels/PagerDutyModelBuilder.ts @@ -1,94 +1,93 @@ -import 
mongoose, {Model, Schema} from 'mongoose'; -import {Collection, ModelName} from '../../../constant/mongoose'; -import {IModelBuilder} from '../IModelBuilder'; -import {ObjectUtil} from '../../../util/ObjectUtil'; -import {PagerDutyChannel} from "../../../../../entities/ts/channels/PagerDutyChannel"; -import {MongooseUtil} from "../../../util/MongooseUtil"; -import {StringUtil} from '../../../util/StringUtil'; +import mongoose, { Model, Schema } from "mongoose"; +import { Collection, ModelName } from "../../../constant/mongoose"; +import { IModelBuilder } from "../IModelBuilder"; +import { ObjectUtil } from "../../../util/ObjectUtil"; +import { PagerDutyChannel } from "../../../../../entities/ts/channels/PagerDutyChannel"; +import { MongooseUtil } from "../../../util/MongooseUtil"; +import { StringUtil } from "../../../util/StringUtil"; /** * Builder to create Mongoose Schema and Model of PagerDuty Entity */ export class PagerDutyModelBuilder implements IModelBuilder { + private _schema: Schema = null; + private _model: Model = null; - private _schema: Schema = null; - private _model: Model = null; + public produceSchema(): void { + const entity = {} as PagerDutyChannel; - public produceSchema(): void { + entity.created = { + type: Date, + default: null, + } as any; + entity.modified = { + type: Date, + default: null, + } as any; + entity.name = { + type: String, + required: [true, "Name is required!"], + default: null, + set: StringUtil.trim, + } as any; + entity.type = { + type: Schema.Types.ObjectId, + ref: ModelName.GENERIC, + required: [true, "Type is required!"], + default: null, + } as any; + entity.configs = [ + { + type: Schema.Types.ObjectId, + ref: ModelName.CONFIG, + } as any, + ]; + entity.integrationKey = { + type: String, + alias: "integrationKey", + default: null, + } as any; + entity.info = { + type: Boolean, + default: false, + } as any; + entity.warning = { + type: Boolean, + default: false, + } as any; + entity.critical = { + type: Boolean, + default: false, + } as any; + entity.error = { + type: Boolean, + default: false, + } as any; + entity["configType"] = { + type: Schema.Types.ObjectId, + ref: ModelName.GENERIC, + } as any; - const entity = {} as PagerDutyChannel; + this._schema = new Schema(ObjectUtil.camelToSnake(entity), { + versionKey: false, + }); - entity.created = { - type: Date, - default: null - } as any; - entity.modified = { - type: Date, - default: null - } as any; - entity.name = { - type: String, - required: [true, 'Name is required!'], - default: null, - set: StringUtil.trim - } as any; - entity.type = { - type: Schema.Types.ObjectId, - ref: ModelName.GENERIC, - required: [true, 'Type is required!'], - default: null - } as any; - entity.configs = [{ - type: Schema.Types.ObjectId, - ref: ModelName.CONFIG - } as any]; - entity.integrationKey = { - type: String, - alias: 'integrationKey', - default: null - } as any; - entity.info = { - type: Boolean, - default: false - } as any; - entity.warning = { - type: Boolean, - default: false - } as any; - entity.critical = { - type: Boolean, - default: false - } as any; - entity.error = { - type: Boolean, - default: false - } as any; - entity['configType'] = { - type: Schema.Types.ObjectId, - ref: ModelName.GENERIC - } as any; + MongooseUtil.virtualize(this._schema); + } - this._schema = new Schema( - ObjectUtil.camelToSnake(entity), - {versionKey: false} - ); + public produceModel(): void { + this._model = mongoose.model( + ModelName.PAGER_DUTY, + this._schema, + Collection.CONFIG + ); + } - 
MongooseUtil.virtualize(this._schema); - } + public get model(): Model { + return this._model; + } - public produceModel(): void { - this._model = mongoose.model( - ModelName.PAGER_DUTY, - this._schema, - Collection.CONFIG - ) as Model; - } - - public get model(): Model { - return this._model; - } - - public get schema(): Schema { - return this._schema; - } + public get schema(): Schema { + return this._schema; + } } diff --git a/api/src/v1/builder/channels/PagerDutyOldModelBuilder.ts b/api/src/v1/builder/channels/PagerDutyOldModelBuilder.ts index ba71ef53..a7626192 100644 --- a/api/src/v1/builder/channels/PagerDutyOldModelBuilder.ts +++ b/api/src/v1/builder/channels/PagerDutyOldModelBuilder.ts @@ -9,34 +9,32 @@ import { PagerDutyModelBuilder } from "./PagerDutyModelBuilder"; * Builder to create Mongoose Schema and Model of Config Entity */ export class PagerDutyOldModelBuilder implements IModelBuilder { - - private _schema: Schema = null; - private _model: Model = null; - - public produceSchema(): void { - - //make copy - const builder = new PagerDutyModelBuilder() - builder.produceSchema(); - - this._schema = builder.schema; - - MongooseUtil.virtualize(this._schema); - } - - public produceModel(): void { - this._model = mongoose.model( - ModelName.PAGER_DUTY_OLD, - this._schema, - Collection.CONFIG_OLD - ) as Model; - } - - public get model(): Model { - return this._model; - } - - public get schema(): Schema { - return this._schema; - } + private _schema: Schema = null; + private _model: Model = null; + + public produceSchema(): void { + //make copy + const builder = new PagerDutyModelBuilder(); + builder.produceSchema(); + + this._schema = builder.schema; + + MongooseUtil.virtualize(this._schema); + } + + public produceModel(): void { + this._model = mongoose.model( + ModelName.PAGER_DUTY_OLD, + this._schema, + Collection.CONFIG_OLD + ); + } + + public get model(): Model { + return this._model; + } + + public get schema(): Schema { + return this._schema; + } } diff --git a/api/src/v1/builder/channels/SlackModelBuilder.ts b/api/src/v1/builder/channels/SlackModelBuilder.ts index 754c3b55..13ec1c4c 100644 --- a/api/src/v1/builder/channels/SlackModelBuilder.ts +++ b/api/src/v1/builder/channels/SlackModelBuilder.ts @@ -1,112 +1,111 @@ -import mongoose, {Model, Schema} from 'mongoose'; -import {Collection, ModelName} from '../../../constant/mongoose'; -import {IModelBuilder} from '../IModelBuilder'; -import {ObjectUtil} from '../../../util/ObjectUtil'; -import {SlackChannel} from "../../../../../entities/ts/channels/SlackChannel"; -import {MongooseUtil} from "../../../util/MongooseUtil"; -import {StringUtil} from '../../../util/StringUtil'; +import mongoose, { Model, Schema } from "mongoose"; +import { Collection, ModelName } from "../../../constant/mongoose"; +import { IModelBuilder } from "../IModelBuilder"; +import { ObjectUtil } from "../../../util/ObjectUtil"; +import { SlackChannel } from "../../../../../entities/ts/channels/SlackChannel"; +import { MongooseUtil } from "../../../util/MongooseUtil"; +import { StringUtil } from "../../../util/StringUtil"; /** * Builder to create Mongoose Schema and Model of Slack Entity */ export class SlackModelBuilder implements IModelBuilder { + private _schema: Schema = null; + private _model: Model = null; - private _schema: Schema = null; - private _model: Model = null; + public produceSchema(): void { + const entity = {} as SlackChannel; - public produceSchema(): void { + entity.created = { + type: Date, + default: null, + } as any; + 
entity.modified = { + type: Date, + default: null, + } as any; + entity.name = { + type: String, + required: [true, "Name is required!"], + default: null, + set: StringUtil.trim, + } as any; + entity.type = { + type: Schema.Types.ObjectId, + ref: ModelName.GENERIC, + required: [true, "Type is required!"], + default: null, + } as any; + entity.configs = [ + { + type: Schema.Types.ObjectId, + ref: ModelName.CONFIG, + } as any, + ]; + entity.appToken = { + type: String, + alias: "appToken", + default: null, + } as any; + entity.botToken = { + type: String, + alias: "botToken", + default: null, + } as any; + entity.botChannelId = { + type: String, + alias: "botChannelId", + default: null, + } as any; + entity.commands = { + type: Boolean, + default: false, + } as any; + entity.alerts = { + type: Boolean, + default: false, + } as any; + entity.info = { + type: Boolean, + default: false, + } as any; + entity.warning = { + type: Boolean, + default: false, + } as any; + entity.critical = { + type: Boolean, + default: false, + } as any; + entity.error = { + type: Boolean, + default: false, + } as any; + entity["configType"] = { + type: Schema.Types.ObjectId, + ref: ModelName.GENERIC, + } as any; - const entity = {} as SlackChannel; + this._schema = new Schema(ObjectUtil.camelToSnake(entity), { + versionKey: false, + }); - entity.created = { - type: Date, - default: null - } as any; - entity.modified = { - type: Date, - default: null - } as any; - entity.name = { - type: String, - required: [true, 'Name is required!'], - default: null, - set: StringUtil.trim - } as any; - entity.type = { - type: Schema.Types.ObjectId, - ref: ModelName.GENERIC, - required: [true, 'Type is required!'], - default: null - } as any; - entity.configs = [{ - type: Schema.Types.ObjectId, - ref: ModelName.CONFIG - } as any]; - entity.appToken = { - type: String, - alias: 'appToken', - default: null - } as any; - entity.botToken = { - type: String, - alias: 'botToken', - default: null - } as any; - entity.botChannelId = { - type: String, - alias: 'botChannelId', - default: null - } as any; - entity.commands = { - type: Boolean, - default: false - } as any; - entity.alerts = { - type: Boolean, - default: false - } as any; - entity.info = { - type: Boolean, - default: false - } as any; - entity.warning = { - type: Boolean, - default: false - } as any; - entity.critical = { - type: Boolean, - default: false - } as any; - entity.error = { - type: Boolean, - default: false - } as any; - entity['configType'] = { - type: Schema.Types.ObjectId, - ref: ModelName.GENERIC - } as any; + MongooseUtil.virtualize(this._schema); + } - this._schema = new Schema( - ObjectUtil.camelToSnake(entity), - {versionKey: false} - ); + public produceModel(): void { + this._model = mongoose.model( + ModelName.SLACK, + this._schema, + Collection.CONFIG + ); + } - MongooseUtil.virtualize(this._schema); - } + public get model(): Model { + return this._model; + } - public produceModel(): void { - this._model = mongoose.model( - ModelName.SLACK, - this._schema, - Collection.CONFIG - ) as Model; - } - - public get model(): Model { - return this._model; - } - - public get schema(): Schema { - return this._schema; - } + public get schema(): Schema { + return this._schema; + } } diff --git a/api/src/v1/builder/channels/SlackOldModelBuilder.ts b/api/src/v1/builder/channels/SlackOldModelBuilder.ts index 4e85c706..6481a1b5 100644 --- a/api/src/v1/builder/channels/SlackOldModelBuilder.ts +++ b/api/src/v1/builder/channels/SlackOldModelBuilder.ts @@ -9,34 +9,32 @@ 
import { SlackModelBuilder } from "./SlackModelBuilder"; * Builder to create Mongoose Schema and Model of Config Entity */ export class SlackOldModelBuilder implements IModelBuilder { - - private _schema: Schema = null; - private _model: Model = null; - - public produceSchema(): void { - - //make copy - const builder = new SlackModelBuilder() - builder.produceSchema(); - - this._schema = builder.schema; - - MongooseUtil.virtualize(this._schema); - } - - public produceModel(): void { - this._model = mongoose.model( - ModelName.SLACK_OLD, - this._schema, - Collection.CONFIG_OLD - ) as Model; - } - - public get model(): Model { - return this._model; - } - - public get schema(): Schema { - return this._schema; - } + private _schema: Schema = null; + private _model: Model = null; + + public produceSchema(): void { + //make copy + const builder = new SlackModelBuilder(); + builder.produceSchema(); + + this._schema = builder.schema; + + MongooseUtil.virtualize(this._schema); + } + + public produceModel(): void { + this._model = mongoose.model( + ModelName.SLACK_OLD, + this._schema, + Collection.CONFIG_OLD + ); + } + + public get model(): Model { + return this._model; + } + + public get schema(): Schema { + return this._schema; + } } diff --git a/api/src/v1/builder/channels/TelegramModelBuilder.ts b/api/src/v1/builder/channels/TelegramModelBuilder.ts index 44c37e6d..92387b55 100644 --- a/api/src/v1/builder/channels/TelegramModelBuilder.ts +++ b/api/src/v1/builder/channels/TelegramModelBuilder.ts @@ -1,107 +1,106 @@ -import mongoose, {Model, Schema} from 'mongoose'; -import {Collection, ModelName} from '../../../constant/mongoose'; -import {IModelBuilder} from '../IModelBuilder'; -import {ObjectUtil} from '../../../util/ObjectUtil'; -import {TelegramChannel} from "../../../../../entities/ts/channels/TelegramChannel"; -import {MongooseUtil} from "../../../util/MongooseUtil"; -import {StringUtil} from '../../../util/StringUtil'; +import mongoose, { Model, Schema } from "mongoose"; +import { Collection, ModelName } from "../../../constant/mongoose"; +import { IModelBuilder } from "../IModelBuilder"; +import { ObjectUtil } from "../../../util/ObjectUtil"; +import { TelegramChannel } from "../../../../../entities/ts/channels/TelegramChannel"; +import { MongooseUtil } from "../../../util/MongooseUtil"; +import { StringUtil } from "../../../util/StringUtil"; /** * Builder to create Mongoose Schema and Model of Telegram Entity */ export class TelegramModelBuilder implements IModelBuilder { + private _schema: Schema = null; + private _model: Model = null; - private _schema: Schema = null; - private _model: Model = null; + public produceSchema(): void { + const entity = {} as TelegramChannel; - public produceSchema(): void { + entity.created = { + type: Date, + default: null, + } as any; + entity.modified = { + type: Date, + default: null, + } as any; + entity.name = { + type: String, + required: [true, "Name is required!"], + default: null, + set: StringUtil.trim, + } as any; + entity.type = { + type: Schema.Types.ObjectId, + ref: ModelName.GENERIC, + required: [true, "Type is required!"], + default: null, + } as any; + entity.configs = [ + { + type: Schema.Types.ObjectId, + ref: ModelName.CONFIG, + } as any, + ]; + entity.botToken = { + type: String, + alias: "botToken", + default: null, + } as any; + entity.chatId = { + type: String, + alias: "chatId", + default: null, + } as any; + entity.commands = { + type: Boolean, + default: false, + } as any; + entity.alerts = { + type: Boolean, + default: false, + 
} as any; + entity.info = { + type: Boolean, + default: false, + } as any; + entity.warning = { + type: Boolean, + default: false, + } as any; + entity.critical = { + type: Boolean, + default: false, + } as any; + entity.error = { + type: Boolean, + default: false, + } as any; + entity["configType"] = { + type: Schema.Types.ObjectId, + ref: ModelName.GENERIC, + } as any; - const entity = {} as TelegramChannel; + this._schema = new Schema(ObjectUtil.camelToSnake(entity), { + versionKey: false, + }); - entity.created = { - type: Date, - default: null - } as any; - entity.modified = { - type: Date, - default: null - } as any; - entity.name = { - type: String, - required: [true, 'Name is required!'], - default: null, - set: StringUtil.trim - } as any; - entity.type = { - type: Schema.Types.ObjectId, - ref: ModelName.GENERIC, - required: [true, 'Type is required!'], - default: null - } as any; - entity.configs = [{ - type: Schema.Types.ObjectId, - ref: ModelName.CONFIG - } as any]; - entity.botToken = { - type: String, - alias: 'botToken', - default: null - } as any; - entity.chatId = { - type: String, - alias: 'chatId', - default: null - } as any; - entity.commands = { - type: Boolean, - default: false - } as any; - entity.alerts = { - type: Boolean, - default: false - } as any; - entity.info = { - type: Boolean, - default: false - } as any; - entity.warning = { - type: Boolean, - default: false - } as any; - entity.critical = { - type: Boolean, - default: false - } as any; - entity.error = { - type: Boolean, - default: false - } as any; - entity['configType'] = { - type: Schema.Types.ObjectId, - ref: ModelName.GENERIC - } as any; + MongooseUtil.virtualize(this._schema); + } - this._schema = new Schema( - ObjectUtil.camelToSnake(entity), - {versionKey: false} - ); + public produceModel(): void { + this._model = mongoose.model( + ModelName.TELEGRAM, + this._schema, + Collection.CONFIG + ); + } - MongooseUtil.virtualize(this._schema); - } + public get model(): Model { + return this._model; + } - public produceModel(): void { - this._model = mongoose.model( - ModelName.TELEGRAM, - this._schema, - Collection.CONFIG - ) as Model; - } - - public get model(): Model { - return this._model; - } - - public get schema(): Schema { - return this._schema; - } + public get schema(): Schema { + return this._schema; + } } diff --git a/api/src/v1/builder/channels/TelegramOldModelBuilder.ts b/api/src/v1/builder/channels/TelegramOldModelBuilder.ts index 311e7a38..ba0c2d74 100644 --- a/api/src/v1/builder/channels/TelegramOldModelBuilder.ts +++ b/api/src/v1/builder/channels/TelegramOldModelBuilder.ts @@ -9,34 +9,32 @@ import { TelegramModelBuilder } from "./TelegramModelBuilder"; * Builder to create Mongoose Schema and Model of Config Entity */ export class TelegramOldModelBuilder implements IModelBuilder { - - private _schema: Schema = null; - private _model: Model = null; - - public produceSchema(): void { - - //make copy - const builder = new TelegramModelBuilder() - builder.produceSchema(); - - this._schema = builder.schema; - - MongooseUtil.virtualize(this._schema); - } - - public produceModel(): void { - this._model = mongoose.model( - ModelName.TELEGRAM_OLD, - this._schema, - Collection.CONFIG_OLD - ) as Model; - } - - public get model(): Model { - return this._model; - } - - public get schema(): Schema { - return this._schema; - } + private _schema: Schema = null; + private _model: Model = null; + + public produceSchema(): void { + //make copy + const builder = new TelegramModelBuilder(); + 
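// Editor's note (assumed intent, not stated in the original patch): the
// *OldModelBuilder classes deliberately reuse the schema produced by their
// non-Old counterparts; only the model name (e.g. ModelName.TELEGRAM_OLD)
// and the backing collection (Collection.CONFIG_OLD) differ, which keeps the
// old and new document shapes in lockstep.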
builder.produceSchema(); + + this._schema = builder.schema; + + MongooseUtil.virtualize(this._schema); + } + + public produceModel(): void { + this._model = mongoose.model( + ModelName.TELEGRAM_OLD, + this._schema, + Collection.CONFIG_OLD + ); + } + + public get model(): Model { + return this._model; + } + + public get schema(): Schema { + return this._schema; + } } diff --git a/api/src/v1/builder/channels/TwilioModelBuilder.ts b/api/src/v1/builder/channels/TwilioModelBuilder.ts index 6b7f7b4d..207bc391 100644 --- a/api/src/v1/builder/channels/TwilioModelBuilder.ts +++ b/api/src/v1/builder/channels/TwilioModelBuilder.ts @@ -1,96 +1,95 @@ -import mongoose, {Model, Schema} from 'mongoose'; -import {Collection, ModelName} from '../../../constant/mongoose'; -import {IModelBuilder} from '../IModelBuilder'; -import {ObjectUtil} from '../../../util/ObjectUtil'; -import {TwilioChannel} from "../../../../../entities/ts/channels/TwilioChannel"; -import {MongooseUtil} from "../../../util/MongooseUtil"; -import {StringUtil} from '../../../util/StringUtil'; +import mongoose, { Model, Schema } from "mongoose"; +import { Collection, ModelName } from "../../../constant/mongoose"; +import { IModelBuilder } from "../IModelBuilder"; +import { ObjectUtil } from "../../../util/ObjectUtil"; +import { TwilioChannel } from "../../../../../entities/ts/channels/TwilioChannel"; +import { MongooseUtil } from "../../../util/MongooseUtil"; +import { StringUtil } from "../../../util/StringUtil"; /** * Builder to create Mongoose Schema and Model of Twilio Entity */ export class TwilioModelBuilder implements IModelBuilder { + private _schema: Schema = null; + private _model: Model = null; - private _schema: Schema = null; - private _model: Model = null; + public produceSchema(): void { + const entity = {} as TwilioChannel; - public produceSchema(): void { + entity.created = { + type: Date, + default: null, + } as any; + entity.modified = { + type: Date, + default: null, + } as any; + entity.name = { + type: String, + required: [true, "Name is required!"], + default: null, + set: StringUtil.trim, + } as any; + entity.type = { + type: Schema.Types.ObjectId, + ref: ModelName.GENERIC, + required: [true, "Type is required!"], + default: null, + } as any; + entity.configs = [ + { + type: Schema.Types.ObjectId, + ref: ModelName.CONFIG, + } as any, + ]; + entity.accountSid = { + type: String, + alias: "accountSid", + default: null, + } as any; + entity.authToken = { + type: String, + alias: "authToken", + default: null, + } as any; + entity.twilioPhoneNumber = { + type: String, + alias: "twilioPhoneNumber", + default: null, + } as any; + entity.twilioPhoneNumbersToDial = { + type: [String], + alias: "twilioPhoneNumbersToDial", + } as any; + entity.critical = { + type: Boolean, + default: false, + } as any; + entity["configType"] = { + type: Schema.Types.ObjectId, + ref: ModelName.GENERIC, + } as any; - const entity = {} as TwilioChannel; + this._schema = new Schema(ObjectUtil.camelToSnake(entity), { + versionKey: false, + }); - entity.created = { - type: Date, - default: null - } as any; - entity.modified = { - type: Date, - default: null - } as any; - entity.name = { - type: String, - required: [true, 'Name is required!'], - default: null, - set: StringUtil.trim - } as any; - entity.type = { - type: Schema.Types.ObjectId, - ref: ModelName.GENERIC, - required: [true, 'Type is required!'], - default: null - } as any; - entity.configs = [{ - type: Schema.Types.ObjectId, - ref: ModelName.CONFIG - } as any]; - entity.accountSid = { - 
type: String, - alias: 'accountSid', - default: null - } as any; - entity.authToken = { - type: String, - alias: 'authToken', - default: null - } as any; - entity.twilioPhoneNumber = { - type: String, - alias: 'twilioPhoneNumber', - default: null - } as any; - entity.twilioPhoneNumbersToDial = { - type: [String], - alias: 'twilioPhoneNumbersToDial' - } as any; - entity.critical = { - type: Boolean, - default: false - } as any; - entity['configType'] = { - type: Schema.Types.ObjectId, - ref: ModelName.GENERIC - } as any; + MongooseUtil.virtualize(this._schema); + } - this._schema = new Schema( - ObjectUtil.camelToSnake(entity), - {versionKey: false} - ); + public produceModel(): void { + this._model = mongoose.model( + ModelName.TWILIO, + this._schema, + Collection.CONFIG + ); + } - MongooseUtil.virtualize(this._schema); - } + public get model(): Model { + return this._model; + } - public produceModel(): void { - this._model = mongoose.model( - ModelName.TWILIO, - this._schema, - Collection.CONFIG - ) as Model; - } - - public get model(): Model { - return this._model; - } - - public get schema(): Schema { - return this._schema; - } + public get schema(): Schema { + return this._schema; + } } diff --git a/api/src/v1/builder/channels/TwilioOldModelBuilder.ts b/api/src/v1/builder/channels/TwilioOldModelBuilder.ts index 88d8df04..451547d2 100644 --- a/api/src/v1/builder/channels/TwilioOldModelBuilder.ts +++ b/api/src/v1/builder/channels/TwilioOldModelBuilder.ts @@ -9,34 +9,32 @@ import { TwilioModelBuilder } from "./TwilioModelBuilder"; * Builder to create Mongoose Schema and Model of Config Entity */ export class TwillioOldModelBuilder implements IModelBuilder { - - private _schema: Schema = null; - private _model: Model = null; - - public produceSchema(): void { - - //make copy - const builder = new TwilioModelBuilder() - builder.produceSchema(); - - this._schema = builder.schema; - - MongooseUtil.virtualize(this._schema); - } - - public produceModel(): void { - this._model = mongoose.model( - ModelName.TWILIO_OLD, - this._schema, - Collection.CONFIG_OLD - ) as Model; - } - - public get model(): Model { - return this._model; - } - - public get schema(): Schema { - return this._schema; - } + private _schema: Schema = null; + private _model: Model = null; + + public produceSchema(): void { + //make copy + const builder = new TwilioModelBuilder(); + builder.produceSchema(); + + this._schema = builder.schema; + + MongooseUtil.virtualize(this._schema); + } + + public produceModel(): void { + this._model = mongoose.model( + ModelName.TWILIO_OLD, + this._schema, + Collection.CONFIG_OLD + ); + } + + public get model(): Model { + return this._model; + } + + public get schema(): Schema { + return this._schema; + } } diff --git a/api/src/v1/entity/repository/AbstractRepository.ts b/api/src/v1/entity/repository/AbstractRepository.ts index 9b8c8a2d..c3200070 100644 --- a/api/src/v1/entity/repository/AbstractRepository.ts +++ b/api/src/v1/entity/repository/AbstractRepository.ts @@ -1,94 +1,101 @@ -import {ObjectID} from 'mongodb'; -import * as mongoose from 'mongoose'; +import { ObjectId } from "mongodb"; +import * as mongoose from "mongoose"; /** * Implements generic operations to interact with database */ export class AbstractRepository { + public constructor(protected model: mongoose.Model) {} - public constructor(protected model: mongoose.Model) { - } + /** + * Return a list of from database + * + * @returns an array of items + */ + public async findAll(): Promise> { + return this.model.find(); + } - 
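// A minimal usage sketch of the builder pattern above (assumed call site,
// not taken from this patch): produceSchema() must run before produceModel(),
// because the model is compiled from the schema built in the first step.
const twilioBuilder = new TwilioModelBuilder();
twilioBuilder.produceSchema();
twilioBuilder.produceModel();
const twilioModel = twilioBuilder.model; // mongoose Model bound to Collection.CONFIG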
/** - * Return a list of from database - * - * @returns an array of items - */ - public async findAll(): Promise> { - return this.model.find(); - } + /** + * Return a list of from database by id and deep populate references + * + * @param fieldsToPopulate reference fields to populate within document + * @param populateObject object which contains information regarding deep population + * @returns an array of items with populated references + */ + public async findAllAndDeepPopulate( + fieldsToPopulate: string[], + populateObject + ): Promise> { + return this.model + .find() + .populate(fieldsToPopulate) + .populate(populateObject); + } - /** - * Return a list of from database by id and deep populate references - * - * @param fieldsToPopulate reference fields to populate within document - * @param populateObject object which contains information regarding deep population - * @returns an array of items with populated references - */ - public async findAllAndDeepPopulate(fieldsToPopulate: string[], populateObject): Promise> { - return this.model.find().populate(fieldsToPopulate).populate(populateObject); - } + /** + * Return a list of from database by criteria + * + * @param criteria + * @returns an array of items + */ + public async findBy(criteria: object): Promise> { + return this.model.find(criteria); + } - /** - * Return a list of from database by criteria - * - * @param criteria - * @returns an array of items - */ - public async findBy(criteria: object): Promise> { - return this.model.find(criteria); - } + /** + * Return a list of from database by criteria + * + * @param criteria + * @param fieldsToPopulate reference fields to populate within documents + */ + public async findByAndPopulate( + criteria: object, + fieldsToPopulate: string[] + ): Promise> { + return this.model.find(criteria).populate(fieldsToPopulate); + } - /** - * Return a list of from database by criteria - * - * @param criteria - * @param fieldsToPopulate reference fields to populate within documents - */ - public async findByAndPopulate(criteria: object, fieldsToPopulate: string[]): Promise> { - return this.model.find(criteria).populate(fieldsToPopulate); - } + /** + * Return item from database by id + * + * @param id The mongo hash id + * Return item + * + */ + public async findOneById(id: string): Promise { + return this.model.findOne({ _id: new ObjectId(id) }); + } - /** - * Return item from database by id - * - * @param id The mongo hash id - * Return item - * - */ - public async findOneById(id: string): Promise { - return this.model.findOne({_id: new ObjectID(id)}); - } + /** + * Return from database by criteria + * + * @param criteria + * @returns an item + */ + public async findOneBy(criteria: object): Promise { + return this.model.findOne(criteria); + } - /** - * Return from database by criteria - * - * @param criteria - * @returns an item - */ - public async findOneBy(criteria: object): Promise { - return this.model.findOne(criteria); - } + /** + * Return true if document exists + * + * @param id The mongo hash id + * @returns true if document exists, false otherwise + */ + public async exists(id: string): Promise { + const result = await this.model.find({ _id: id }).countDocuments(); + return result > 0; + } - /** - * Return true if document exists - * - * @param id The mongo hash id - * @returns true if document exists, false otherwise - */ - public async exists(id: string): Promise { - const result = await this.model.find({_id: id}).countDocuments(); - return result > 0; - } - - /** - * Deletes an item 
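// Hedged usage sketch for AbstractRepository (the repository subclass and the
// id value are illustrative assumptions; run inside an async function):
// findOneById and deleteOneById wrap the string in a new ObjectId, so callers
// pass plain hex strings.
const chains = new BaseChainRepository();
const chain = await chains.findOneById("6418f0a9c2b4a51d9c3e7f01");
if (chain !== null && (await chains.exists("6418f0a9c2b4a51d9c3e7f01"))) {
  // true when exactly one document was removed
  const deleted = await chains.deleteOneById("6418f0a9c2b4a51d9c3e7f01");
}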
from database by id - * - * @param id The mongo hash id - * @returns true if document was deleted - */ - public async deleteOneById(id: string): Promise { - const result = await this.model.deleteOne({_id: new ObjectID(id)}); - return result.deletedCount === 1; - } + /** + * Deletes an item from database by id + * + * @param id The mongo hash id + * @returns true if document was deleted + */ + public async deleteOneById(id: string): Promise { + const result = await this.model.deleteOne({ _id: new ObjectId(id) }); + return result.deletedCount === 1; + } } diff --git a/api/src/v1/entity/repository/BaseChainRepository.ts b/api/src/v1/entity/repository/BaseChainRepository.ts index 05e5fb36..d6bee5ff 100644 --- a/api/src/v1/entity/repository/BaseChainRepository.ts +++ b/api/src/v1/entity/repository/BaseChainRepository.ts @@ -1,37 +1,38 @@ -import {ObjectID} from "mongodb"; -import {BaseChain} from "../../../../../entities/ts/BaseChain"; -import {BaseChainModel} from "../model/BaseChainModel"; -import {AbstractRepository} from "./AbstractRepository"; +import { ObjectId } from "mongodb"; +import { BaseChain } from "../../../../../entities/ts/BaseChain"; +import { BaseChainModel } from "../model/BaseChainModel"; +import { AbstractRepository } from "./AbstractRepository"; /** * Implements specific operations for Base Chain model in database */ export class BaseChainRepository extends AbstractRepository { - public constructor() { - super(BaseChainModel); - } - - /** - * Return item BaseChain from database by id and deep populate references - * - * @param id The mongo hash id - * @param fieldsToPopulate reference fields to populate within document - * @param populateObject object which contains information regarding deep - * population - * @returns item BaseChain with populated references - */ - public async findOneByIdAndDeepPopulate( - id: string, - fieldsToPopulate: string[], - populateObject = null - ): Promise { - const query = this.model.findOne({ _id: new ObjectID(id) }) - .populate(fieldsToPopulate); + public constructor() { + super(BaseChainModel); + } - if (populateObject) { - query.populate(populateObject); - } + /** + * Return item BaseChain from database by id and deep populate references + * + * @param id The mongo hash id + * @param fieldsToPopulate reference fields to populate within document + * @param populateObject object which contains information regarding deep + * population + * @returns item BaseChain with populated references + */ + public async findOneByIdAndDeepPopulate( + id: string, + fieldsToPopulate: string[], + populateObject = null + ): Promise { + const query = this.model + .findOne({ _id: new ObjectId(id) }) + .populate(fieldsToPopulate); - return query; + if (populateObject) { + query.populate(populateObject); } + + return query; + } } diff --git a/api/src/v1/entity/repository/ChannelRepository.ts b/api/src/v1/entity/repository/ChannelRepository.ts index f730b506..f4fc08ee 100644 --- a/api/src/v1/entity/repository/ChannelRepository.ts +++ b/api/src/v1/entity/repository/ChannelRepository.ts @@ -1,347 +1,455 @@ -import {Channel} from "../../../../../entities/ts/channels/AbstractChannel"; -import {EmailChannel} from "../../../../../entities/ts/channels/EmailChannel"; -import {OpsgenieChannel} from "../../../../../entities/ts/channels/OpsgenieChannel"; -import {PagerDutyChannel} from "../../../../../entities/ts/channels/PagerDutyChannel"; -import {SlackChannel} from "../../../../../entities/ts/channels/SlackChannel"; -import {TelegramChannel} from 
"../../../../../entities/ts/channels/TelegramChannel"; -import {TwilioChannel} from "../../../../../entities/ts/channels/TwilioChannel"; -import {AbstractRepository} from "./AbstractRepository"; -import {EmailModel} from "../model/channels/EmailModel"; -import {OpsgenieModel} from "../model/channels/OpsgenieModel"; -import {PagerDutyModel} from "../model/channels/PagerDutyModel"; -import {SlackModel} from "../model/channels/SlackModel"; -import {TelegramModel} from "../model/channels/TelegramModel"; -import {TwilioModel} from "../model/channels/TwilioModel"; -import mongoose, {UpdateWriteOpResult} from "mongoose"; -import {ObjectID} from "mongodb"; -import {GenericDocument} from "../../../constant/mongoose"; +import { Channel } from "../../../../../entities/ts/channels/AbstractChannel"; +import { EmailChannel } from "../../../../../entities/ts/channels/EmailChannel"; +import { OpsgenieChannel } from "../../../../../entities/ts/channels/OpsgenieChannel"; +import { PagerDutyChannel } from "../../../../../entities/ts/channels/PagerDutyChannel"; +import { SlackChannel } from "../../../../../entities/ts/channels/SlackChannel"; +import { TelegramChannel } from "../../../../../entities/ts/channels/TelegramChannel"; +import { TwilioChannel } from "../../../../../entities/ts/channels/TwilioChannel"; +import { AbstractRepository } from "./AbstractRepository"; +import { EmailModel } from "../model/channels/EmailModel"; +import { OpsgenieModel } from "../model/channels/OpsgenieModel"; +import { PagerDutyModel } from "../model/channels/PagerDutyModel"; +import { SlackModel } from "../model/channels/SlackModel"; +import { TelegramModel } from "../model/channels/TelegramModel"; +import { TwilioModel } from "../model/channels/TwilioModel"; +import mongoose, { UpdateWriteOpResult } from "mongoose"; +import { ObjectId } from "mongodb"; +import { GenericDocument } from "../../../constant/mongoose"; /** * Implements specific operations for all Channel models */ class ChannelRepository extends AbstractRepository { - public constructor(protected model: mongoose.Model) { - super(model); - } + public constructor(protected model: mongoose.Model) { + super(model); + } - /** - * Return a list of channels from database and populate references - * - * @param fieldsToPopulate reference fields to populate within documents - * @param config_type_id the id of the config type to be returned - * @param populateObject object which contains information regarding deep population - * @returns an array of channels with populated references - */ - public async findAllByAndDeepPopulate(fieldsToPopulate: string[], config_type_id: string, - populateObject): Promise> { - return this.model.find({ - config_type: new ObjectID(config_type_id) - }).populate(fieldsToPopulate).populate(populateObject); - } + /** + * Return a list of channels from database and populate references + * + * @param fieldsToPopulate reference fields to populate within documents + * @param config_type_id the id of the config type to be returned + * @param populateObject object which contains information regarding deep population + * @returns an array of channels with populated references + */ + public async findAllByAndDeepPopulate( + fieldsToPopulate: string[], + config_type_id: string, + populateObject + ): Promise> { + return this.model + .find({ + config_type: new ObjectId(config_type_id), + }) + .populate(fieldsToPopulate) + .populate(populateObject); + } - /** - * Returns a channel from database by id and populate references - * - * @param id The mongo hash 
id - * @param fieldsToPopulate reference fields to populate within document - * @param config_type_id the id of the config type to be returned - * @param populateObject object which contains information regarding deep population - * @returns channel with populated references - */ - public async findOneByAndDeepPopulate(id: string, fieldsToPopulate: string[], config_type_id: string, - populateObject): Promise { - return this.model.findOne({ - _id: id, - config_type: new ObjectID(config_type_id) - }).populate(fieldsToPopulate).populate(populateObject); - } + /** + * Returns a channel from database by id and populate references + * + * @param id The mongo hash id + * @param fieldsToPopulate reference fields to populate within document + * @param config_type_id the id of the config type to be returned + * @param populateObject object which contains information regarding deep population + * @returns channel with populated references + */ + public async findOneByAndDeepPopulate( + id: string, + fieldsToPopulate: string[], + config_type_id: string, + populateObject + ): Promise { + return this.model + .findOne({ + _id: id, + config_type: new ObjectId(config_type_id), + }) + .populate(fieldsToPopulate) + .populate(populateObject); + } - /** - * Creates a link between a channel and a config by adding the - * config id to the array of configs in channel document. - * - * @param channel_id The mongo hash id of the channel document - * @param config_id The mongo hash id of the config document - * @returns update result - */ - public async linkConfigToChannel(channel_id: string, config_id: string): Promise { - return this.model.updateOne( - {_id: channel_id, config_type: {$ne: new ObjectID(GenericDocument.CONFIG_TYPE_SUB_CHAIN)}}, - // @ts-ignore - {$addToSet: {configs: new ObjectID(config_id)}} - ); - } + /** + * Creates a link between a channel and a config by adding the + * config id to the array of configs in channel document. + * + * @param channel_id The mongo hash id of the channel document + * @param config_id The mongo hash id of the config document + * @returns update result + */ + public async linkConfigToChannel( + channel_id: string, + config_id: string + ): Promise { + return this.model.updateOne( + { + _id: channel_id, + config_type: { + $ne: new ObjectId(GenericDocument.CONFIG_TYPE_SUB_CHAIN), + }, + }, + // @ts-ignore + { $addToSet: { configs: new ObjectId(config_id) } } + ); + } - /** - * Removes the link between a channel and a config by removing the - * config id from the array of configs in channel document. - * - * @param channel_id The mongo hash id of the channel document - * @param config_id The mongo hash id of the config document - * @returns update result - */ - public async unlinkConfigFromChannel(channel_id: string, config_id: string): Promise { - return this.model.updateOne( - {_id: channel_id, config_type: {$ne: new ObjectID(GenericDocument.CONFIG_TYPE_SUB_CHAIN)}}, - // @ts-ignore - {$pull: {configs: new ObjectID(config_id)}} - ); - } - - /** - * Removes the link between all channels and a config by removing the - * config id from the array of configs in channel documents (if any). 
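// Sketch of the linking flow above (ids assumed; inside an async context):
// $addToSet keeps the configs array duplicate-free when linking, $pull removes
// the reference again, and the config_type $ne guard skips sub-chain config
// documents in both operations.
const channelRepo = new TelegramRepository(); // any ChannelRepository subclass
await channelRepo.linkConfigToChannel("64190aa1c2b4a51d9c3e7f10", "64190aa1c2b4a51d9c3e7f11");
await channelRepo.unlinkConfigFromChannel("64190aa1c2b4a51d9c3e7f10", "64190aa1c2b4a51d9c3e7f11");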
- * - * @param config_id The mongo hash id of the config document - * @returns update result - */ - public async unlinkConfigFromAllChannels(config_id: string): Promise { - return this.model.updateMany( - {config_type: {$ne: new ObjectID(GenericDocument.CONFIG_TYPE_SUB_CHAIN)}}, - // @ts-ignore - {$pull: {configs: new ObjectID(config_id)}} - ); - } + /** + * Removes the link between a channel and a config by removing the + * config id from the array of configs in channel document. + * + * @param channel_id The mongo hash id of the channel document + * @param config_id The mongo hash id of the config document + * @returns update result + */ + public async unlinkConfigFromChannel( + channel_id: string, + config_id: string + ): Promise { + return this.model.updateOne( + { + _id: channel_id, + config_type: { + $ne: new ObjectId(GenericDocument.CONFIG_TYPE_SUB_CHAIN), + }, + }, + // @ts-ignore + { $pull: { configs: new ObjectId(config_id) } } + ); + } - /** - * Check if channel name already exists on Database - * - * @param channel The channel or request object - * @returns true if channel name exists, otherwise false - */ - public async isDuplicateChannelName(channel: Channel): Promise { - if ('name' in channel) { - const criteria = { - name: channel.name - } + /** + * Removes the link between all channels and a config by removing the + * config id from the array of configs in channel documents (if any). + * + * @param config_id The mongo hash id of the config document + * @returns update result + */ + public async unlinkConfigFromAllChannels( + config_id: string + ): Promise { + return this.model.updateMany( + { + config_type: { + $ne: new ObjectId(GenericDocument.CONFIG_TYPE_SUB_CHAIN), + }, + }, + // @ts-ignore + { $pull: { configs: new ObjectId(config_id) } } + ); + } - // for edit case, to ignore self register - const isValidConfigID = 'id' in channel && mongoose.Types.ObjectId.isValid(channel.id); + /** + * Check if channel name already exists on Database + * + * @param channel The channel or request object + * @returns true if channel name exists, otherwise false + */ + public async isDuplicateChannelName(channel: Channel): Promise { + if ("name" in channel) { + const criteria = { + name: channel.name, + }; - if (isValidConfigID) { - criteria['_id'] = {'$ne': new ObjectID(channel.id)}; - } + // for edit case, to ignore self register + const isValidConfigID = + "id" in channel && mongoose.Types.ObjectId.isValid(channel.id); - const channels = await this.findBy(criteria); - return channels && channels.length > 0; - } + if (isValidConfigID) { + criteria["_id"] = { $ne: new ObjectId(channel.id) }; + } - return false; + const channels = await this.findBy(criteria); + return channels && channels.length > 0; } + + return false; + } } /** * Implements specific operations for Email model in database */ export class EmailRepository extends ChannelRepository { - public constructor() { - super(EmailModel); - } + public constructor() { + super(EmailModel); + } - /** - * Return a list of Email channels from database and populate references - * - * @param fieldsToPopulate reference fields to populate within documents - * @param populateObject object which contains information regarding deep population - * @returns an array of Email channels with populated references - */ - public async findAllByAndDeepPopulate(fieldsToPopulate: string[], - populateObject): Promise> { - return super.findAllByAndDeepPopulate( - fieldsToPopulate, GenericDocument.CONFIG_TYPE_EMAIL_CHANNEL, populateObject); - } + /** + * 
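// Hedged example of the duplicate-name check above (channel values assumed;
// inside an async context): when the incoming object carries a valid ObjectId
// in "id" (the edit case), that document is excluded via { _id: { $ne: ... } },
// so renaming a channel to its own current name is not reported as a duplicate.
const isDuplicate = await new EmailRepository().isDuplicateChannelName({
  id: "64190aa1c2b4a51d9c3e7f12",
  name: "ops-alerts",
} as unknown as Channel);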
Return a list of Email channels from database and populate references + * + * @param fieldsToPopulate reference fields to populate within documents + * @param populateObject object which contains information regarding deep population + * @returns an array of Email channels with populated references + */ + public async findAllByAndDeepPopulate( + fieldsToPopulate: string[], + populateObject + ): Promise> { + return super.findAllByAndDeepPopulate( + fieldsToPopulate, + GenericDocument.CONFIG_TYPE_EMAIL_CHANNEL, + populateObject + ); + } - /** - * Returns an Email channel from database by id and populate references - * - * @param id The mongo hash id - * @param fieldsToPopulate reference fields to populate within document - * @param populateObject object which contains information regarding deep population - * @returns Email channel with populated references - */ - public async findOneByAndDeepPopulate(id: string, fieldsToPopulate: string[], - populateObject): Promise { - return super.findOneByAndDeepPopulate( - id, fieldsToPopulate, GenericDocument.CONFIG_TYPE_EMAIL_CHANNEL, populateObject); - } + /** + * Returns an Email channel from database by id and populate references + * + * @param id The mongo hash id + * @param fieldsToPopulate reference fields to populate within document + * @param populateObject object which contains information regarding deep population + * @returns Email channel with populated references + */ + public async findOneByAndDeepPopulate( + id: string, + fieldsToPopulate: string[], + populateObject + ): Promise { + return super.findOneByAndDeepPopulate( + id, + fieldsToPopulate, + GenericDocument.CONFIG_TYPE_EMAIL_CHANNEL, + populateObject + ); + } } /** * Implements specific operations for Opsgenie model in database */ export class OpsgenieRepository extends ChannelRepository { - public constructor() { - super(OpsgenieModel); - } + public constructor() { + super(OpsgenieModel); + } - /** - * Return a list of Opsgenie channels from database and populate references - * - * @param fieldsToPopulate reference fields to populate within documents - * @param populateObject object which contains information regarding deep population - * @returns an array of Opsgenie channels with populated references - */ - public async findAllByAndDeepPopulate(fieldsToPopulate: string[], - populateObject): Promise> { - return super.findAllByAndDeepPopulate( - fieldsToPopulate, GenericDocument.CONFIG_TYPE_OPSGENIE_CHANNEL, populateObject); - } + /** + * Return a list of Opsgenie channels from database and populate references + * + * @param fieldsToPopulate reference fields to populate within documents + * @param populateObject object which contains information regarding deep population + * @returns an array of Opsgenie channels with populated references + */ + public async findAllByAndDeepPopulate( + fieldsToPopulate: string[], + populateObject + ): Promise> { + return super.findAllByAndDeepPopulate( + fieldsToPopulate, + GenericDocument.CONFIG_TYPE_OPSGENIE_CHANNEL, + populateObject + ); + } - /** - * Returns an Opsgenie channel from database by id and populate references - * - * @param id The mongo hash id - * @param fieldsToPopulate reference fields to populate within document - * @param populateObject object which contains information regarding deep population - * @returns Opsgenie channel with populated references - */ - public async findOneByAndDeepPopulate(id: string, fieldsToPopulate: string[], - populateObject): Promise { - return super.findOneByAndDeepPopulate( - id, 
fieldsToPopulate, GenericDocument.CONFIG_TYPE_OPSGENIE_CHANNEL, populateObject); - } + /** + * Returns an Opsgenie channel from database by id and populate references + * + * @param id The mongo hash id + * @param fieldsToPopulate reference fields to populate within document + * @param populateObject object which contains information regarding deep population + * @returns Opsgenie channel with populated references + */ + public async findOneByAndDeepPopulate( + id: string, + fieldsToPopulate: string[], + populateObject + ): Promise { + return super.findOneByAndDeepPopulate( + id, + fieldsToPopulate, + GenericDocument.CONFIG_TYPE_OPSGENIE_CHANNEL, + populateObject + ); + } } /** * Implements specific operations for PagerDuty model in database */ export class PagerDutyRepository extends ChannelRepository { - public constructor() { - super(PagerDutyModel); - } + public constructor() { + super(PagerDutyModel); + } - /** - * Return a list of PagerDuty channels from database and populate references - * - * @param fieldsToPopulate reference fields to populate within documents - * @param populateObject object which contains information regarding deep population - * @returns an array of PagerDuty channels with populated references - */ - public async findAllByAndDeepPopulate(fieldsToPopulate: string[], - populateObject): Promise> { - return super.findAllByAndDeepPopulate( - fieldsToPopulate, GenericDocument.CONFIG_TYPE_PAGERDUTY_CHANNEL, populateObject); - } + /** + * Return a list of PagerDuty channels from database and populate references + * + * @param fieldsToPopulate reference fields to populate within documents + * @param populateObject object which contains information regarding deep population + * @returns an array of PagerDuty channels with populated references + */ + public async findAllByAndDeepPopulate( + fieldsToPopulate: string[], + populateObject + ): Promise> { + return super.findAllByAndDeepPopulate( + fieldsToPopulate, + GenericDocument.CONFIG_TYPE_PAGERDUTY_CHANNEL, + populateObject + ); + } - /** - * Returns a PagerDuty channel from database by id and populate references - * - * @param id The mongo hash id - * @param fieldsToPopulate reference fields to populate within document - * @param populateObject object which contains information regarding deep population - * @returns PagerDuty channel with populated references - */ - public async findOneByAndDeepPopulate(id: string, fieldsToPopulate: string[], - populateObject): Promise { - return super.findOneByAndDeepPopulate( - id, fieldsToPopulate, GenericDocument.CONFIG_TYPE_PAGERDUTY_CHANNEL, populateObject); - } + /** + * Returns a PagerDuty channel from database by id and populate references + * + * @param id The mongo hash id + * @param fieldsToPopulate reference fields to populate within document + * @param populateObject object which contains information regarding deep population + * @returns PagerDuty channel with populated references + */ + public async findOneByAndDeepPopulate( + id: string, + fieldsToPopulate: string[], + populateObject + ): Promise { + return super.findOneByAndDeepPopulate( + id, + fieldsToPopulate, + GenericDocument.CONFIG_TYPE_PAGERDUTY_CHANNEL, + populateObject + ); + } } /** * Implements specific operations for Slack model in database */ export class SlackRepository extends ChannelRepository { - public constructor() { - super(SlackModel); - } + public constructor() { + super(SlackModel); + } - /** - * Return a list of Slack channels from database and populate references - * - * @param fieldsToPopulate 
reference fields to populate within documents - * @param populateObject object which contains information regarding deep population - * @returns an array of Slack channels with populated references - */ - public async findAllByAndDeepPopulate(fieldsToPopulate: string[], - populateObject): Promise> { - return super.findAllByAndDeepPopulate( - fieldsToPopulate, GenericDocument.CONFIG_TYPE_SLACK_CHANNEL, populateObject); - } + /** + * Return a list of Slack channels from database and populate references + * + * @param fieldsToPopulate reference fields to populate within documents + * @param populateObject object which contains information regarding deep population + * @returns an array of Slack channels with populated references + */ + public async findAllByAndDeepPopulate( + fieldsToPopulate: string[], + populateObject + ): Promise> { + return super.findAllByAndDeepPopulate( + fieldsToPopulate, + GenericDocument.CONFIG_TYPE_SLACK_CHANNEL, + populateObject + ); + } - /** - * Returns a Slack channel from database by id and populate references - * - * @param id The mongo hash id - * @param fieldsToPopulate reference fields to populate within document - * @param populateObject object which contains information regarding deep population - * @returns Slack channel with populated references - */ - public async findOneByAndDeepPopulate(id: string, fieldsToPopulate: string[], - populateObject): Promise { - return super.findOneByAndDeepPopulate( - id, fieldsToPopulate, GenericDocument.CONFIG_TYPE_SLACK_CHANNEL, populateObject); - } + /** + * Returns a Slack channel from database by id and populate references + * + * @param id The mongo hash id + * @param fieldsToPopulate reference fields to populate within document + * @param populateObject object which contains information regarding deep population + * @returns Slack channel with populated references + */ + public async findOneByAndDeepPopulate( + id: string, + fieldsToPopulate: string[], + populateObject + ): Promise { + return super.findOneByAndDeepPopulate( + id, + fieldsToPopulate, + GenericDocument.CONFIG_TYPE_SLACK_CHANNEL, + populateObject + ); + } } /** * Implements specific operations for Telegram model in database */ export class TelegramRepository extends ChannelRepository { - public constructor() { - super(TelegramModel); - } + public constructor() { + super(TelegramModel); + } - /** - * Return a list of Telegram channels from database and populate references - * - * @param fieldsToPopulate reference fields to populate within documents - * @param populateObject object which contains information regarding deep population - * @returns an array of Telegram channels with populated references - */ - public async findAllByAndDeepPopulate(fieldsToPopulate: string[], - populateObject): Promise> { - return super.findAllByAndDeepPopulate( - fieldsToPopulate, GenericDocument.CONFIG_TYPE_TELEGRAM_CHANNEL, populateObject); - } + /** + * Return a list of Telegram channels from database and populate references + * + * @param fieldsToPopulate reference fields to populate within documents + * @param populateObject object which contains information regarding deep population + * @returns an array of Telegram channels with populated references + */ + public async findAllByAndDeepPopulate( + fieldsToPopulate: string[], + populateObject + ): Promise> { + return super.findAllByAndDeepPopulate( + fieldsToPopulate, + GenericDocument.CONFIG_TYPE_TELEGRAM_CHANNEL, + populateObject + ); + } - /** - * Returns a Telegram channel from database by id and populate 
references - * - * @param id The mongo hash id - * @param fieldsToPopulate reference fields to populate within document - * @param populateObject object which contains information regarding deep population - * @returns Telegram channel with populated references - */ - public async findOneByAndDeepPopulate(id: string, fieldsToPopulate: string[], - populateObject): Promise { - return super.findOneByAndDeepPopulate( - id, fieldsToPopulate, GenericDocument.CONFIG_TYPE_TELEGRAM_CHANNEL, populateObject); - } + /** + * Returns a Telegram channel from database by id and populate references + * + * @param id The mongo hash id + * @param fieldsToPopulate reference fields to populate within document + * @param populateObject object which contains information regarding deep population + * @returns Telegram channel with populated references + */ + public async findOneByAndDeepPopulate( + id: string, + fieldsToPopulate: string[], + populateObject + ): Promise { + return super.findOneByAndDeepPopulate( + id, + fieldsToPopulate, + GenericDocument.CONFIG_TYPE_TELEGRAM_CHANNEL, + populateObject + ); + } } /** * Implements specific operations for Twilio model in database */ export class TwilioRepository extends ChannelRepository { - public constructor() { - super(TwilioModel); - } + public constructor() { + super(TwilioModel); + } - /** - * Return a list of Twilio channels from database and populate references - * - * @param fieldsToPopulate reference fields to populate within documents - * @param populateObject object which contains information regarding deep population - * @returns an array of Twilio channels with populated references - */ - public async findAllByAndDeepPopulate(fieldsToPopulate: string[], - populateObject): Promise> { - return super.findAllByAndDeepPopulate( - fieldsToPopulate, GenericDocument.CONFIG_TYPE_TWILIO_CHANNEL, populateObject); - } + /** + * Return a list of Twilio channels from database and populate references + * + * @param fieldsToPopulate reference fields to populate within documents + * @param populateObject object which contains information regarding deep population + * @returns an array of Twilio channels with populated references + */ + public async findAllByAndDeepPopulate( + fieldsToPopulate: string[], + populateObject + ): Promise> { + return super.findAllByAndDeepPopulate( + fieldsToPopulate, + GenericDocument.CONFIG_TYPE_TWILIO_CHANNEL, + populateObject + ); + } - /** - * Returns a Twilio channel from database by id and populate references - * - * @param id The mongo hash id - * @param fieldsToPopulate reference fields to populate within document - * @param populateObject object which contains information regarding deep population - * @returns Twilio channel with populated references - */ - public async findOneByAndDeepPopulate(id: string, fieldsToPopulate: string[], - populateObject): Promise { - return super.findOneByAndDeepPopulate( - id, fieldsToPopulate, GenericDocument.CONFIG_TYPE_TWILIO_CHANNEL, populateObject); - } + /** + * Returns a Twilio channel from database by id and populate references + * + * @param id The mongo hash id + * @param fieldsToPopulate reference fields to populate within document + * @param populateObject object which contains information regarding deep population + * @returns Twilio channel with populated references + */ + public async findOneByAndDeepPopulate( + id: string, + fieldsToPopulate: string[], + populateObject + ): Promise { + return super.findOneByAndDeepPopulate( + id, + fieldsToPopulate, + 
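// Sketch (populate arguments are assumed, not from this patch; inside an
// async context): each typed repository pins its channel's config_type
// constant, so call sites only supply the populate arguments.
const twilios = await new TwilioRepository().findAllByAndDeepPopulate(
  ["type", "configs"],
  { path: "configs", populate: { path: "base_chain" } }
);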
GenericDocument.CONFIG_TYPE_TWILIO_CHANNEL, + populateObject + ); + } } diff --git a/api/src/v1/entity/repository/ConfigRepository.ts b/api/src/v1/entity/repository/ConfigRepository.ts index 0aa0f471..d51bd241 100644 --- a/api/src/v1/entity/repository/ConfigRepository.ts +++ b/api/src/v1/entity/repository/ConfigRepository.ts @@ -1,78 +1,83 @@ -import {ObjectID} from "mongodb"; +import { ObjectId } from "mongodb"; import mongoose from "mongoose"; -import {Config} from "../../../../../entities/ts/Config"; -import {GenericDocument, ModelName} from "../../../constant/mongoose"; -import {ConfigModel} from "../model/ConfigModel"; -import {AbstractRepository} from "./AbstractRepository"; +import { Config } from "../../../../../entities/ts/Config"; +import { GenericDocument, ModelName } from "../../../constant/mongoose"; +import { ConfigModel } from "../model/ConfigModel"; +import { AbstractRepository } from "./AbstractRepository"; /** * Implements specific operations for Config model in database */ export class ConfigRepository extends AbstractRepository { - public constructor() { - super(ConfigModel); - } - - /** - * Return a list of Config from database - */ - public async findAll(): Promise> { - return this.model.find({ - config_type: new ObjectID(GenericDocument.CONFIG_TYPE_SUB_CHAIN) - }).populate({path: 'base_chain', select: {'name': 1, 'value': 1}}) - .populate({path: 'repositories', populate: {path: 'type'}}) - .populate({path: 'severity_alerts', populate: {path: 'type'}}); - } - - /** - * Return item Config from database by id - * - */ - public async findOneById(id: string): Promise { - return this.model.findOne({ - _id: new ObjectID(id), - config_type: new ObjectID(GenericDocument.CONFIG_TYPE_SUB_CHAIN) - }).populate({path: 'base_chain', select: {'name': 1, 'value': 1}}) - .populate({path: 'repositories', populate: {path: 'type'}}) - .populate({path: 'severity_alerts', populate: {path: 'type'}}); - } - - /** - * Return a list of Config from database by criteria - * - * @param criteria - */ - public async findBy(criteria: object): Promise> { - criteria['config_type'] = new ObjectID(GenericDocument.CONFIG_TYPE_SUB_CHAIN); - return this.model.find(criteria); - } + public constructor() { + super(ConfigModel); + } - /** - * Check if sub chain within the config already exists on Database - * - * @param config The config or request object - * @returns true if sub chain name exists, otherwise false - */ - public async isDuplicateSubChain(config: Config): Promise { + /** + * Return a list of Config from database + */ + public async findAll(): Promise> { + return this.model + .find({ + config_type: new ObjectId(GenericDocument.CONFIG_TYPE_SUB_CHAIN), + }) + .populate({ path: "base_chain", select: { name: 1, value: 1 } }) + .populate({ path: "repositories", populate: { path: "type" } }) + .populate({ path: "severity_alerts", populate: { path: "type" } }); + } - const hasName = config && config.subChain && config.subChain.name; - if (hasName) { + /** + * Return item Config from database by id + * + */ + public async findOneById(id: string): Promise { + return this.model + .findOne({ + _id: new ObjectId(id), + config_type: new ObjectId(GenericDocument.CONFIG_TYPE_SUB_CHAIN), + }) + .populate({ path: "base_chain", select: { name: 1, value: 1 } }) + .populate({ path: "repositories", populate: { path: "type" } }) + .populate({ path: "severity_alerts", populate: { path: "type" } }); + } - const criteria = { - 'sub_chain.name': config.subChain.name - } + /** + * Return a list of Config from database by 
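// Note plus sketch (behaviour read from the queries above; inside an async
// context): every ConfigRepository lookup is pinned to
// GenericDocument.CONFIG_TYPE_SUB_CHAIN, so documents of other config types
// sharing the same collection are never returned.
const subChainConfigs = await new ConfigRepository().findAll();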
criteria + * + * @param criteria + */ + public async findBy(criteria: object): Promise> { + criteria["config_type"] = new ObjectId( + GenericDocument.CONFIG_TYPE_SUB_CHAIN + ); + return this.model.find(criteria); + } - // for edit case, to ignore self register - const isValidConfigID = 'id' in config && mongoose.Types.ObjectId.isValid(config.id); + /** + * Check if sub chain within the config already exists on Database + * + * @param config The config or request object + * @returns true if sub chain name exists, otherwise false + */ + public async isDuplicateSubChain(config: Config): Promise { + const hasName = config && config.subChain && config.subChain.name; + if (hasName) { + const criteria = { + "sub_chain.name": config.subChain.name, + }; - if (isValidConfigID) { - criteria['_id'] = {'$ne': new ObjectID(config.id)}; - } + // for edit case, to ignore self register + const isValidConfigID = + "id" in config && mongoose.Types.ObjectId.isValid(config.id); - const configs = await this.findBy(criteria); - return configs && configs.length > 0; - } + if (isValidConfigID) { + criteria["_id"] = { $ne: new ObjectId(config.id) }; + } - return false; + const configs = await this.findBy(criteria); + return configs && configs.length > 0; } + + return false; + } } diff --git a/api/src/v1/entity/repository/GenericRepository.ts b/api/src/v1/entity/repository/GenericRepository.ts index a4cc5d6d..c04ef215 100644 --- a/api/src/v1/entity/repository/GenericRepository.ts +++ b/api/src/v1/entity/repository/GenericRepository.ts @@ -1,63 +1,72 @@ -import {Generic} from "../../../../../entities/ts/Generic"; -import {SeverityAlertSubconfig} from "../../../../../entities/ts/SeverityAlertSubconfig"; -import {GenericModel} from "../model/GenericModel"; -import {AbstractRepository} from "./AbstractRepository"; -import {SeverityAlertSubconfigModel} from "../model/SeverityAlertSubconfigSchema"; -import {ObjectID} from "mongodb"; +import { Generic } from "../../../../../entities/ts/Generic"; +import { SeverityAlertSubconfig } from "../../../../../entities/ts/SeverityAlertSubconfig"; +import { GenericModel } from "../model/GenericModel"; +import { AbstractRepository } from "./AbstractRepository"; +import { SeverityAlertSubconfigModel } from "../model/SeverityAlertSubconfigSchema"; +import { ObjectId } from "mongodb"; /** * Implements specific operations for Generic Types model in database */ export class GenericRepository extends AbstractRepository { - public constructor() { - super(GenericModel); - } + public constructor() { + super(GenericModel); + } - /** - * Return a list of Generic types by group - * - * @param group The name of group that you want to filter - */ - public async findByGroup(group: string): Promise { - return this.findBy({group: group}); - } + /** + * Return a list of Generic types by group + * + * @param group The name of group that you want to filter + */ + public async findByGroup(group: string): Promise { + return this.findBy({ group: group }); + } - /** - * Return a list of Generic types by group and populate references - * - * @param group The name of group that you want to filter - * @param fieldsToPopulate reference fields to populate within documents - */ - public async findByGroupAndPopulate(group: string, fieldsToPopulate: string[]): Promise { - return this.findByAndPopulate({group: group}, fieldsToPopulate); - } + /** + * Return a list of Generic types by group and populate references + * + * @param group The name of group that you want to filter + * @param fieldsToPopulate reference 
fields to populate within documents + */ + public async findByGroupAndPopulate( + group: string, + fieldsToPopulate: string[] + ): Promise { + return this.findByAndPopulate({ group: group }, fieldsToPopulate); + } - /** - * Return a Generic type by group and ID - * - * @param group The name of group that you want to filter - * @param id The mongo hash id - */ - public async findOneByGroupAndId(group: string, id: string): Promise { - return this.findOneBy({group: group, _id: new ObjectID(id)}); - } + /** + * Return a Generic type by group and ID + * + * @param group The name of group that you want to filter + * @param id The mongo hash id + */ + public async findOneByGroupAndId( + group: string, + id: string + ): Promise { + return this.findOneBy({ group: group, _id: new ObjectId(id) }); + } } /** * Implements specific operations for SeverityAlertSubconfig model in database */ export class SeverityAlertSubconfigRepository extends AbstractRepository { - public constructor() { - super(SeverityAlertSubconfigModel); - } + public constructor() { + super(SeverityAlertSubconfigModel); + } - /** - * Return a list of Generic types by group and populate references - * - * @param group The name of group that you want to filter - * @param fieldsToPopulate reference fields to populate within documents - */ - public async findByGroupAndPopulate(group: string, fieldsToPopulate: string[]): Promise { - return this.findByAndPopulate({group: group}, fieldsToPopulate); - } + /** + * Return a list of Generic types by group and populate references + * + * @param group The name of group that you want to filter + * @param fieldsToPopulate reference fields to populate within documents + */ + public async findByGroupAndPopulate( + group: string, + fieldsToPopulate: string[] + ): Promise { + return this.findByAndPopulate({ group: group }, fieldsToPopulate); + } } diff --git a/api/src/v1/rest/channel/ChannelResource.ts b/api/src/v1/rest/channel/ChannelResource.ts index 75114473..99b3ee6c 100644 --- a/api/src/v1/rest/channel/ChannelResource.ts +++ b/api/src/v1/rest/channel/ChannelResource.ts @@ -1,44 +1,51 @@ import express from "express"; -import {AbstractChannel, Channel} from "../../../../../entities/ts/channels/AbstractChannel"; -import {EmailChannel} from "../../../../../entities/ts/channels/EmailChannel"; -import {OpsgenieChannel} from "../../../../../entities/ts/channels/OpsgenieChannel"; -import {PagerDutyChannel} from "../../../../../entities/ts/channels/PagerDutyChannel"; -import {SlackChannel} from "../../../../../entities/ts/channels/SlackChannel"; -import {TelegramChannel} from "../../../../../entities/ts/channels/TelegramChannel"; -import {TwilioChannel} from "../../../../../entities/ts/channels/TwilioChannel"; -import {Generic} from "../../../../../entities/ts/Generic"; import { - CouldNotRemoveDataFromDB, - CouldNotRetrieveDataFromDB, - CouldNotSaveDataToDB, - DuplicateWarning, - InvalidIDError, - MissingParameterWarning, - NotFoundWarning, - ValidationDataError + AbstractChannel, + Channel, +} from "../../../../../entities/ts/channels/AbstractChannel"; +import { EmailChannel } from "../../../../../entities/ts/channels/EmailChannel"; +import { OpsgenieChannel } from "../../../../../entities/ts/channels/OpsgenieChannel"; +import { PagerDutyChannel } from "../../../../../entities/ts/channels/PagerDutyChannel"; +import { SlackChannel } from "../../../../../entities/ts/channels/SlackChannel"; +import { TelegramChannel } from "../../../../../entities/ts/channels/TelegramChannel"; +import { TwilioChannel } 
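Beneath the reformatting, the substantive change running through these repository files is the switch from the mongodb driver's legacy ObjectID export to ObjectId, which the deprecated alias gave way to in newer driver majors. A minimal sketch of the resulting idiom:

// The substantive change behind the reformat: ObjectID -> ObjectId.
import { ObjectId } from "mongodb";

const rawId = "507f1f77bcf86cd799439011"; // example 24-char hex string

// ObjectId.isValid guards user-supplied ids before constructing one.
if (ObjectId.isValid(rawId)) {
  const criteria = { _id: new ObjectId(rawId) };
  // criteria can now be handed to model.find/findOne as in the
  // repositories above.
}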
from "../../../../../entities/ts/channels/TwilioChannel"; +import { Generic } from "../../../../../entities/ts/Generic"; +import { + CouldNotRemoveDataFromDB, + CouldNotRetrieveDataFromDB, + CouldNotSaveDataToDB, + DuplicateWarning, + InvalidIDError, + MissingParameterWarning, + NotFoundWarning, + ValidationDataError, } from "../../../constant/server.feedback"; -import {ResponseError, ResponseNoContent, ResponseSuccess} from "../../entity/io/ResponseData"; import { - EmailRepository, - OpsgenieRepository, - PagerDutyRepository, - SlackRepository, - TelegramRepository, - TwilioRepository + ResponseError, + ResponseNoContent, + ResponseSuccess, +} from "../../entity/io/ResponseData"; +import { + EmailRepository, + OpsgenieRepository, + PagerDutyRepository, + SlackRepository, + TelegramRepository, + TwilioRepository, } from "../../entity/repository/ChannelRepository"; -import mongoose, {Document, Model} from "mongoose"; -import {MongooseUtil} from "../../../util/MongooseUtil"; -import {EmailModel} from "../../entity/model/channels/EmailModel"; -import {OpsgenieModel} from "../../entity/model/channels/OpsgenieModel"; -import {PagerDutyModel} from "../../entity/model/channels/PagerDutyModel"; -import {SlackModel} from "../../entity/model/channels/SlackModel"; -import {TelegramModel} from "../../entity/model/channels/TelegramModel"; -import {TwilioModel} from "../../entity/model/channels/TwilioModel"; -import {GenericRepository} from "../../entity/repository/GenericRepository"; -import {ObjectID} from "mongodb"; -import {GenericDocument} from "../../../constant/mongoose"; -import {ObjectUtil} from "../../../util/ObjectUtil"; -import {ConfigRepository} from "../../entity/repository/ConfigRepository"; +import mongoose, { Document, Model } from "mongoose"; +import { MongooseUtil } from "../../../util/MongooseUtil"; +import { EmailModel } from "../../entity/model/channels/EmailModel"; +import { OpsgenieModel } from "../../entity/model/channels/OpsgenieModel"; +import { PagerDutyModel } from "../../entity/model/channels/PagerDutyModel"; +import { SlackModel } from "../../entity/model/channels/SlackModel"; +import { TelegramModel } from "../../entity/model/channels/TelegramModel"; +import { TwilioModel } from "../../entity/model/channels/TwilioModel"; +import { GenericRepository } from "../../entity/repository/GenericRepository"; +import { ObjectId } from "mongodb"; +import { GenericDocument } from "../../../constant/mongoose"; +import { ObjectUtil } from "../../../util/ObjectUtil"; +import { ConfigRepository } from "../../entity/repository/ConfigRepository"; import { EmailOldModel } from "../../entity/model/channels/EmailOldModel"; import { OpsgenieOldModel } from "../../entity/model/channels/OpsgenieOldModel"; import { PagerDutyOldModel } from "../../entity/model/channels/PagerDutyOldModel"; @@ -51,578 +58,669 @@ import { TwilioOldModel } from "../../entity/model/channels/TwilioOldModel"; * Resource Controller for Panic Channel */ export class ChannelResource { - private readonly emailRepo: EmailRepository = null; - private readonly opsgenieRepo: OpsgenieRepository = null; - private readonly pagerDutyRepo: PagerDutyRepository = null; - private readonly slackRepo: SlackRepository = null; - private readonly telegramRepo: TelegramRepository = null; - private readonly twilioRepo: TwilioRepository = null; - - private readonly genericRepo: GenericRepository = null; - private readonly configRepo: ConfigRepository = null; - - constructor() { - this.emailRepo = new EmailRepository(); - this.opsgenieRepo = 
new OpsgenieRepository(); - this.pagerDutyRepo = new PagerDutyRepository(); - this.slackRepo = new SlackRepository(); - this.telegramRepo = new TelegramRepository(); - this.twilioRepo = new TwilioRepository(); - - this.genericRepo = new GenericRepository(); - this.configRepo = new ConfigRepository(); + private readonly emailRepo: EmailRepository = null; + private readonly opsgenieRepo: OpsgenieRepository = null; + private readonly pagerDutyRepo: PagerDutyRepository = null; + private readonly slackRepo: SlackRepository = null; + private readonly telegramRepo: TelegramRepository = null; + private readonly twilioRepo: TwilioRepository = null; + + private readonly genericRepo: GenericRepository = null; + private readonly configRepo: ConfigRepository = null; + + constructor() { + this.emailRepo = new EmailRepository(); + this.opsgenieRepo = new OpsgenieRepository(); + this.pagerDutyRepo = new PagerDutyRepository(); + this.slackRepo = new SlackRepository(); + this.telegramRepo = new TelegramRepository(); + this.twilioRepo = new TwilioRepository(); + + this.genericRepo = new GenericRepository(); + this.configRepo = new ConfigRepository(); + } + + /** + * Get a list of Channels from Database from channel collections + * + * @param req Request from Express + * @param res Response from Express + */ + public async getAll( + req: express.Request, + res: express.Response + ): Promise { + try { + const fieldsToPopulate: string[] = ["type"]; + const populateObject = { path: "configs", select: "sub_chain.name" }; + const emailChannels = await this.emailRepo.findAllByAndDeepPopulate( + fieldsToPopulate, + populateObject + ); + const opsgenieChannels = await this.opsgenieRepo.findAllByAndDeepPopulate( + fieldsToPopulate, + populateObject + ); + const pagerDutyChannels = + await this.pagerDutyRepo.findAllByAndDeepPopulate( + fieldsToPopulate, + populateObject + ); + const slackChannels = await this.slackRepo.findAllByAndDeepPopulate( + fieldsToPopulate, + populateObject + ); + const telegramChannels = await this.telegramRepo.findAllByAndDeepPopulate( + fieldsToPopulate, + populateObject + ); + const twilioChannels = await this.twilioRepo.findAllByAndDeepPopulate( + fieldsToPopulate, + populateObject + ); + + const channels: Channel[] = [].concat( + emailChannels, + opsgenieChannels, + pagerDutyChannels, + slackChannels, + telegramChannels, + twilioChannels + ); + + const response = new ResponseSuccess(res); + response.send(channels); + } catch (err: any) { + console.error(err); + const error = new CouldNotRetrieveDataFromDB(); + const response = new ResponseError(res, error); + response.send(); } - - /** - * Get a list of Channels from Database from channel collections - * - * @param req Request from Express - * @param res Response from Express - */ - public async getAll(req: express.Request, - res: express.Response): Promise { - try { - const fieldsToPopulate: string[] = ['type']; - const populateObject = {path: 'configs', select: 'sub_chain.name'}; - const emailChannels = await this.emailRepo.findAllByAndDeepPopulate(fieldsToPopulate, populateObject); - const opsgenieChannels = await this.opsgenieRepo.findAllByAndDeepPopulate(fieldsToPopulate, populateObject); - const pagerDutyChannels = await this.pagerDutyRepo.findAllByAndDeepPopulate(fieldsToPopulate, populateObject); - const slackChannels = await this.slackRepo.findAllByAndDeepPopulate(fieldsToPopulate, populateObject); - const telegramChannels = await this.telegramRepo.findAllByAndDeepPopulate(fieldsToPopulate, populateObject); - const twilioChannels 
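The reformatted getAll above awaits each of the six repositories in turn. Since the lookups are independent, an equivalent concurrent variant could fan them out with Promise.all; this is an alternative sketch only, not what the patch does, and it assumes the same findAllByAndDeepPopulate signature used above:

// Alternative sketch: issue the six per-channel queries concurrently.
async function getAllChannelsConcurrently(
  repos: {
    findAllByAndDeepPopulate(fields: string[], populate: object): Promise<any[]>;
  }[]
): Promise<any[]> {
  const fieldsToPopulate = ["type"];
  const populateObject = { path: "configs", select: "sub_chain.name" };
  const results = await Promise.all(
    repos.map((repo) =>
      repo.findAllByAndDeepPopulate(fieldsToPopulate, populateObject)
    )
  );
  return ([] as any[]).concat(...results); // flatten into one Channel list
}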
= await this.twilioRepo.findAllByAndDeepPopulate(fieldsToPopulate, populateObject); - - const channels: Channel[] = [].concat(emailChannels, opsgenieChannels, - pagerDutyChannels, slackChannels, telegramChannels, twilioChannels); - - const response = new ResponseSuccess(res); - response.send(channels); - } catch (err: any) { - console.error(err); - const error = new CouldNotRetrieveDataFromDB(); - const response = new ResponseError(res, error); - response.send(); - } + } + + /** + * Get channel by id from Database from channel collections + * + * @param req Request from Express + * @param res Response from Express + */ + public async getItem( + req: express.Request, + res: express.Response + ): Promise { + try { + const responseError = this.isMissingParam(req, res); + if (responseError instanceof ResponseError) { + responseError.send(); + return; + } + + let channel = await this.getChannelById(res, req.params.id); + if (channel instanceof ResponseError) { + channel.send(); + return; + } + + const response = new ResponseSuccess(res); + response.send(channel); + } catch (err: any) { + console.error(err); + const error = new CouldNotRetrieveDataFromDB(); + const response = new ResponseError(res, error); + response.send(); } - - /** - * Get channel by id from Database from channel collections - * - * @param req Request from Express - * @param res Response from Express - */ - public async getItem(req: express.Request, - res: express.Response): Promise { - try { - const responseError = this.isMissingParam(req, res); - if (responseError instanceof ResponseError) { - responseError.send(); - return; - } - - let channel = await this.getChannelById(res, req.params.id); - if (channel instanceof ResponseError) { - channel.send(); - return; - } - - const response = new ResponseSuccess(res); - response.send(channel); - } catch (err: any) { - console.error(err); - const error = new CouldNotRetrieveDataFromDB(); - const response = new ResponseError(res, error); - response.send(); - } + } + + /** + * Gets a channel by ID, if not found send a `NotFoundWarning` over a `ResponseError` instance. + * + * @param res Response from Express + * @param id Channel ID + * @returns a promise containing either a `Channels` object or a `ResponseError` instance + */ + private async getChannelById( + res: express.Response, + id: string + ): Promise { + const isValid = mongoose.Types.ObjectId.isValid(id); + if (!isValid) { + const error = new InvalidIDError(id); + return new ResponseError(res, error); } - /** - * Gets a channel by ID, if not found send a `NotFoundWarning` over a `ResponseError` instance. 
- * - * @param res Response from Express - * @param id Channel ID - * @returns a promise containing either a `Channels` object or a `ResponseError` instance - */ - private async getChannelById(res: express.Response, id: string): Promise { - const isValid = mongoose.Types.ObjectId.isValid(id); - if (!isValid) { - const error = new InvalidIDError(id); - return new ResponseError(res, error); - } - - const allRepos = [ - this.emailRepo, this.opsgenieRepo, this.pagerDutyRepo, - this.slackRepo, this.telegramRepo, this.twilioRepo - ]; - - let channel: Channel = null; + const allRepos = [ + this.emailRepo, + this.opsgenieRepo, + this.pagerDutyRepo, + this.slackRepo, + this.telegramRepo, + this.twilioRepo, + ]; + + let channel: Channel = null; + + for (const repo of allRepos) { + if (!channel) { + channel = await repo.findOneByAndDeepPopulate(id, ["type"], { + path: "configs", + select: "sub_chain.name", + }); + } + } - for (const repo of allRepos) { - if (!channel) { - channel = await repo.findOneByAndDeepPopulate( - id, ['type'], - {path: 'configs', select: 'sub_chain.name'} - ); - } - } + if (!channel) { + return new ResponseError(res, new NotFoundWarning()); + } - if (!channel) { - return new ResponseError(res, new NotFoundWarning()); - } + return channel; + } + + /** + * Create a new Channel on Database in respective channel collection + * + * @param req Request from Express + * @param res Response from Express + */ + public async create( + req: express.Request, + res: express.Response + ): Promise { + try { + const channelType = await this.checkChannelTypeOnCreate(req, res); + if (channelType instanceof ResponseError) { + channelType.send(); + return; + } + + delete req.body.type; + // @ts-ignore + req.body["type"] = channelType._id.toString(); + + const invalidFields = ["configs"]; + const ChannelModel = ChannelResource.getModelByChannelTypeValue( + channelType.value + ); + const initialisedChannelObject = + ChannelResource.getInitialisedObjectByChannelTypeValue( + channelType.value + ); + if (!ChannelModel || !initialisedChannelObject) { + const error = new ValidationDataError(); + error.description += " Unrecognised channel type."; + const response = new ResponseError(res, error); + response.send(); + } + let channel: Channel = MongooseUtil.merge( + initialisedChannelObject, + req.body, + invalidFields + ); + let model: Document = new ChannelModel(channel.toJSON()); + + // For channel name duplication validation, we can use email repo + // since we are using the same collection for channels/configs. 
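getChannelById above probes all six collections and keeps the first hit, but because the loop only guards each call with if (!channel) rather than breaking, every repository is still visited even after a match. An early-exit variant, sketched under the same assumed repository API:

// Sketch of an early-exit version of the lookup loop in getChannelById.
async function findChannelAcrossRepos(
  allRepos: any[],
  id: string
): Promise<any> {
  for (const repo of allRepos) {
    const channel = await repo.findOneByAndDeepPopulate(id, ["type"], {
      path: "configs",
      select: "sub_chain.name",
    });
    if (channel) {
      return channel; // first collection that recognises the id wins
    }
  }
  return null;
}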
+ let duplicate = await this.emailRepo.isDuplicateChannelName(channel); + if (duplicate) { + const response = new ResponseError(res, new DuplicateWarning("name")); + response.send(); + return; + } + + await this.save(res, model, channelType.value); + } catch (err: any) { + console.error(err); + const error = new CouldNotSaveDataToDB(); + const response = new ResponseError(res, error); + response.send(); + } + } + + public async checkChannelTypeOnCreate( + req: express.Request, + res: express.Response + ): Promise { + if (!("type" in req.body) || typeof req.body.type.id !== "string") { + const error = new ValidationDataError(); + error.description += " Field type not specified or is not a string."; + return new ResponseError(res, error); + } + const channelType = await this.genericRepo.findOneByGroupAndId( + "channel_type", + req.body.type.id + ); + + if (!channelType) { + const error = new ValidationDataError(); + error.description += ` Channel type with id ${req.body.type.id} not found.`; + return new ResponseError(res, error); + } - return channel; + return channelType; + } + + /** + * Returns the config type ID for the given channel type value. + * + * @param channelTypeValue the value of the channel type as a string + * @returns config type ID for a channel if type corresponds to a channel, null otherwise + */ + private static getConfigTypeIdByChannelTypeValue( + channelTypeValue: string + ): GenericDocument | null { + switch (channelTypeValue.toLowerCase()) { + case "email": + return GenericDocument.CONFIG_TYPE_EMAIL_CHANNEL; + case "opsgenie": + return GenericDocument.CONFIG_TYPE_OPSGENIE_CHANNEL; + case "pagerduty": + return GenericDocument.CONFIG_TYPE_PAGERDUTY_CHANNEL; + case "slack": + return GenericDocument.CONFIG_TYPE_SLACK_CHANNEL; + case "telegram": + return GenericDocument.CONFIG_TYPE_TELEGRAM_CHANNEL; + case "twilio": + return GenericDocument.CONFIG_TYPE_TWILIO_CHANNEL; + default: + return null; + } + } + + /** + * Save document in channel collections. If an error occurs, an error response is sent via `res`. 
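getConfigTypeIdByChannelTypeValue above maps the six channel type values onto config-type ids with a switch; the same dispatch could be captured declaratively in a lookup table. An alternative sketch reusing the patch's own GenericDocument constants:

// Alternative sketch: table-driven dispatch instead of a switch statement.
const CONFIG_TYPE_BY_CHANNEL: Record<string, GenericDocument> = {
  email: GenericDocument.CONFIG_TYPE_EMAIL_CHANNEL,
  opsgenie: GenericDocument.CONFIG_TYPE_OPSGENIE_CHANNEL,
  pagerduty: GenericDocument.CONFIG_TYPE_PAGERDUTY_CHANNEL,
  slack: GenericDocument.CONFIG_TYPE_SLACK_CHANNEL,
  telegram: GenericDocument.CONFIG_TYPE_TELEGRAM_CHANNEL,
  twilio: GenericDocument.CONFIG_TYPE_TWILIO_CHANNEL,
};

function configTypeIdFor(channelTypeValue: string): GenericDocument | null {
  return CONFIG_TYPE_BY_CHANNEL[channelTypeValue.toLowerCase()] ?? null;
}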
+ * + * @param res Response from Express + * @param model The channel document object + * @param channelTypeValue The value of the channel type for Channel document passed + */ + private async save( + res: express.Response, + model: Document, + channelTypeValue: string + ): Promise { + const isValid = await MongooseUtil.isValid(model); + if (!isValid) { + const error = new ValidationDataError(); + const response = new ResponseError(res, error); + await response.addMongooseErrors(model); + response.send(); + return; } - /** - * Create a new Channel on Database in respective channel collection - * - * @param req Request from Express - * @param res Response from Express - */ - public async create(req: express.Request, - res: express.Response): Promise { - try { - const channelType = await this.checkChannelTypeOnCreate(req, res); - if (channelType instanceof ResponseError) { - channelType.send(); - return; - } - - delete req.body.type; - // @ts-ignore - req.body['type'] = channelType._id.toString(); - - const invalidFields = ['configs']; - const ChannelModel = ChannelResource.getModelByChannelTypeValue(channelType.value); - const initialisedChannelObject = ChannelResource.getInitialisedObjectByChannelTypeValue(channelType.value); - if (!ChannelModel || !initialisedChannelObject) { - const error = new ValidationDataError(); - error.description += ' Unrecognised channel type.'; - const response = new ResponseError(res, error); - response.send(); - } - let channel: Channel = MongooseUtil.merge(initialisedChannelObject, req.body, invalidFields); - let model: Document = new ChannelModel(channel.toJSON()); - - // For channel name duplication validation, we can use email repo - // since we are using the same collection for channels/configs. - let duplicate = await this.emailRepo.isDuplicateChannelName(channel); - if (duplicate) { - const response = new ResponseError(res, - new DuplicateWarning('name')); - response.send(); - return; - } - - await this.save(res, model, channelType.value); - } catch (err: any) { - console.error(err); - const error = new CouldNotSaveDataToDB(); - const response = new ResponseError(res, error); - response.send(); + model.set( + "config_type", + new ObjectId( + ChannelResource.getConfigTypeIdByChannelTypeValue(channelTypeValue) + ) + ); + + const doc = await model.save(); + + const response = new ResponseSuccess(res); + response.send(doc.id); + } + + /** + * Returns a model for the given channel type value. + * + * @param channelTypeValue the value of the channel type as a string + * @returns a Model for a channel if type corresponds to a channel, null otherwise + */ + private static getModelByChannelTypeValue( + channelTypeValue: string + ): Model | null { + switch (channelTypeValue.toLowerCase()) { + case "email": + return EmailModel; + case "opsgenie": + return OpsgenieModel; + case "pagerduty": + return PagerDutyModel; + case "slack": + return SlackModel; + case "telegram": + return TelegramModel; + case "twilio": + return TwilioModel; + default: + return null; + } + } + + /** + * Returns an initialised object corresponding to the given channel type value. 
+ * + * @param channelTypeValue the value of the channel type as a string + * @returns an initialised object for a channel if type corresponds to a channel, null otherwise + */ + private static getInitialisedObjectByChannelTypeValue( + channelTypeValue: string + ): Channel | null { + switch (channelTypeValue.toLowerCase()) { + case "email": + return new EmailChannel(); + case "opsgenie": + return new OpsgenieChannel(); + case "pagerduty": + return new PagerDutyChannel(); + case "slack": + return new SlackChannel(); + case "telegram": + return new TelegramChannel(); + case "twilio": + return new TwilioChannel(); + default: + return null; + } + } + + /** + * Updates an existing Channel on the Database in respective channel collection + * + * @param req Request from Express + * @param res Response from Express + */ + public async update( + req: express.Request, + res: express.Response + ): Promise { + try { + const responseError = this.isMissingParam(req, res); + if (responseError instanceof ResponseError) { + responseError.send(); + return; + } + + // to avoid updating certain fields update + ["type", "configs"].forEach((field) => { + if (field in req.body) { + delete req.body[field]; } + }); + + // For channel name duplication validation, we can use email repo + // since we are using the same collection for channels/configs. + let duplicate = await this.emailRepo.isDuplicateChannelName({ + ...req.body, + id: req.params.id, + } as Channel); + + if (duplicate) { + const response = new ResponseError(res, new DuplicateWarning("name")); + response.send(); + return; + } + + const channel = await this.getChannelById(res, req.params.id); + if (channel instanceof ResponseError) { + channel.send(); + return; + } + + await this.createBkp(channel); + + const request = ObjectUtil.deepCamelToSnake(req.body); + const model: Document = MongooseUtil.merge(channel, request); + + await this.save(res, model, channel.type.value); + } catch (err: any) { + console.error(err); + const error = new CouldNotSaveDataToDB(); + const response = new ResponseError(res, error); + response.send(); } - - public async checkChannelTypeOnCreate(req: express.Request, - res: express.Response): Promise { - if (!('type' in req.body) || typeof req.body.type.id !== 'string') { - const error = new ValidationDataError(); - error.description += ' Field type not specified or is not a string.'; - return new ResponseError(res, error); + } + + /** + * Removes a Channel by ID on Database in respective channel collection + * + * @param req Request from Express + * @param res Response from Express + */ + public async remove( + req: express.Request, + res: express.Response + ): Promise { + try { + const responseError = this.isMissingParam(req, res); + if (responseError instanceof ResponseError) { + responseError.send(); + return; + } + + const channel = await this.emailRepo.findOneById(req.params.id); + this.createBkp(channel as any); + + // Here we can use email repo since we are using the same collections + // and IDs are always unique. 
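update() above first strips the immutable type and configs fields from the request, then converts the remaining camelCase payload to the snake_case shape stored in Mongo before merging. Roughly (values are hypothetical, and the exact behaviour of ObjectUtil.deepCamelToSnake is inferred from its usage here):

// Illustration of the request shaping in update().
const body: Record<string, unknown> = {
  channelName: "ops-alerts",
  type: { id: "some-id" },
  configs: [],
};

// Immutable fields are stripped so they can never change via this route.
["type", "configs"].forEach((field) => {
  delete body[field];
});

// deepCamelToSnake is assumed to recursively rename keys to match the
// stored documents, e.g. { channelName: "ops-alerts" } becomes
// { channel_name: "ops-alerts" }, after which MongooseUtil.merge folds
// the result into the loaded channel document.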
+ const isDeleted = await this.emailRepo.deleteOneById(req.params.id); + if (!isDeleted) { + const response = new ResponseError(res, new NotFoundWarning()); + response.send(); + return; + } + + const response = new ResponseNoContent(res); + response.send(); + } catch (err: any) { + console.error(err); + const error = new CouldNotRemoveDataFromDB(); + const response = new ResponseError(res, error); + response.send(); + } + } + + /** + * Create a link between a channel and a config on Database + * + * @param req Request from Express + * @param res Response from Express + */ + public async createConfigLink( + req: express.Request, + res: express.Response + ): Promise { + try { + const config_id = await this.configValidation(req, res); + + if (config_id) { + const channel = await this.getChannelById(res, req.params.channel_id); + + if (channel instanceof ResponseError) { + channel.send(); + return; } - const channelType = await this.genericRepo.findOneByGroupAndId('channel_type', req.body.type.id); - if (!channelType) { - const error = new ValidationDataError(); - error.description += ` Channel type with id ${req.body.type.id} not found.`; - return new ResponseError(res, error); - } + await this.createBkp(channel); - return channelType; - } + // Here we can use email repo since we are using the same collections + // and IDs are always unique. + const result = await this.emailRepo.linkConfigToChannel( + req.params.channel_id, + config_id + ); - /** - * Returns the config type ID for the given channel type value. - * - * @param channelTypeValue the value of the channel type as a string - * @returns config type ID for a channel if type corresponds to a channel, null otherwise - */ - private static getConfigTypeIdByChannelTypeValue(channelTypeValue: string): GenericDocument | null { - switch (channelTypeValue.toLowerCase()) { - case 'email': - return GenericDocument.CONFIG_TYPE_EMAIL_CHANNEL; - case 'opsgenie': - return GenericDocument.CONFIG_TYPE_OPSGENIE_CHANNEL; - case 'pagerduty': - return GenericDocument.CONFIG_TYPE_PAGERDUTY_CHANNEL; - case 'slack': - return GenericDocument.CONFIG_TYPE_SLACK_CHANNEL; - case 'telegram': - return GenericDocument.CONFIG_TYPE_TELEGRAM_CHANNEL; - case 'twilio': - return GenericDocument.CONFIG_TYPE_TWILIO_CHANNEL; - default: - return null; + if (result.matchedCount < 1) { + const response = new ResponseError(res, new NotFoundWarning()); + response.send(); + return; } - } - /** - * Save document in channel collections. If an error occurs, an error response is sent via `res`. 
- * - * @param res Response from Express - * @param model The channel document object - * @param channelTypeValue The value of the channel type for Channel document passed - */ - private async save(res: express.Response, model: Document, - channelTypeValue: string): Promise { - const isValid = await MongooseUtil.isValid(model); - if (!isValid) { - const error = new ValidationDataError(); - const response = new ResponseError(res, error); - await response.addMongooseErrors(model); - response.send(); - return; + const response = new ResponseNoContent(res); + response.send(); + } + } catch (err: any) { + console.error(err); + const error = new CouldNotSaveDataToDB(); + const response = new ResponseError(res, error); + response.send(); + } + } + + /** + * Removes a link between a channel and a config on Database + * + * @param req Request from Express + * @param res Response from Express + */ + public async removeConfigLink( + req: express.Request, + res: express.Response + ): Promise { + try { + const config_id = await this.configValidation(req, res); + + if (config_id) { + const channel = await this.getChannelById(res, req.params.channel_id); + + if (channel instanceof ResponseError) { + channel.send(); + return; } - model.set('config_type', new ObjectID( - ChannelResource.getConfigTypeIdByChannelTypeValue(channelTypeValue))); - - const doc = await model.save(); + await this.createBkp(channel); - const response = new ResponseSuccess(res); - response.send(doc.id); - } + // Here we can use email repo since we are using the same collections + // and IDs are always unique. + const result = await this.emailRepo.unlinkConfigFromChannel( + req.params.channel_id, + config_id + ); - /** - * Returns a model for the given channel type value. - * - * @param channelTypeValue the value of the channel type as a string - * @returns a Model for a channel if type corresponds to a channel, null otherwise - */ - private static getModelByChannelTypeValue(channelTypeValue: string): Model | null { - switch (channelTypeValue.toLowerCase()) { - case 'email': - return EmailModel; - case 'opsgenie': - return OpsgenieModel; - case 'pagerduty': - return PagerDutyModel; - case 'slack': - return SlackModel; - case 'telegram': - return TelegramModel; - case 'twilio': - return TwilioModel; - default: - return null; + if (result.matchedCount < 1) { + const response = new ResponseError(res, new NotFoundWarning()); + response.send(); + return; } - } - /** - * Returns an initialised object corresponding to the given channel type value. 
- * - * @param channelTypeValue the value of the channel type as a string - * @returns an initialised object for a channel if type corresponds to a channel, null otherwise - */ - private static getInitialisedObjectByChannelTypeValue(channelTypeValue: string): Channel | null { - switch (channelTypeValue.toLowerCase()) { - case 'email': - return new EmailChannel(); - case 'opsgenie': - return new OpsgenieChannel(); - case 'pagerduty': - return new PagerDutyChannel(); - case 'slack': - return new SlackChannel(); - case 'telegram': - return new TelegramChannel(); - case 'twilio': - return new TwilioChannel(); - default: - return null; - } + const response = new ResponseNoContent(res); + response.send(); + } + } catch (err: any) { + console.error(err); + const error = new CouldNotSaveDataToDB(); + const response = new ResponseError(res, error); + response.send(); } - - /** - * Updates an existing Channel on the Database in respective channel collection - * - * @param req Request from Express - * @param res Response from Express - */ - public async update(req: express.Request, - res: express.Response): Promise { - try { - const responseError = this.isMissingParam(req, res); - if (responseError instanceof ResponseError) { - responseError.send(); - return; - } - - // to avoid updating certain fields update - ['type', 'configs'].forEach((field) => { - if (field in req.body) { - delete req.body[field]; - } - }); - - // For channel name duplication validation, we can use email repo - // since we are using the same collection for channels/configs. - let duplicate = await this.emailRepo.isDuplicateChannelName({ - ...req.body, - id: req.params.id - } as Channel); - - if (duplicate) { - const response = new ResponseError(res, - new DuplicateWarning('name')); - response.send(); - return; - } - - const channel = await this.getChannelById(res, req.params.id); - if (channel instanceof ResponseError) { - channel.send(); - return; - } - - await this.createBkp(channel); - - const request = ObjectUtil.deepCamelToSnake(req.body); - const model: Document = MongooseUtil.merge(channel, request); - - await this.save(res, model, channel.type.value); - } catch (err: any) { - console.error(err); - const error = new CouldNotSaveDataToDB(); - const response = new ResponseError(res, error); - response.send(); - } + } + + /** + * Validates configs request by checking params and config IDs + * + * @param req Request from Express + * @param res Response from Express + * @returns config id if validation is successful, else null + */ + private async configValidation( + req: express.Request, + res: express.Response + ): Promise { + const responseError = this.isMissingParam(req, res, true); + if (responseError instanceof ResponseError) { + responseError.send(); + return null; } - /** - * Removes a Channel by ID on Database in respective channel collection - * - * @param req Request from Express - * @param res Response from Express - */ - public async remove(req: express.Request, - res: express.Response): Promise { - try { - const responseError = this.isMissingParam(req, res); - if (responseError instanceof ResponseError) { - responseError.send(); - return; - } - - const channel = await this.emailRepo.findOneById(req.params.id); - this.createBkp(channel as any); - - // Here we can use email repo since we are using the same collections - // and IDs are always unique. 
- const isDeleted = await this.emailRepo.deleteOneById(req.params.id); - if (!isDeleted) { - const response = new ResponseError(res, new NotFoundWarning()); - response.send(); - return; - } - - const response = new ResponseNoContent(res); - response.send(); - } catch (err: any) { - console.error(err); - const error = new CouldNotRemoveDataFromDB(); - const response = new ResponseError(res, error); - response.send(); - } - } - - /** - * Create a link between a channel and a config on Database - * - * @param req Request from Express - * @param res Response from Express - */ - public async createConfigLink(req: express.Request, - res: express.Response): Promise { - try { - const config_id = await this.configValidation(req, res); - - if (config_id) { - - const channel = await this.getChannelById( - res, req.params.channel_id); - - if (channel instanceof ResponseError) { - channel.send(); - return; - } - - await this.createBkp(channel); - - - // Here we can use email repo since we are using the same collections - // and IDs are always unique. - const result = await this.emailRepo.linkConfigToChannel(req.params.channel_id, config_id); - - if (result.matchedCount < 1) { - const response = new ResponseError(res, new NotFoundWarning()); - response.send(); - return; - } - - const response = new ResponseNoContent(res); - response.send(); - } - } catch (err: any) { - console.error(err); - const error = new CouldNotSaveDataToDB(); - const response = new ResponseError(res, error); - response.send(); - } + const config = await this.configRepo.findOneById(req.params.config_id); + if (!config) { + const response = new ResponseError(res, new NotFoundWarning()); + response.send(); + return null; } - /** - * Removes a link between a channel and a config on Database - * - * @param req Request from Express - * @param res Response from Express - */ - public async removeConfigLink(req: express.Request, - res: express.Response): Promise { - try { - const config_id = await this.configValidation(req, res); - - if (config_id) { - - const channel = await this.getChannelById( - res, req.params.channel_id); - - if (channel instanceof ResponseError) { - channel.send(); - return; - } - - await this.createBkp(channel); - - // Here we can use email repo since we are using the same collections - // and IDs are always unique. 
- const result = await this.emailRepo.unlinkConfigFromChannel(req.params.channel_id, config_id); - - if (result.matchedCount < 1) { - const response = new ResponseError(res, new NotFoundWarning()); - response.send(); - return; - } - - const response = new ResponseNoContent(res); - response.send(); - } - } catch (err: any) { - console.error(err); - const error = new CouldNotSaveDataToDB(); - const response = new ResponseError(res, error); - response.send(); - } + return config.id; + } + + /** + * Checks if there are missing params and that params are valid + * + * @param req Request from Express + * @param res Response from Express + * @param channelConfigIdsCheck whether to check for channel and config IDs instead of a single id + * + * @returns {@link ResponseError} instance if missing/invalid params, else null + */ + private isMissingParam( + req: express.Request, + res: express.Response, + channelConfigIdsCheck: boolean = false + ): ResponseError | null { + if (!channelConfigIdsCheck && !req.params.id) { + const error = new MissingParameterWarning("id"); + + return new ResponseError(res, error); } - /** - * Validates configs request by checking params and config IDs - * - * @param req Request from Express - * @param res Response from Express - * @returns config id if validation is successful, else null - */ - private async configValidation(req: express.Request, res: express.Response): Promise { - const responseError = this.isMissingParam(req, res, true); - if (responseError instanceof ResponseError) { - responseError.send(); - return null; - } - - const config = await this.configRepo.findOneById(req.params.config_id); - if (!config) { - const response = new ResponseError(res, new NotFoundWarning()); - response.send(); - return null; - } + if ( + channelConfigIdsCheck && + (!req.params.channel_id || !req.params.config_id) + ) { + const error = new MissingParameterWarning( + !req.params.channel_id ? "channel_id" : "config_id" + ); - return config.id; + return new ResponseError(res, error); } - /** - * Checks if there are missing params and that params are valid - * - * @param req Request from Express - * @param res Response from Express - * @param channelConfigIdsCheck whether to check for channel and config IDs instead of a single id - * - * @returns {@link ResponseError} instance if missing/invalid params, else null - */ - private isMissingParam(req: express.Request, res: express.Response, - channelConfigIdsCheck: boolean = false): ResponseError | null { - if (!channelConfigIdsCheck && !req.params.id) { - const error = new MissingParameterWarning('id'); - - return new ResponseError(res, error); - } - - if (channelConfigIdsCheck && (!req.params.channel_id || !req.params.config_id)) { - const error = new MissingParameterWarning(!req.params.channel_id ? 'channel_id' : 'config_id'); - - return new ResponseError(res, error); - } - - if (!channelConfigIdsCheck && !mongoose.Types.ObjectId.isValid(req.params.id)) { - const error = new InvalidIDError(req.params.id); - return new ResponseError(res, error); - } - - if (channelConfigIdsCheck && (!mongoose.Types.ObjectId.isValid(req.params.channel_id) || - !mongoose.Types.ObjectId.isValid(req.params.config_id))) { - const error = new InvalidIDError( - !mongoose.Types.ObjectId.isValid(req.params.channel_id) ? 
req.params.channel_id : req.params.config_id); - return new ResponseError(res, error); - } - - return null; + if ( + !channelConfigIdsCheck && + !mongoose.Types.ObjectId.isValid(req.params.id) + ) { + const error = new InvalidIDError(req.params.id); + return new ResponseError(res, error); } + if ( + channelConfigIdsCheck && + (!mongoose.Types.ObjectId.isValid(req.params.channel_id) || + !mongoose.Types.ObjectId.isValid(req.params.config_id)) + ) { + const error = new InvalidIDError( + !mongoose.Types.ObjectId.isValid(req.params.channel_id) + ? req.params.channel_id + : req.params.config_id + ); + return new ResponseError(res, error); + } - /** - * Create a bkp to comparison on alerter - * @param channel Current official config - */ - private async createBkp(channel : AbstractChannel) : Promise { - try { - if(!channel) { - return; - } - - const type = channel['config_type']['_id'].toString(); - let model : Model = null; - - if(type === GenericDocument.CONFIG_TYPE_EMAIL_CHANNEL){ - model = EmailOldModel; - } else if(type === GenericDocument.CONFIG_TYPE_OPSGENIE_CHANNEL){ - model = OpsgenieOldModel; - } else if(type === GenericDocument.CONFIG_TYPE_PAGERDUTY_CHANNEL){ - model = PagerDutyOldModel; - } else if(type === GenericDocument.CONFIG_TYPE_SLACK_CHANNEL){ - model = SlackOldModel; - } else if(type === GenericDocument.CONFIG_TYPE_TELEGRAM_CHANNEL){ - model = TelegramOldModel; - } else if(type === GenericDocument.CONFIG_TYPE_TWILIO_CHANNEL){ - model = TwilioOldModel; - } else { - return; - } - - await model.deleteOne({ _id: channel.id }); - channel = channel['toObject'](); - const oldModel : Document = new model(channel); - await oldModel.save(); - } catch(err) { - console.error(err); - return; - } + return null; + } + + /** + * Create a bkp to comparison on alerter + * @param channel Current official config + */ + private async createBkp(channel: AbstractChannel): Promise { + try { + if (!channel) { + return; + } + + const type = channel["config_type"]["_id"].toString(); + let model: Model = null; + + if (type === GenericDocument.CONFIG_TYPE_EMAIL_CHANNEL) { + model = EmailOldModel; + } else if (type === GenericDocument.CONFIG_TYPE_OPSGENIE_CHANNEL) { + model = OpsgenieOldModel; + } else if (type === GenericDocument.CONFIG_TYPE_PAGERDUTY_CHANNEL) { + model = PagerDutyOldModel; + } else if (type === GenericDocument.CONFIG_TYPE_SLACK_CHANNEL) { + model = SlackOldModel; + } else if (type === GenericDocument.CONFIG_TYPE_TELEGRAM_CHANNEL) { + model = TelegramOldModel; + } else if (type === GenericDocument.CONFIG_TYPE_TWILIO_CHANNEL) { + model = TwilioOldModel; + } else { + return; + } + + await model.deleteOne({ _id: channel.id }); + channel = channel["toObject"](); + const oldModel: Document = new model(channel); + await oldModel.save(); + } catch (err) { + console.error(err); + return; } + } } diff --git a/api/src/v1/rest/config/ConfigResource.ts b/api/src/v1/rest/config/ConfigResource.ts index b6370e46..e184a697 100644 --- a/api/src/v1/rest/config/ConfigResource.ts +++ b/api/src/v1/rest/config/ConfigResource.ts @@ -200,7 +200,7 @@ export class ConfigResource { await this.createBkp(config); const request = ObjectUtil.deepCamelToSnake(req.body); - const model: Document = MongooseUtil.merge(config, request); + const model: Document = MongooseUtil.merge(config, request); await this.save(res, model); } catch (err: any) { diff --git a/api/tests/server.test.ts b/api/tests/server.test.ts index d7104628..65dc655e 100644 --- a/api/tests/server.test.ts +++ b/api/tests/server.test.ts @@ -1,112 +1,111 @@ 
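The server.test.ts changes below carry the same driver upgrade into the test suite: MongoClientCommonOption, MongoCallback and FilterQuery are no longer imported from mongodb, the mocked db options are typed as DbOptions instead, and the mocks are reformatted to match. The mocking idiom itself is unchanged; a stripped-down sketch of its shape, with module behaviour steered through closure variables as in the file itself:

// Minimal sketch of the jest.mock pattern used in server.test.ts.
let mongoFindMockReturn: any[] = [];

jest.mock("../src/server/mongo", () => ({
  ...jest.requireActual("../src/server/mongo"),
  MongoInterface: jest.fn(() => ({
    client: {
      db: (_dbName?: string) => ({
        collection: (_name: string) => ({
          find: (_query: any) => ({
            toArray: async () => mongoFindMockReturn, // tests set this per case
          }),
        }),
      }),
    },
    connect: () => undefined,
  })),
}));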
// Make sure that this is at the top of the file since it is required before importing any packages. -process.env.UI_ACCESS_IP = '0.0.0.0'; - -import {getPrometheusMetricFromBaseChain} from "../src/server/utils"; -import {AggregationCursor, Collection, FilterQuery, MongoCallback, MongoClientCommonOption} from 'mongodb'; -import {Callback, RedisError} from 'redis'; -import {baseChains, PingStatus} from "../src/constant/server"; -import request from 'supertest' -import {app, mongoInterval, redisInterval, server} from '../src/server'; -import {EnvVariablesNotAvailable, InvalidEndpoint, MissingKeysInBody} from '../src/constant/errors'; +process.env.UI_ACCESS_IP = "0.0.0.0"; + +import { getPrometheusMetricFromBaseChain } from "../src/server/utils"; +import { AggregationCursor, Collection, DbOptions } from "mongodb"; +import { Callback, RedisError } from "redis"; +import { baseChains, PingStatus } from "../src/constant/server"; +import request from "supertest"; +import { app, mongoInterval, redisInterval, server } from "../src/server"; import { - alertsMultipleSourcesEndpointRet, - alertsMultipleSourcesMongoRet, - alertsOverviewChainSourceEndpointRet, - alertsOverviewChainSourceRedisRet, - alertsOverviewChainSourceWithUniqueIdentifierEndpointRet, - alertsOverviewChainSourceWithUniqueIdentifierRedisRet, - alertsOverviewMultipleSourcesEndpointRet, - alertsOverviewMultipleSourcesRedisRet, - alertsOverviewSingleDockerHubRepoEndpointRet, - alertsOverviewSingleDockerHubRepoRedisRet, - alertsOverviewSingleGitHubRepoEndpointRet, - alertsOverviewSingleGitHubRepoRedisRet, - alertsOverviewSingleNodeEndpointRet, - alertsOverviewSingleNodeRedisRet, - alertsOverviewSingleSystemEndpointRet, - alertsOverviewSingleSystemRedisRet, - alertsSingleSourceEndpointRet, - alertsSingleSourceMongoRet, - axiosMock, - emptyAxiosResponse, - metricsMultipleSystemsEndpointRet, - metricsMultipleSystemsMongoRet, - metricsSingleRepoRedisEndpointRet, - metricsSingleRepoRedisRet, - metricsSingleSystemEndpointRet, - metricsSingleSystemMongoRet, - metricsSingleSystemRedisEndpointRet, - metricsSingleSystemRedisRet, - monitorablesInfoInvalidBaseChainsError, - monitorablesInfoMultipleSourcesAndBaseChainsEndpointRet, - monitorablesInfoMultipleSourcesAndBaseChainsMongoRet, - monitorablesInfoMultipleSourcesEndpointRet, - monitorablesInfoMultipleSourcesMongoRet, - monitorablesInfoSingleSourceEndpointRet, - monitorablesInfoSingleSourceMongoRet, - noMetricsSingleRepoRedisEndpointRet, - noMetricsSingleRepoRedisRet, - noMetricsSingleSystemRedisEndpointRet, - noMetricsSingleSystemRedisRet, - parentIdsInvalidSchemaError -} from './test-utils'; -import {AxiosResponse} from "axios"; -import * as pagerDutyApi from '@pagerduty/pdjs'; - -const Web3 = require('web3'); -const opsgenie = require('opsgenie-sdk'); -const twilio = require('twilio'); -const nodemailer = require('nodemailer'); + EnvVariablesNotAvailable, + InvalidEndpoint, + MissingKeysInBody, +} from "../src/constant/errors"; +import { + alertsMultipleSourcesEndpointRet, + alertsMultipleSourcesMongoRet, + alertsOverviewChainSourceEndpointRet, + alertsOverviewChainSourceRedisRet, + alertsOverviewChainSourceWithUniqueIdentifierEndpointRet, + alertsOverviewChainSourceWithUniqueIdentifierRedisRet, + alertsOverviewMultipleSourcesEndpointRet, + alertsOverviewMultipleSourcesRedisRet, + alertsOverviewSingleDockerHubRepoEndpointRet, + alertsOverviewSingleDockerHubRepoRedisRet, + alertsOverviewSingleGitHubRepoEndpointRet, + alertsOverviewSingleGitHubRepoRedisRet, + alertsOverviewSingleNodeEndpointRet, 
+ alertsOverviewSingleNodeRedisRet, + alertsOverviewSingleSystemEndpointRet, + alertsOverviewSingleSystemRedisRet, + alertsSingleSourceEndpointRet, + alertsSingleSourceMongoRet, + axiosMock, + emptyAxiosResponse, + metricsMultipleSystemsEndpointRet, + metricsMultipleSystemsMongoRet, + metricsSingleRepoRedisEndpointRet, + metricsSingleRepoRedisRet, + metricsSingleSystemEndpointRet, + metricsSingleSystemMongoRet, + metricsSingleSystemRedisEndpointRet, + metricsSingleSystemRedisRet, + monitorablesInfoInvalidBaseChainsError, + monitorablesInfoMultipleSourcesAndBaseChainsEndpointRet, + monitorablesInfoMultipleSourcesAndBaseChainsMongoRet, + monitorablesInfoMultipleSourcesEndpointRet, + monitorablesInfoMultipleSourcesMongoRet, + monitorablesInfoSingleSourceEndpointRet, + monitorablesInfoSingleSourceMongoRet, + noMetricsSingleRepoRedisEndpointRet, + noMetricsSingleRepoRedisRet, + noMetricsSingleSystemRedisEndpointRet, + noMetricsSingleSystemRedisRet, + parentIdsInvalidSchemaError, +} from "./test-utils"; +import { AxiosResponse } from "axios"; +import * as pagerDutyApi from "@pagerduty/pdjs"; + +const Web3 = require("web3"); +const opsgenie = require("opsgenie-sdk"); +const twilio = require("twilio"); +const nodemailer = require("nodemailer"); // Mongo Mock let mongoAggregateMockReturn: any[] = []; let mongoFindMockReturn: any[] = []; let mongoMockCount: number = 0; -jest.mock('../src/server/mongo', () => { - return { - ...jest.requireActual('../src/server/mongo'), - MongoInterface: jest.fn(() => { +jest.mock("../src/server/mongo", () => { + return { + ...jest.requireActual("../src/server/mongo"), + MongoInterface: jest.fn(() => { + return { + client: { + db: ( + dbName?: string | undefined, + options?: DbOptions | undefined + ) => { return { - client: { - db: ((dbName?: string | undefined, - options?: MongoClientCommonOption | undefined) => { - return { - collection: (_name: string, - _callback?: - MongoCallback> | - undefined) => { - return { - countDocuments: (async () => mongoMockCount), - aggregate: - ((_callback: - MongoCallback>) => { - return { - toArray: (async (): - Promise => { - return mongoAggregateMockReturn - }) - } - }), - find: ((_query: FilterQuery) => { - return { - toArray: (async (): - Promise => { - return mongoFindMockReturn - }) - } - }) - } - } - } - }) - }, - connect: () => { - return - } - } - }) - }; + collection: (_name: string) => { + return { + countDocuments: async () => mongoMockCount, + aggregate: () => { + return { + toArray: async (): Promise => { + return mongoAggregateMockReturn; + }, + }; + }, + find: (_query: any) => { + return { + toArray: async (): Promise => { + return mongoFindMockReturn; + }, + }; + }, + }; + }, + }; + }, + }, + connect: () => { + return; + }, + }; + }), + }; }); // Redis Mock @@ -116,1591 +115,2118 @@ let hgetallMockCallbackReply: { [p: string]: string } = {}; let hmgetMockCallbackReply: string[] = []; let execMockCallbackError: Error | null = null; -jest.mock('../src/server/redis', () => { - return { - ...jest.requireActual('../src/server/redis'), - RedisInterface: jest.fn(() => { +jest.mock("../src/server/redis", () => { + return { + ...jest.requireActual("../src/server/redis"), + RedisInterface: jest.fn(() => { + return { + client: { + mget: ( + _arg1: string | string[], + cb?: Callback | undefined + ) => { + if (cb) { + cb(mgetMockCallbackError, mgetMockCallbackReply); + } + }, + multi: () => { return { - client: { - mget: ((_arg1: string | string[], - cb?: Callback | undefined) => { - if (cb) { - cb(mgetMockCallbackError, 
mgetMockCallbackReply); - } - }), - multi: () => { - return { - hgetall: ((_key: string, cb?: Callback<{ - [p: string]: string - }> | undefined) => { - if (cb) { - cb(null, hgetallMockCallbackReply); - } - }), - hmget: ((_key: string, _arg1: string | string[], - cb?: Callback | undefined) => { - if (cb) { - cb(null, hmgetMockCallbackReply); - } - }), - exec: ((cb?: Callback | undefined) => { - if (cb) { - cb(execMockCallbackError, []); - } - }) - } - } - }, - connect: () => { - return + hgetall: ( + _key: string, + cb?: + | Callback<{ + [p: string]: string; + }> + | undefined + ) => { + if (cb) { + cb(null, hgetallMockCallbackReply); } - } - }) - }; + }, + hmget: ( + _key: string, + _arg1: string | string[], + cb?: Callback | undefined + ) => { + if (cb) { + cb(null, hmgetMockCallbackReply); + } + }, + exec: (cb?: Callback | undefined) => { + if (cb) { + cb(execMockCallbackError, []); + } + }, + }; + }, + }, + connect: () => { + return; + }, + }; + }), + }; }); // Axios Mock -jest.mock('axios'); +jest.mock("axios"); // Web3 mock -jest.mock('web3'); +jest.mock("web3"); let web3EthGetBlockNumberPromiseResolve: null | number = null; Web3.providers.HttpProvider.mockImplementation((_url) => null); Web3.mockImplementation((_provider) => { - return { - eth: { - getBlockNumber: () => { - return new Promise((resolve, reject) => { - web3EthGetBlockNumberPromiseResolve !== null ? - resolve(web3EthGetBlockNumberPromiseResolve) : reject({message: 'test'}) - }); - } - } - } + return { + eth: { + getBlockNumber: () => { + return new Promise((resolve, reject) => { + web3EthGetBlockNumberPromiseResolve !== null + ? resolve(web3EthGetBlockNumberPromiseResolve) + : reject({ message: "test" }); + }); + }, + }, + }; }); // Opsgenie SDK Mock let alertV2CreateMockCallbackError: Error | null = null; -jest.mock('opsgenie-sdk'); +jest.mock("opsgenie-sdk"); opsgenie.alertV2.create.mockImplementation((_data, cb) => { - cb(alertV2CreateMockCallbackError, null); -}) + cb(alertV2CreateMockCallbackError, null); +}); // Slack Web-Api Mock let slackMock = { - chat: { - postMessage: jest.fn() - }, + chat: { + postMessage: jest.fn(), + }, }; -jest.mock('@slack/web-api', () => { - return {WebClient: jest.fn(() => slackMock)}; +jest.mock("@slack/web-api", () => { + return { WebClient: jest.fn(() => slackMock) }; }); // Twilio mock -jest.mock('twilio'); +jest.mock("twilio"); let twilioClientCallsCreateResolve: null | object = null; twilio.mockImplementation((_accountSid, _authToken) => { - return { - calls: { - create: (_content) => { - return new Promise((resolve, reject) => { - twilioClientCallsCreateResolve !== null ? - resolve(twilioClientCallsCreateResolve) : reject({message: 'test'}) - }); - } - } - } + return { + calls: { + create: (_content) => { + return new Promise((resolve, reject) => { + twilioClientCallsCreateResolve !== null + ? 
resolve(twilioClientCallsCreateResolve) + : reject({ message: "test" }); + }); + }, + }, + }; }); // nodemailer mock -jest.mock('nodemailer'); +jest.mock("nodemailer"); let nodemailerVerifyCallbackError: null | Error = null; let nodemailerSendMailCallbackError: null | Error = null; nodemailer.createTransport.mockReturnValue({ - verify: (callback) => { - callback(nodemailerVerifyCallbackError, null); - }, - sendMail: (_, callback) => { - callback(nodemailerSendMailCallbackError, null); - } + verify: (callback) => { + callback(nodemailerVerifyCallbackError, null); + }, + sendMail: (_, callback) => { + callback(nodemailerSendMailCallbackError, null); + }, }); // This is used to clear all mock data before each test beforeEach(async () => { - mongoAggregateMockReturn = []; - mongoFindMockReturn = []; - mgetMockCallbackReply = []; - mgetMockCallbackError = null; - hgetallMockCallbackReply = {}; - hmgetMockCallbackReply = []; - execMockCallbackError = null; - web3EthGetBlockNumberPromiseResolve = null; - alertV2CreateMockCallbackError = null; - slackMock = { - chat: { - postMessage: jest.fn() - }, - }; - twilioClientCallsCreateResolve = null; - nodemailerVerifyCallbackError = null; - nodemailerSendMailCallbackError = null; + mongoAggregateMockReturn = []; + mongoFindMockReturn = []; + mgetMockCallbackReply = []; + mgetMockCallbackError = null; + hgetallMockCallbackReply = {}; + hmgetMockCallbackReply = []; + execMockCallbackError = null; + web3EthGetBlockNumberPromiseResolve = null; + alertV2CreateMockCallbackError = null; + slackMock = { + chat: { + postMessage: jest.fn(), + }, + }; + twilioClientCallsCreateResolve = null; + nodemailerVerifyCallbackError = null; + nodemailerSendMailCallbackError = null; - jest.clearAllMocks(); -}) + jest.clearAllMocks(); +}); // Used to stop redis and mongo interval processes after all tests finish afterAll(() => { - mongoInterval.unref(); - redisInterval.unref(); + mongoInterval.unref(); + redisInterval.unref(); - server.close(); + server.close(); }); -describe('Mongo Monitorables Info POST Route', () => { - const endpoint = '/server/mongo/monitorablesInfo'; - - it.each([ - ['a single base chain which is not in mongo is specified', - {baseChains: ['cosmos']}, - [], - {result: {cosmos: {}}}], - ['a single base chain which is empty in mongo is specified', - {baseChains: ['cosmos']}, - [{_id: "cosmos"}], - {result: {cosmos: {}}}], - ['all base chains specified are not in mongo', - {baseChains: ['cosmos', 'general', 'chainlink', 'substrate']}, - [], - { - result: { - cosmos: {}, general: {}, chainlink: {}, - substrate: {} - } - }], - ['all base chains specified are empty in mongo', - {baseChains}, - [ - {_id: "cosmos"}, - {_id: "general"}, - {_id: "chainlink"}, - {_id: "substrate"}], - { - result: { - cosmos: {}, general: {}, chainlink: {}, - substrate: {} - } - }], - ['all base chains are specified and a single base chain with a' + - ' single source is in mongo', - {baseChains}, - monitorablesInfoSingleSourceMongoRet, - monitorablesInfoSingleSourceEndpointRet], - ['all base chains are specified and a single base chain with multiple' + - ' sources is in mongo', - {baseChains}, - monitorablesInfoMultipleSourcesAndBaseChainsMongoRet, - monitorablesInfoMultipleSourcesAndBaseChainsEndpointRet], - ['all base chains are specified and multiple base chains with' + - ' multiple sources are in mongo', - {baseChains}, - monitorablesInfoMultipleSourcesMongoRet, - monitorablesInfoMultipleSourcesEndpointRet] - ])('Should return correct result and a successful status code if %s', 
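Every case starts clean: the beforeEach above resets each closure-controlled reply and error to its default, and jest.clearAllMocks() drops recorded calls, so the table-driven cases that follow cannot leak state into one another. The idiom in miniature:

// Miniature sketch of the reset idiom used above.
let mgetMockCallbackReply: string[] = [];
let mgetMockCallbackError: Error | null = null;

beforeEach(() => {
  // Restore the values the module mocks read on every call...
  mgetMockCallbackReply = [];
  mgetMockCallbackError = null;
  // ...and clear call history on every jest.fn(), keeping implementations.
  jest.clearAllMocks();
});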
- async (_: string, body: any, mongoRet: any[], - endpointRet: any) => { - mongoFindMockReturn = mongoRet; - const res = await request(app).post(endpoint).send(body); - expect(res.statusCode).toEqual(200); - expect(res.body).toEqual(endpointRet); - }); - - it.each([ - [532, 'baseChains not specified', {}, - {error: 'Error: Missing key(s) baseChains in body.'}], - [537, 'invalid base chains are specified', {baseChains: ['test']}, - {error: monitorablesInfoInvalidBaseChainsError}], - ])('Should return error and %s status code if %s', - async (statusCode: number, _: string, body: any, - endpointRet: any) => { - const res = await request(app).post(endpoint).send(body); - expect(res.statusCode).toEqual(statusCode); - expect(res.body).toEqual(endpointRet); - }); - - it('Should return error and status code 536 if mongo returns an ' + - 'error', - async () => { - mongoFindMockReturn = [null]; - const res = await request(app).post(endpoint) - .send({baseChains: ['cosmos']}); - expect(res.statusCode).toEqual(536); - expect(res.body) - .toEqual({ - error: 'Error: Could not retrieve data from Mongo.' - }); - }); +describe("Mongo Monitorables Info POST Route", () => { + const endpoint = "/server/mongo/monitorablesInfo"; + + it.each([ + [ + "a single base chain which is not in mongo is specified", + { baseChains: ["cosmos"] }, + [], + { result: { cosmos: {} } }, + ], + [ + "a single base chain which is empty in mongo is specified", + { baseChains: ["cosmos"] }, + [{ _id: "cosmos" }], + { result: { cosmos: {} } }, + ], + [ + "all base chains specified are not in mongo", + { baseChains: ["cosmos", "general", "chainlink", "substrate"] }, + [], + { + result: { + cosmos: {}, + general: {}, + chainlink: {}, + substrate: {}, + }, + }, + ], + [ + "all base chains specified are empty in mongo", + { baseChains }, + [ + { _id: "cosmos" }, + { _id: "general" }, + { _id: "chainlink" }, + { _id: "substrate" }, + ], + { + result: { + cosmos: {}, + general: {}, + chainlink: {}, + substrate: {}, + }, + }, + ], + [ + "all base chains are specified and a single base chain with a" + + " single source is in mongo", + { baseChains }, + monitorablesInfoSingleSourceMongoRet, + monitorablesInfoSingleSourceEndpointRet, + ], + [ + "all base chains are specified and a single base chain with multiple" + + " sources is in mongo", + { baseChains }, + monitorablesInfoMultipleSourcesAndBaseChainsMongoRet, + monitorablesInfoMultipleSourcesAndBaseChainsEndpointRet, + ], + [ + "all base chains are specified and multiple base chains with" + + " multiple sources are in mongo", + { baseChains }, + monitorablesInfoMultipleSourcesMongoRet, + monitorablesInfoMultipleSourcesEndpointRet, + ], + ])( + "Should return correct result and a successful status code if %s", + async (_: string, body: any, mongoRet: any[], endpointRet: any) => { + mongoFindMockReturn = mongoRet; + const res = await request(app).post(endpoint).send(body); + expect(res.statusCode).toEqual(200); + expect(res.body).toEqual(endpointRet); + } + ); + + it.each([ + [ + 532, + "baseChains not specified", + {}, + { error: "Error: Missing key(s) baseChains in body." 
}, + ], + [ + 537, + "invalid base chains are specified", + { baseChains: ["test"] }, + { error: monitorablesInfoInvalidBaseChainsError }, + ], + ])( + "Should return error and %s status code if %s", + async (statusCode: number, _: string, body: any, endpointRet: any) => { + const res = await request(app).post(endpoint).send(body); + expect(res.statusCode).toEqual(statusCode); + expect(res.body).toEqual(endpointRet); + } + ); + + it( + "Should return error and status code 536 if mongo returns an " + "error", + async () => { + mongoFindMockReturn = [null]; + const res = await request(app) + .post(endpoint) + .send({ baseChains: ["cosmos"] }); + expect(res.statusCode).toEqual(536); + expect(res.body).toEqual({ + error: "Error: Could not retrieve data from Mongo.", + }); + } + ); }); -describe('Mongo Alerts POST Route', () => { - const endpoint = '/server/mongo/alerts'; - - it.each([ - ['no chains, severities, or sources are specified', - { - chains: [], severities: [], sources: [], minTimestamp: 0, - maxTimestamp: 5000000000, noOfAlerts: 100 - }, - [], - {result: {alerts: []}}], - ['a single chain, all severities, and a single source that has no ' + - 'alerts are specified', - { - chains: ['test_chain'], - severities: ['INFO', 'WARNING', 'CRITICAL', 'ERROR'], - sources: ['test_source'], minTimestamp: 0, - maxTimestamp: 5000000000, noOfAlerts: 100 - }, - [], - {result: {alerts: []}}], - ['a single chain, all severities, and a single source that has some ' + - 'alerts are specified', - { - chains: ['test_chain'], - severities: ['INFO', 'WARNING', 'CRITICAL', 'ERROR'], - sources: ['test_source'], minTimestamp: 0, - maxTimestamp: 5000000000, noOfAlerts: 100 - }, - alertsSingleSourceMongoRet, - alertsSingleSourceEndpointRet], - ['multiple chains, all severities, and multiple sources that have ' + - 'some alerts are specified', - { - chains: ['test_chain', 'test_chain2'], - severities: ['INFO', 'WARNING', 'CRITICAL', 'ERROR'], - sources: ['test_node', 'test_repo'], minTimestamp: 0, - maxTimestamp: 5000000000, noOfAlerts: 100 - }, - alertsMultipleSourcesMongoRet, - alertsMultipleSourcesEndpointRet], - ])('Should return correct result and a successful status code if %s', - async (_: string, body: any, mongoRet: any[], endpointRet: any) => { - mongoAggregateMockReturn = mongoRet; - const res = await request(app).post(endpoint).send(body); - expect(res.statusCode).toEqual(200); - expect(res.body).toEqual(endpointRet); - }); - - it.each([ - [532, - 'chains, severities, sources, minTimestamp, maxTimestamp, and ' + - 'noOfAlerts not specified', - {}, - { - error: 'Error: Missing key(s) chains, severities, sources, ' + - 'minTimestamp, maxTimestamp, noOfAlerts in body.' 
- }], - [532, 'chains not specified', - { - severities: [], sources: [], minTimestamp: 0, maxTimestamp: 0, - noOfAlerts: 1 - }, - {error: 'Error: Missing key(s) chains in body.'}], - [532, 'severities not specified', - { - chains: [], sources: [], minTimestamp: 0, maxTimestamp: 0, - noOfAlerts: 1 - }, - {error: 'Error: Missing key(s) severities in body.'}], - [532, 'sources not specified', - { - chains: [], severities: [], minTimestamp: 0, maxTimestamp: 0, - noOfAlerts: 1 - }, - {error: 'Error: Missing key(s) sources in body.'}], - [532, 'minTimestamp not specified', - { - chains: [], severities: [], sources: [], maxTimestamp: 0, - noOfAlerts: 1 - }, - {error: 'Error: Missing key(s) minTimestamp in body.'}], - [532, 'maxTimestamp not specified', - { - chains: [], severities: [], sources: [], minTimestamp: 0, - noOfAlerts: 1 - }, - {error: 'Error: Missing key(s) maxTimestamp in body.'}], - [532, 'noOfAlerts not specified', - { - chains: [], severities: [], sources: [], minTimestamp: 0, - maxTimestamp: 0 - }, - {error: 'Error: Missing key(s) noOfAlerts in body.'}], - [539, 'invalid chain specified (non-string)', - { - chains: ['test_chain', 123], severities: [], sources: [], - minTimestamp: 0, maxTimestamp: 0, noOfAlerts: 1 - }, - {error: 'Error: An invalid value was given to req.body.chains.'}], - [539, 'invalid severity specified (non-string)', - { - chains: [], severities: ['test_chain', 123], sources: [], - minTimestamp: 0, maxTimestamp: 0, noOfAlerts: 1 - }, - { - error: 'Error: An invalid value was given to ' + - 'req.body.severities.' - }], - [539, 'invalid source specified (non-string)', - { - chains: [], severities: [], sources: ['test_chain', 123], - minTimestamp: 0, maxTimestamp: 0, noOfAlerts: 1 - }, - {error: 'Error: An invalid value was given to req.body.sources.'}], - [539, 'invalid severity specified (non-existing)', - { - chains: [], severities: ['TEST'], sources: [], minTimestamp: 0, - maxTimestamp: 0, noOfAlerts: 1 - }, - { - error: 'Error: An invalid value was given to ' + - 'req.body.severities.' - }], - [539, 'invalid minTimestamp specified (string)', - { - chains: [], severities: [], sources: [], minTimestamp: 'TEST', - maxTimestamp: 0, noOfAlerts: 1 - }, - { - error: 'Error: An invalid value was given to ' + - 'req.body.minTimestamp.' - }], - [539, 'invalid maxTimestamp specified (string)', - { - chains: [], severities: [], sources: [], minTimestamp: 0, - maxTimestamp: 'TEST', noOfAlerts: 1 - }, - { - error: 'Error: An invalid value was given to ' + - 'req.body.maxTimestamp.' - }], - [539, 'invalid noOfAlerts specified (string)', - { - chains: [], severities: [], sources: [], minTimestamp: 0, - maxTimestamp: 0, noOfAlerts: 'TEST' - }, - { - error: 'Error: An invalid value was given to ' + - 'req.body.noOfAlerts.' - }], - [539, 'invalid minTimestamp specified (negative number)', - { - chains: [], severities: [], sources: [], minTimestamp: -123, - maxTimestamp: 0, noOfAlerts: 1 - }, - { - error: 'Error: An invalid value was given to ' + - 'req.body.minTimestamp.' - }], - [539, 'invalid maxTimestamp specified (negative number)', - { - chains: [], severities: [], sources: [], minTimestamp: 0, - maxTimestamp: -123, noOfAlerts: 1 - }, - { - error: 'Error: An invalid value was given to ' + - 'req.body.maxTimestamp.' - }], - [539, 'invalid noOfAlerts specified (negative number)', - { - chains: [], severities: [], sources: [], minTimestamp: 0, - maxTimestamp: 0, noOfAlerts: -123 - }, - { - error: 'Error: An invalid value was given to ' + - 'req.body.noOfAlerts.' 
- }], - [539, 'invalid noOfAlerts specified (zero)', - { - chains: [], severities: [], sources: [], minTimestamp: 0, - maxTimestamp: 0, noOfAlerts: 0 - }, - { - error: 'Error: An invalid value was given to ' + - 'req.body.noOfAlerts.' - }], - ])('Should return error and %s status code if %s', - async (statusCode: number, _: string, body: any, - endpointRet: any) => { - const res = await request(app).post(endpoint).send(body); - expect(res.statusCode).toEqual(statusCode); - expect(res.body).toEqual(endpointRet); - }); - - it('Should return error and status code 536 if mongo ' + - 'returns an error', - async () => { - mongoAggregateMockReturn = [null, null]; - const res = await request(app).post(endpoint).send({ - chains: ['test_chain'], severities: [], - sources: ['test_source'], minTimestamp: 0, - maxTimestamp: 5000000000, noOfAlerts: 100 - }); - expect(res.statusCode).toEqual(536); - expect(res.body) - .toEqual({ - error: 'Error: Could not retrieve data from Mongo.' - }); - }); +describe("Mongo Alerts POST Route", () => { + const endpoint = "/server/mongo/alerts"; + + it.each([ + [ + "no chains, severities, or sources are specified", + { + chains: [], + severities: [], + sources: [], + minTimestamp: 0, + maxTimestamp: 5000000000, + noOfAlerts: 100, + }, + [], + { result: { alerts: [] } }, + ], + [ + "a single chain, all severities, and a single source that has no " + + "alerts are specified", + { + chains: ["test_chain"], + severities: ["INFO", "WARNING", "CRITICAL", "ERROR"], + sources: ["test_source"], + minTimestamp: 0, + maxTimestamp: 5000000000, + noOfAlerts: 100, + }, + [], + { result: { alerts: [] } }, + ], + [ + "a single chain, all severities, and a single source that has some " + + "alerts are specified", + { + chains: ["test_chain"], + severities: ["INFO", "WARNING", "CRITICAL", "ERROR"], + sources: ["test_source"], + minTimestamp: 0, + maxTimestamp: 5000000000, + noOfAlerts: 100, + }, + alertsSingleSourceMongoRet, + alertsSingleSourceEndpointRet, + ], + [ + "multiple chains, all severities, and multiple sources that have " + + "some alerts are specified", + { + chains: ["test_chain", "test_chain2"], + severities: ["INFO", "WARNING", "CRITICAL", "ERROR"], + sources: ["test_node", "test_repo"], + minTimestamp: 0, + maxTimestamp: 5000000000, + noOfAlerts: 100, + }, + alertsMultipleSourcesMongoRet, + alertsMultipleSourcesEndpointRet, + ], + ])( + "Should return correct result and a successful status code if %s", + async (_: string, body: any, mongoRet: any[], endpointRet: any) => { + mongoAggregateMockReturn = mongoRet; + const res = await request(app).post(endpoint).send(body); + expect(res.statusCode).toEqual(200); + expect(res.body).toEqual(endpointRet); + } + ); + + it.each([ + [ + 532, + "chains, severities, sources, minTimestamp, maxTimestamp, and " + + "noOfAlerts not specified", + {}, + { + error: + "Error: Missing key(s) chains, severities, sources, " + + "minTimestamp, maxTimestamp, noOfAlerts in body.", + }, + ], + [ + 532, + "chains not specified", + { + severities: [], + sources: [], + minTimestamp: 0, + maxTimestamp: 0, + noOfAlerts: 1, + }, + { error: "Error: Missing key(s) chains in body." }, + ], + [ + 532, + "severities not specified", + { + chains: [], + sources: [], + minTimestamp: 0, + maxTimestamp: 0, + noOfAlerts: 1, + }, + { error: "Error: Missing key(s) severities in body." 
}, + ], + [ + 532, + "sources not specified", + { + chains: [], + severities: [], + minTimestamp: 0, + maxTimestamp: 0, + noOfAlerts: 1, + }, + { error: "Error: Missing key(s) sources in body." }, + ], + [ + 532, + "minTimestamp not specified", + { + chains: [], + severities: [], + sources: [], + maxTimestamp: 0, + noOfAlerts: 1, + }, + { error: "Error: Missing key(s) minTimestamp in body." }, + ], + [ + 532, + "maxTimestamp not specified", + { + chains: [], + severities: [], + sources: [], + minTimestamp: 0, + noOfAlerts: 1, + }, + { error: "Error: Missing key(s) maxTimestamp in body." }, + ], + [ + 532, + "noOfAlerts not specified", + { + chains: [], + severities: [], + sources: [], + minTimestamp: 0, + maxTimestamp: 0, + }, + { error: "Error: Missing key(s) noOfAlerts in body." }, + ], + [ + 539, + "invalid chain specified (non-string)", + { + chains: ["test_chain", 123], + severities: [], + sources: [], + minTimestamp: 0, + maxTimestamp: 0, + noOfAlerts: 1, + }, + { error: "Error: An invalid value was given to req.body.chains." }, + ], + [ + 539, + "invalid severity specified (non-string)", + { + chains: [], + severities: ["test_chain", 123], + sources: [], + minTimestamp: 0, + maxTimestamp: 0, + noOfAlerts: 1, + }, + { + error: "Error: An invalid value was given to " + "req.body.severities.", + }, + ], + [ + 539, + "invalid source specified (non-string)", + { + chains: [], + severities: [], + sources: ["test_chain", 123], + minTimestamp: 0, + maxTimestamp: 0, + noOfAlerts: 1, + }, + { error: "Error: An invalid value was given to req.body.sources." }, + ], + [ + 539, + "invalid severity specified (non-existing)", + { + chains: [], + severities: ["TEST"], + sources: [], + minTimestamp: 0, + maxTimestamp: 0, + noOfAlerts: 1, + }, + { + error: "Error: An invalid value was given to " + "req.body.severities.", + }, + ], + [ + 539, + "invalid minTimestamp specified (string)", + { + chains: [], + severities: [], + sources: [], + minTimestamp: "TEST", + maxTimestamp: 0, + noOfAlerts: 1, + }, + { + error: + "Error: An invalid value was given to " + "req.body.minTimestamp.", + }, + ], + [ + 539, + "invalid maxTimestamp specified (string)", + { + chains: [], + severities: [], + sources: [], + minTimestamp: 0, + maxTimestamp: "TEST", + noOfAlerts: 1, + }, + { + error: + "Error: An invalid value was given to " + "req.body.maxTimestamp.", + }, + ], + [ + 539, + "invalid noOfAlerts specified (string)", + { + chains: [], + severities: [], + sources: [], + minTimestamp: 0, + maxTimestamp: 0, + noOfAlerts: "TEST", + }, + { + error: "Error: An invalid value was given to " + "req.body.noOfAlerts.", + }, + ], + [ + 539, + "invalid minTimestamp specified (negative number)", + { + chains: [], + severities: [], + sources: [], + minTimestamp: -123, + maxTimestamp: 0, + noOfAlerts: 1, + }, + { + error: + "Error: An invalid value was given to " + "req.body.minTimestamp.", + }, + ], + [ + 539, + "invalid maxTimestamp specified (negative number)", + { + chains: [], + severities: [], + sources: [], + minTimestamp: 0, + maxTimestamp: -123, + noOfAlerts: 1, + }, + { + error: + "Error: An invalid value was given to " + "req.body.maxTimestamp.", + }, + ], + [ + 539, + "invalid noOfAlerts specified (negative number)", + { + chains: [], + severities: [], + sources: [], + minTimestamp: 0, + maxTimestamp: 0, + noOfAlerts: -123, + }, + { + error: "Error: An invalid value was given to " + "req.body.noOfAlerts.", + }, + ], + [ + 539, + "invalid noOfAlerts specified (zero)", + { + chains: [], + severities: [], + sources: [], + 
minTimestamp: 0, + maxTimestamp: 0, + noOfAlerts: 0, + }, + { + error: "Error: An invalid value was given to " + "req.body.noOfAlerts.", + }, + ], + ])( + "Should return error and %s status code if %s", + async (statusCode: number, _: string, body: any, endpointRet: any) => { + const res = await request(app).post(endpoint).send(body); + expect(res.statusCode).toEqual(statusCode); + expect(res.body).toEqual(endpointRet); + } + ); + + it( + "Should return error and status code 536 if mongo " + "returns an error", + async () => { + mongoAggregateMockReturn = [null, null]; + const res = await request(app) + .post(endpoint) + .send({ + chains: ["test_chain"], + severities: [], + sources: ["test_source"], + minTimestamp: 0, + maxTimestamp: 5000000000, + noOfAlerts: 100, + }); + expect(res.statusCode).toEqual(536); + expect(res.body).toEqual({ + error: "Error: Could not retrieve data from Mongo.", + }); + } + ); }); -describe('Mongo Metrics POST Route', () => { - const endpoint = '/server/mongo/metrics'; - - it.each([ - ['no chains or systems are specified', - { - chains: [], systems: [], minTimestamp: 0, - maxTimestamp: 5000000000, noOfMetricsPerSource: 100 - }, - [], - {result: {metrics: {}}}], - ['a single chain and a single system that has no metrics are specified', - { - chains: ['test_chain'], systems: ['test_system'], - minTimestamp: 0, maxTimestamp: 5000000000, - noOfMetricsPerSource: 100 - }, - [], - {result: {metrics: {test_system: []}}}], - ['a single chain and a single system that has some ' + - 'metrics are specified', - { - chains: ['test_chain'], systems: ['test_system'], - minTimestamp: 0, maxTimestamp: 5000000000, - noOfMetricsPerSource: 100 - }, - metricsSingleSystemMongoRet, - metricsSingleSystemEndpointRet], - ['multiple chains and multiple systems that have some ' + - 'metrics are specified', - { - chains: ['test_chain', 'test_chain2'], - systems: ['test_system', 'test_system2'], minTimestamp: 0, - maxTimestamp: 5000000000, noOfMetricsPerSource: 100 - }, - metricsMultipleSystemsMongoRet, - metricsMultipleSystemsEndpointRet], - ])('Should return correct result and a successful status code if %s', - async (_: string, body: any, mongoRet: any[], endpointRet: any) => { - mongoAggregateMockReturn = mongoRet; - const res = await request(app).post(endpoint).send(body); - expect(res.statusCode).toEqual(200); - expect(res.body).toEqual(endpointRet); - }); - - it.each([ - [532, - 'chains, systems, minTimestamp, maxTimestamp, and ' + - 'noOfMetricsPerSource not specified', - {}, - { - error: 'Error: Missing key(s) chains, systems, minTimestamp, ' + - 'maxTimestamp, noOfMetricsPerSource in body.' 
- }], - [532, 'chains not specified', - { - systems: [], minTimestamp: 0, maxTimestamp: 0, - noOfMetricsPerSource: 1 - }, - {error: 'Error: Missing key(s) chains in body.'}], - [532, 'systems not specified', - { - chains: [], minTimestamp: 0, maxTimestamp: 0, - noOfMetricsPerSource: 1 - }, - {error: 'Error: Missing key(s) systems in body.'}], - [532, 'minTimestamp not specified', - {chains: [], systems: [], maxTimestamp: 0, noOfMetricsPerSource: 1}, - {error: 'Error: Missing key(s) minTimestamp in body.'}], - [532, 'maxTimestamp not specified', - {chains: [], systems: [], minTimestamp: 0, noOfMetricsPerSource: 1}, - {error: 'Error: Missing key(s) maxTimestamp in body.'}], - [532, 'noOfMetricsPerSource not specified', - {chains: [], systems: [], minTimestamp: 0, maxTimestamp: 0}, - {error: 'Error: Missing key(s) noOfMetricsPerSource in body.'}], - [539, 'invalid chain specified (non-string)', - { - chains: ['test_chain', 123], systems: [], minTimestamp: 0, - maxTimestamp: 0, noOfMetricsPerSource: 1 - }, - {error: 'Error: An invalid value was given to req.body.chains.'}], - [539, 'invalid system specified (non-string)', - { - chains: [], systems: ['test_chain', 123], minTimestamp: 0, - maxTimestamp: 0, noOfMetricsPerSource: 1 - }, - {error: 'Error: An invalid value was given to req.body.systems.'}], - [539, 'invalid minTimestamp specified (string)', - { - chains: [], systems: [], minTimestamp: 'TEST', maxTimestamp: 0, - noOfMetricsPerSource: 1 - }, - { - error: 'Error: An invalid value was given to ' + - 'req.body.minTimestamp.' - }], - [539, 'invalid maxTimestamp specified (string)', - { - chains: [], systems: [], minTimestamp: 0, maxTimestamp: 'TEST', - noOfMetricsPerSource: 1 - }, - { - error: 'Error: An invalid value was given to ' + - 'req.body.maxTimestamp.' - }], - [539, 'invalid noOfMetricsPerSource specified (string)', - { - chains: [], systems: [], minTimestamp: 0, maxTimestamp: 0, - noOfMetricsPerSource: 'TEST' - }, - { - error: 'Error: An invalid value was given to ' + - 'req.body.noOfMetricsPerSource.' - }], - [539, 'invalid minTimestamp specified (negative number)', - { - chains: [], systems: [], minTimestamp: -123, maxTimestamp: 0, - noOfMetricsPerSource: 1 - }, - { - error: 'Error: An invalid value was given to ' + - 'req.body.minTimestamp.' - }], - [539, 'invalid maxTimestamp specified (negative number)', - { - chains: [], systems: [], minTimestamp: 0, maxTimestamp: -123, - noOfMetricsPerSource: 1 - }, - { - error: 'Error: An invalid value was given to ' + - 'req.body.maxTimestamp.' - }], - [539, 'invalid noOfMetricsPerSource specified (negative number)', - { - chains: [], systems: [], minTimestamp: 0, maxTimestamp: 0, - noOfMetricsPerSource: -123 - }, - { - error: 'Error: An invalid value was given to ' + - 'req.body.noOfMetricsPerSource.' - }], - [539, 'invalid noOfMetricsPerSource specified (zero)', - { - chains: [], systems: [], minTimestamp: 0, maxTimestamp: 0, - noOfMetricsPerSource: 0 - }, - { - error: 'Error: An invalid value was given to ' + - 'req.body.noOfMetricsPerSource.' 
- }], - ])('Should return error and %s status code if %s', - async (statusCode: number, _: string, body: any, - endpointRet: any) => { - const res = await request(app).post(endpoint).send(body); - expect(res.statusCode).toEqual(statusCode); - expect(res.body).toEqual(endpointRet); - }); +describe("Mongo Metrics POST Route", () => { + const endpoint = "/server/mongo/metrics"; + + it.each([ + [ + "no chains or systems are specified", + { + chains: [], + systems: [], + minTimestamp: 0, + maxTimestamp: 5000000000, + noOfMetricsPerSource: 100, + }, + [], + { result: { metrics: {} } }, + ], + [ + "a single chain and a single system that has no metrics are specified", + { + chains: ["test_chain"], + systems: ["test_system"], + minTimestamp: 0, + maxTimestamp: 5000000000, + noOfMetricsPerSource: 100, + }, + [], + { result: { metrics: { test_system: [] } } }, + ], + [ + "a single chain and a single system that has some " + + "metrics are specified", + { + chains: ["test_chain"], + systems: ["test_system"], + minTimestamp: 0, + maxTimestamp: 5000000000, + noOfMetricsPerSource: 100, + }, + metricsSingleSystemMongoRet, + metricsSingleSystemEndpointRet, + ], + [ + "multiple chains and multiple systems that have some " + + "metrics are specified", + { + chains: ["test_chain", "test_chain2"], + systems: ["test_system", "test_system2"], + minTimestamp: 0, + maxTimestamp: 5000000000, + noOfMetricsPerSource: 100, + }, + metricsMultipleSystemsMongoRet, + metricsMultipleSystemsEndpointRet, + ], + ])( + "Should return correct result and a successful status code if %s", + async (_: string, body: any, mongoRet: any[], endpointRet: any) => { + mongoAggregateMockReturn = mongoRet; + const res = await request(app).post(endpoint).send(body); + expect(res.statusCode).toEqual(200); + expect(res.body).toEqual(endpointRet); + } + ); + + it.each([ + [ + 532, + "chains, systems, minTimestamp, maxTimestamp, and " + + "noOfMetricsPerSource not specified", + {}, + { + error: + "Error: Missing key(s) chains, systems, minTimestamp, " + + "maxTimestamp, noOfMetricsPerSource in body.", + }, + ], + [ + 532, + "chains not specified", + { + systems: [], + minTimestamp: 0, + maxTimestamp: 0, + noOfMetricsPerSource: 1, + }, + { error: "Error: Missing key(s) chains in body." }, + ], + [ + 532, + "systems not specified", + { + chains: [], + minTimestamp: 0, + maxTimestamp: 0, + noOfMetricsPerSource: 1, + }, + { error: "Error: Missing key(s) systems in body." }, + ], + [ + 532, + "minTimestamp not specified", + { chains: [], systems: [], maxTimestamp: 0, noOfMetricsPerSource: 1 }, + { error: "Error: Missing key(s) minTimestamp in body." }, + ], + [ + 532, + "maxTimestamp not specified", + { chains: [], systems: [], minTimestamp: 0, noOfMetricsPerSource: 1 }, + { error: "Error: Missing key(s) maxTimestamp in body." }, + ], + [ + 532, + "noOfMetricsPerSource not specified", + { chains: [], systems: [], minTimestamp: 0, maxTimestamp: 0 }, + { error: "Error: Missing key(s) noOfMetricsPerSource in body." }, + ], + [ + 539, + "invalid chain specified (non-string)", + { + chains: ["test_chain", 123], + systems: [], + minTimestamp: 0, + maxTimestamp: 0, + noOfMetricsPerSource: 1, + }, + { error: "Error: An invalid value was given to req.body.chains." }, + ], + [ + 539, + "invalid system specified (non-string)", + { + chains: [], + systems: ["test_chain", 123], + minTimestamp: 0, + maxTimestamp: 0, + noOfMetricsPerSource: 1, + }, + { error: "Error: An invalid value was given to req.body.systems." 
}, + ], + [ + 539, + "invalid minTimestamp specified (string)", + { + chains: [], + systems: [], + minTimestamp: "TEST", + maxTimestamp: 0, + noOfMetricsPerSource: 1, + }, + { + error: + "Error: An invalid value was given to " + "req.body.minTimestamp.", + }, + ], + [ + 539, + "invalid maxTimestamp specified (string)", + { + chains: [], + systems: [], + minTimestamp: 0, + maxTimestamp: "TEST", + noOfMetricsPerSource: 1, + }, + { + error: + "Error: An invalid value was given to " + "req.body.maxTimestamp.", + }, + ], + [ + 539, + "invalid noOfMetricsPerSource specified (string)", + { + chains: [], + systems: [], + minTimestamp: 0, + maxTimestamp: 0, + noOfMetricsPerSource: "TEST", + }, + { + error: + "Error: An invalid value was given to " + + "req.body.noOfMetricsPerSource.", + }, + ], + [ + 539, + "invalid minTimestamp specified (negative number)", + { + chains: [], + systems: [], + minTimestamp: -123, + maxTimestamp: 0, + noOfMetricsPerSource: 1, + }, + { + error: + "Error: An invalid value was given to " + "req.body.minTimestamp.", + }, + ], + [ + 539, + "invalid maxTimestamp specified (negative number)", + { + chains: [], + systems: [], + minTimestamp: 0, + maxTimestamp: -123, + noOfMetricsPerSource: 1, + }, + { + error: + "Error: An invalid value was given to " + "req.body.maxTimestamp.", + }, + ], + [ + 539, + "invalid noOfMetricsPerSource specified (negative number)", + { + chains: [], + systems: [], + minTimestamp: 0, + maxTimestamp: 0, + noOfMetricsPerSource: -123, + }, + { + error: + "Error: An invalid value was given to " + + "req.body.noOfMetricsPerSource.", + }, + ], + [ + 539, + "invalid noOfMetricsPerSource specified (zero)", + { + chains: [], + systems: [], + minTimestamp: 0, + maxTimestamp: 0, + noOfMetricsPerSource: 0, + }, + { + error: + "Error: An invalid value was given to " + + "req.body.noOfMetricsPerSource.", + }, + ], + ])( + "Should return error and %s status code if %s", + async (statusCode: number, _: string, body: any, endpointRet: any) => { + const res = await request(app).post(endpoint).send(body); + expect(res.statusCode).toEqual(statusCode); + expect(res.body).toEqual(endpointRet); + } + ); }); -describe('Redis Alerts Overview POST Route', () => { - const endpoint = '/server/redis/alertsOverview'; - - it.each([ - ['a single chain with no sources is specified', - { - parentIds: { - unique_chain_id: { - include_chain_sourced_alerts: true, - systems: [], nodes: [], - github_repos: [], dockerhub_repos: [] - } - } - }, - {}, - { - result: { - unique_chain_id: { - info: 0, critical: 0, warning: 0, error: 0, - problems: {}, releases: {}, tags: {} - } - } - }], - ['a single chain with a single system that has no alerts is specified', - { - parentIds: { - unique_chain_id: { - include_chain_sourced_alerts: true, - systems: ['test_system'], nodes: [], - github_repos: [], dockerhub_repos: [] - } - } - }, - {}, - { - result: { - unique_chain_id: { - info: 7, critical: 0, warning: 0, error: 0, - problems: {}, releases: {}, tags: {} - } - } - }], - ['a single chain with a single node that has no alerts is specified', - { - parentIds: { - unique_chain_id: { - include_chain_sourced_alerts: true, - systems: [], nodes: ['test_node'], - github_repos: [], dockerhub_repos: [] - } - } - }, - {}, - { - result: { - unique_chain_id: { - info: 52, critical: 0, warning: 0, error: 0, - problems: {}, releases: {}, tags: {} - } - } - }], - ['a single chain with a single github repo that has no alerts is' + - ' specified', - { - parentIds: { - unique_chain_id: { - 
include_chain_sourced_alerts: true, - systems: [], nodes: [], - github_repos: ['test_repo'], dockerhub_repos: [] - } - } - }, - {}, - { - result: { - unique_chain_id: { - info: 3, critical: 0, warning: 0, error: 0, - problems: {}, releases: {}, tags: {} - } - } - }], - ['a single chain with a single dockerhub repo that has no alerts is' + - ' specified', - { - parentIds: { - unique_chain_id: { - include_chain_sourced_alerts: true, - systems: [], nodes: [], - github_repos: [], dockerhub_repos: ['test_repo'] - } - } - }, - {}, - { - result: { - unique_chain_id: { - info: 5, critical: 0, warning: 0, error: 0, - problems: {}, releases: {}, tags: {} - } - } - }], - ['a single chain with a single system that has some alerts is ' + - 'specified', - { - parentIds: { - unique_chain_id: { - include_chain_sourced_alerts: true, - systems: ['test_system'], nodes: [], - github_repos: [], dockerhub_repos: [] - } - } - }, - alertsOverviewSingleSystemRedisRet, - alertsOverviewSingleSystemEndpointRet], - ['a single chain with a single node that has some alerts is specified', - { - parentIds: { - unique_chain_id: { - include_chain_sourced_alerts: true, - systems: [], nodes: ['test_node'], - github_repos: [], dockerhub_repos: [] - } - } - }, - alertsOverviewSingleNodeRedisRet, - alertsOverviewSingleNodeEndpointRet], - ['a single chain with a single github repo that has some releases is' + - ' specified', - { - parentIds: { - unique_chain_id: { - include_chain_sourced_alerts: true, - systems: [], nodes: [], - github_repos: ['test_repo'], dockerhub_repos: [] - } - } - }, - alertsOverviewSingleGitHubRepoRedisRet, - alertsOverviewSingleGitHubRepoEndpointRet], - ['a single chain with a single dockerhub repo that has some tag' + - ' changes is specified', - { - parentIds: { - unique_chain_id: { - include_chain_sourced_alerts: true, - systems: [], nodes: [], - github_repos: [], dockerhub_repos: ['test_repo'] - } - } - }, - alertsOverviewSingleDockerHubRepoRedisRet, - alertsOverviewSingleDockerHubRepoEndpointRet], - ['a single chain which includes chain sourced alerts and chain has' + - ' some alerts is specified', - { - parentIds: { - unique_chain_id: { - include_chain_sourced_alerts: true, - systems: [], nodes: [], - github_repos: [], dockerhub_repos: [] - } - } - }, - alertsOverviewChainSourceRedisRet, - alertsOverviewChainSourceEndpointRet], - ['a single chain which includes chain sourced alerts with unique identifier ' + - 'and chain has some alerts is specified', - { - parentIds: { - unique_chain_id: { - include_chain_sourced_alerts: true, - systems: [], nodes: [], - github_repos: [], dockerhub_repos: [] - } - } - }, - alertsOverviewChainSourceWithUniqueIdentifierRedisRet, - alertsOverviewChainSourceWithUniqueIdentifierEndpointRet], - ['a single chain with multiple sources that have alerts', - { - parentIds: { - unique_chain_id: { - include_chain_sourced_alerts: true, - systems: ['test_system'], - nodes: ['test_node', 'test_evm_node'], - github_repos: ['test_github_repo'], - dockerhub_repos: ['test_dockerhub_repo'] - } - } +describe("Redis Alerts Overview POST Route", () => { + const endpoint = "/server/redis/alertsOverview"; + + it.each([ + [ + "a single chain with no sources is specified", + { + parentIds: { + unique_chain_id: { + include_chain_sourced_alerts: true, + systems: [], + nodes: [], + github_repos: [], + dockerhub_repos: [], + }, + }, + }, + {}, + { + result: { + unique_chain_id: { + info: 0, + critical: 0, + warning: 0, + error: 0, + problems: {}, + releases: {}, + tags: {}, + }, + }, + }, + ], + 
[ + "a single chain with a single system that has no alerts is specified", + { + parentIds: { + unique_chain_id: { + include_chain_sourced_alerts: true, + systems: ["test_system"], + nodes: [], + github_repos: [], + dockerhub_repos: [], + }, + }, + }, + {}, + { + result: { + unique_chain_id: { + info: 7, + critical: 0, + warning: 0, + error: 0, + problems: {}, + releases: {}, + tags: {}, + }, + }, + }, + ], + [ + "a single chain with a single node that has no alerts is specified", + { + parentIds: { + unique_chain_id: { + include_chain_sourced_alerts: true, + systems: [], + nodes: ["test_node"], + github_repos: [], + dockerhub_repos: [], + }, + }, + }, + {}, + { + result: { + unique_chain_id: { + info: 52, + critical: 0, + warning: 0, + error: 0, + problems: {}, + releases: {}, + tags: {}, + }, + }, + }, + ], + [ + "a single chain with a single github repo that has no alerts is" + + " specified", + { + parentIds: { + unique_chain_id: { + include_chain_sourced_alerts: true, + systems: [], + nodes: [], + github_repos: ["test_repo"], + dockerhub_repos: [], + }, + }, + }, + {}, + { + result: { + unique_chain_id: { + info: 3, + critical: 0, + warning: 0, + error: 0, + problems: {}, + releases: {}, + tags: {}, + }, + }, + }, + ], + [ + "a single chain with a single dockerhub repo that has no alerts is" + + " specified", + { + parentIds: { + unique_chain_id: { + include_chain_sourced_alerts: true, + systems: [], + nodes: [], + github_repos: [], + dockerhub_repos: ["test_repo"], + }, + }, + }, + {}, + { + result: { + unique_chain_id: { + info: 5, + critical: 0, + warning: 0, + error: 0, + problems: {}, + releases: {}, + tags: {}, + }, + }, + }, + ], + [ + "a single chain with a single system that has some alerts is " + + "specified", + { + parentIds: { + unique_chain_id: { + include_chain_sourced_alerts: true, + systems: ["test_system"], + nodes: [], + github_repos: [], + dockerhub_repos: [], + }, + }, + }, + alertsOverviewSingleSystemRedisRet, + alertsOverviewSingleSystemEndpointRet, + ], + [ + "a single chain with a single node that has some alerts is specified", + { + parentIds: { + unique_chain_id: { + include_chain_sourced_alerts: true, + systems: [], + nodes: ["test_node"], + github_repos: [], + dockerhub_repos: [], + }, + }, + }, + alertsOverviewSingleNodeRedisRet, + alertsOverviewSingleNodeEndpointRet, + ], + [ + "a single chain with a single github repo that has some releases is" + + " specified", + { + parentIds: { + unique_chain_id: { + include_chain_sourced_alerts: true, + systems: [], + nodes: [], + github_repos: ["test_repo"], + dockerhub_repos: [], + }, + }, + }, + alertsOverviewSingleGitHubRepoRedisRet, + alertsOverviewSingleGitHubRepoEndpointRet, + ], + [ + "a single chain with a single dockerhub repo that has some tag" + + " changes is specified", + { + parentIds: { + unique_chain_id: { + include_chain_sourced_alerts: true, + systems: [], + nodes: [], + github_repos: [], + dockerhub_repos: ["test_repo"], + }, + }, + }, + alertsOverviewSingleDockerHubRepoRedisRet, + alertsOverviewSingleDockerHubRepoEndpointRet, + ], + [ + "a single chain which includes chain sourced alerts and chain has" + + " some alerts is specified", + { + parentIds: { + unique_chain_id: { + include_chain_sourced_alerts: true, + systems: [], + nodes: [], + github_repos: [], + dockerhub_repos: [], + }, + }, + }, + alertsOverviewChainSourceRedisRet, + alertsOverviewChainSourceEndpointRet, + ], + [ + "a single chain which includes chain sourced alerts with unique identifier " + + "and chain has some alerts is 
specified", + { + parentIds: { + unique_chain_id: { + include_chain_sourced_alerts: true, + systems: [], + nodes: [], + github_repos: [], + dockerhub_repos: [], + }, + }, + }, + alertsOverviewChainSourceWithUniqueIdentifierRedisRet, + alertsOverviewChainSourceWithUniqueIdentifierEndpointRet, + ], + [ + "a single chain with multiple sources that have alerts", + { + parentIds: { + unique_chain_id: { + include_chain_sourced_alerts: true, + systems: ["test_system"], + nodes: ["test_node", "test_evm_node"], + github_repos: ["test_github_repo"], + dockerhub_repos: ["test_dockerhub_repo"], + }, + }, + }, + alertsOverviewMultipleSourcesRedisRet, + alertsOverviewMultipleSourcesEndpointRet, + ], + ])( + "Should return correct result and a successful status code if %s", + async ( + _: string, + body: any, + redisRet: { [p: string]: string }, + endpointRet: any + ) => { + hgetallMockCallbackReply = redisRet; + const res = await request(app).post(endpoint).send(body); + expect(res.statusCode).toEqual(200); + expect(res.body).toEqual(endpointRet); + } + ); + + it.each([ + [ + 532, + "parentIds not specified", + {}, + { error: "Error: Missing key(s) parentIds in body." }, + ], + [ + 538, + "include_chain_sourced_alerts property not specified", + { + parentIds: { + systems: ["test_system"], + nodes: ["test_node"], + github_repos: ["test_github_repo"], + dockerhub_repos: ["test_dockerhub_repo"], + }, + }, + { error: parentIdsInvalidSchemaError }, + ], + [ + 538, + "invalid properties are specified", + { parentIds: { unique_chain_id: { test: [] } } }, + { error: parentIdsInvalidSchemaError }, + ], + ])( + "Should return error and %s status code if %s", + async (statusCode: number, _: string, body: any, endpointRet: any) => { + const res = await request(app).post(endpoint).send(body); + expect(res.statusCode).toEqual(statusCode); + expect(res.body).toEqual(endpointRet); + } + ); + + it( + "Should return error and status code 534 if redis returns " + "an error", + async () => { + execMockCallbackError = new RedisError("Test Error"); + const res = await request(app) + .post(endpoint) + .send({ + parentIds: { + unique_chain_id: { + include_chain_sourced_alerts: true, + systems: [], + nodes: [], + github_repos: [], + dockerhub_repos: [], }, - alertsOverviewMultipleSourcesRedisRet, - alertsOverviewMultipleSourcesEndpointRet], - ])('Should return correct result and a successful status code if %s', - async (_: string, body: any, redisRet: { [p: string]: string }, - endpointRet: any) => { - hgetallMockCallbackReply = redisRet; - const res = await request(app).post(endpoint).send(body); - expect(res.statusCode).toEqual(200); - expect(res.body).toEqual(endpointRet); + }, }); - - it.each([ - [532, 'parentIds not specified', {}, - {error: 'Error: Missing key(s) parentIds in body.'}], - [538, 'include_chain_sourced_alerts property not specified', - { - parentIds: { - systems: ['test_system'], - nodes: ['test_node'], - github_repos: ['test_github_repo'], - dockerhub_repos: ['test_dockerhub_repo'] - } + expect(res.statusCode).toEqual(534); + expect(res.body).toEqual({ + error: "Error: RedisError retrieved from Redis: Test Error.", + }); + } + ); + + it.each([ + ["a number", "123", "123"], + ["a string (not null string)", "test", "test"], + ["an invalid JSON", '{"test": "test"}', "[object Object]"], + ])( + "Should return error and 540 status code if redis returns %s", + async (_: string, invalidValue: string, invalidValueError: string) => { + hgetallMockCallbackReply = { alert_github1_test_repo: invalidValue }; + const res 
= await request(app) + .post(endpoint) + .send({ + parentIds: { + unique_chain_id: { + include_chain_sourced_alerts: true, + systems: [], + nodes: [], + github_repos: ["test_repo"], + dockerhub_repos: [], }, - {error: parentIdsInvalidSchemaError}], - [538, 'invalid properties are specified', - {parentIds: {unique_chain_id: {test: []}}}, - {error: parentIdsInvalidSchemaError}], - ])('Should return error and %s status code if %s', - async (statusCode: number, _: string, body: any, - endpointRet: any) => { - const res = await request(app).post(endpoint).send(body); - expect(res.statusCode).toEqual(statusCode); - expect(res.body).toEqual(endpointRet); - }); - - it('Should return error and status code 534 if redis returns ' + - 'an error', - async () => { - execMockCallbackError = new RedisError('Test Error'); - const res = await request(app).post(endpoint) - .send({ - parentIds: { - unique_chain_id: { - include_chain_sourced_alerts: true, - systems: [], nodes: [], - github_repos: [], dockerhub_repos: [] - } - } - }); - expect(res.statusCode).toEqual(534); - expect(res.body) - .toEqual({ - error: 'Error: RedisError retrieved from Redis: Test Error.' - }); - }); - - it.each([ - ['a number', '123', '123'], - ['a string (not null string)', 'test', 'test'], - ['an invalid JSON', '{"test": "test"}', '[object Object]'], - ])('Should return error and 540 status code if redis returns %s', - async (_: string, invalidValue: string, - invalidValueError: string) => { - hgetallMockCallbackReply = {'alert_github1_test_repo': invalidValue} - const res = await request(app).post(endpoint).send({ - parentIds: { - unique_chain_id: { - include_chain_sourced_alerts: true, - systems: [], nodes: [], - github_repos: ['test_repo'], dockerhub_repos: [] - } - } - }); - expect(res.statusCode).toEqual(540); - expect(res.body).toEqual({ - error: 'Error: Invalid value retrieved ' + - 'from Redis: ' + invalidValueError + '.' 
- }); - }); + }, + }); + expect(res.statusCode).toEqual(540); + expect(res.body).toEqual({ + error: + "Error: Invalid value retrieved " + + "from Redis: " + + invalidValueError + + ".", + }); + } + ); }); -describe('Redis Metrics POST Route', () => { - const endpoint = '/server/redis/metrics'; - - it.each([ - ['a single chain with no sources is specified', - {parentIds: {unique_chain_id: {systems: [], repos: []}}}, - [], - {result: {unique_chain_id: {system: {}, github: {}}}}], - ['a single chain with a single system that has no metrics is specified', - { - parentIds: { - unique_chain_id: { - systems: ['test_system'], repos: [] - } - } - }, - noMetricsSingleSystemRedisRet, - noMetricsSingleSystemRedisEndpointRet], - ['a single chain with a single repo that has no metrics is specified', - {parentIds: {unique_chain_id: {systems: [], repos: ['test_repo']}}}, - noMetricsSingleRepoRedisRet, - noMetricsSingleRepoRedisEndpointRet], - ['a single chain with a single system that has some metrics is ' + - 'specified', - { - parentIds: { - unique_chain_id: { - systems: ['test_system'], repos: [] - } - } - }, - metricsSingleSystemRedisRet, - metricsSingleSystemRedisEndpointRet], - ['a single chain with a single repo that has some metrics is specified', - {parentIds: {unique_chain_id: {systems: [], repos: ['test_repo']}}}, - metricsSingleRepoRedisRet, - metricsSingleRepoRedisEndpointRet], - ])('Should return correct result and a successful status code if %s', - async (_: string, body: any, redisRet: string[], - endpointRet: any) => { - hmgetMockCallbackReply = redisRet; - const res = await request(app).post(endpoint).send(body); - expect(res.statusCode).toEqual(200); - expect(res.body).toEqual(endpointRet); - }); - - it.each([ - [532, 'parentIds not specified', {}, - {error: 'Error: Missing key(s) parentIds in body.'}], - [538, 'invalid properties are specified', - {parentIds: {unique_chain_id: {test: []}}}, - {error: parentIdsInvalidSchemaError}], - ])('Should return error and %s status code if %s', - async (statusCode: number, _: string, body: any, - endpointRet: any) => { - const res = await request(app).post(endpoint).send(body); - expect(res.statusCode).toEqual(statusCode); - expect(res.body).toEqual(endpointRet); - }); - - it('Should return error and status code 534 if redis returns ' + - 'an error', - async () => { - execMockCallbackError = new RedisError('Test Error'); - const res = await request(app).post(endpoint) - .send({ - parentIds: - {unique_chain_id: {systems: [], repos: []}} - }); - expect(res.statusCode).toEqual(534); - expect(res.body) - .toEqual({ - error: 'Error: RedisError retrieved from Redis: Test Error.' - }); - }); - - // For this endpoint, these redis returns do not cause an error since the - // values are first passed to the JSON.stringify function. 
- it.each([ - ['a number', '123'], - ['a string (not null string)', 'test'], - ['an invalid JSON', '{"test": "test"'], - ])('Should return 200 status code if redis returns %s', - async (_: string, invalidValue: string) => { - hmgetMockCallbackReply = [invalidValue, 'null']; - const res = await request(app).post(endpoint).send({ - parentIds: - {unique_chain_id: {systems: [], repos: []}} - }); - expect(res.statusCode).toEqual(200); - expect(res.body).toEqual({ - result: { - unique_chain_id: { - github: {}, - system: {} - } - } - }); - }); +describe("Redis Metrics POST Route", () => { + const endpoint = "/server/redis/metrics"; + + it.each([ + [ + "a single chain with no sources is specified", + { parentIds: { unique_chain_id: { systems: [], repos: [] } } }, + [], + { result: { unique_chain_id: { system: {}, github: {} } } }, + ], + [ + "a single chain with a single system that has no metrics is specified", + { + parentIds: { + unique_chain_id: { + systems: ["test_system"], + repos: [], + }, + }, + }, + noMetricsSingleSystemRedisRet, + noMetricsSingleSystemRedisEndpointRet, + ], + [ + "a single chain with a single repo that has no metrics is specified", + { parentIds: { unique_chain_id: { systems: [], repos: ["test_repo"] } } }, + noMetricsSingleRepoRedisRet, + noMetricsSingleRepoRedisEndpointRet, + ], + [ + "a single chain with a single system that has some metrics is " + + "specified", + { + parentIds: { + unique_chain_id: { + systems: ["test_system"], + repos: [], + }, + }, + }, + metricsSingleSystemRedisRet, + metricsSingleSystemRedisEndpointRet, + ], + [ + "a single chain with a single repo that has some metrics is specified", + { parentIds: { unique_chain_id: { systems: [], repos: ["test_repo"] } } }, + metricsSingleRepoRedisRet, + metricsSingleRepoRedisEndpointRet, + ], + ])( + "Should return correct result and a successful status code if %s", + async (_: string, body: any, redisRet: string[], endpointRet: any) => { + hmgetMockCallbackReply = redisRet; + const res = await request(app).post(endpoint).send(body); + expect(res.statusCode).toEqual(200); + expect(res.body).toEqual(endpointRet); + } + ); + + it.each([ + [ + 532, + "parentIds not specified", + {}, + { error: "Error: Missing key(s) parentIds in body." }, + ], + [ + 538, + "invalid properties are specified", + { parentIds: { unique_chain_id: { test: [] } } }, + { error: parentIdsInvalidSchemaError }, + ], + ])( + "Should return error and %s status code if %s", + async (statusCode: number, _: string, body: any, endpointRet: any) => { + const res = await request(app).post(endpoint).send(body); + expect(res.statusCode).toEqual(statusCode); + expect(res.body).toEqual(endpointRet); + } + ); + + it( + "Should return error and status code 534 if redis returns " + "an error", + async () => { + execMockCallbackError = new RedisError("Test Error"); + const res = await request(app) + .post(endpoint) + .send({ + parentIds: { unique_chain_id: { systems: [], repos: [] } }, + }); + expect(res.statusCode).toEqual(534); + expect(res.body).toEqual({ + error: "Error: RedisError retrieved from Redis: Test Error.", + }); + } + ); + + // For this endpoint, these redis returns do not cause an error since the + // values are first passed to the JSON.stringify function. 
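+  // To make the contrast concrete: JSON.parse('{"test": "test"') would throw
+  // a SyntaxError on the unterminated object, whereas JSON.stringify of the
+  // same raw string simply re-encodes it, so the tests below expect a 200 with
+  // empty result objects rather than the 540 invalid-value error used by the
+  // alerts overview route above.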
+ it.each([ + ["a number", "123"], + ["a string (not null string)", "test"], + ["an invalid JSON", '{"test": "test"'], + ])( + "Should return 200 status code if redis returns %s", + async (_: string, invalidValue: string) => { + hmgetMockCallbackReply = [invalidValue, "null"]; + const res = await request(app) + .post(endpoint) + .send({ + parentIds: { unique_chain_id: { systems: [], repos: [] } }, + }); + expect(res.statusCode).toEqual(200); + expect(res.body).toEqual({ + result: { + unique_chain_id: { + github: {}, + system: {}, + }, + }, + }); + } + ); }); -describe('Ping Common Service - Node Exporter POST Route', () => { - const endpoint = '/server/common/node-exporter'; - - it('Should return error and status code if missing url in body', async () => { - let expectedReturn = new MissingKeysInBody('url'); - const res = await request(app).post(endpoint).send({}); - expect(res.statusCode).toEqual(532); - expect(res.body).toEqual({error: expectedReturn.message}); - }); - - it('Should return correct result and a successful status code if node exporter does not return an error', - async () => { - const resp: AxiosResponse = emptyAxiosResponse('node_cpu_seconds_total'); - axiosMock.get.mockResolvedValue(resp); - const res = await request(app).post(endpoint).send({'url': 'test_url.com'}); - expect(res.statusCode).toEqual(200); - expect(res.body).toEqual({result: PingStatus.SUCCESS}); - }); - - it('Should return ping error and status code 400 if node exporter returns an error', - async () => { - axiosMock.get.mockResolvedValue(emptyAxiosResponse()); - const res = await request(app).post(endpoint).send({'url': 'test_url.com'}); - expect(res.statusCode).toEqual(400); - expect(res.body).toEqual({result: PingStatus.ERROR}); - }); - - it('Should return ping timeout and status code 408 if node exporter returns a time out error', - async () => { - axiosMock.get.mockRejectedValue({code: 'ECONNABORTED'}) - const res = await request(app).post(endpoint).send({'url': 'test_url.com'}); - expect(res.statusCode).toEqual(408); - expect(res.body).toEqual({result: PingStatus.TIMEOUT}); - }); +describe("Ping Common Service - Node Exporter POST Route", () => { + const endpoint = "/server/common/node-exporter"; + + it("Should return error and status code if missing url in body", async () => { + let expectedReturn = new MissingKeysInBody("url"); + const res = await request(app).post(endpoint).send({}); + expect(res.statusCode).toEqual(532); + expect(res.body).toEqual({ error: expectedReturn.message }); + }); + + it("Should return correct result and a successful status code if node exporter does not return an error", async () => { + const resp: AxiosResponse = emptyAxiosResponse("node_cpu_seconds_total"); + axiosMock.get.mockResolvedValue(resp); + const res = await request(app).post(endpoint).send({ url: "test_url.com" }); + expect(res.statusCode).toEqual(200); + expect(res.body).toEqual({ result: PingStatus.SUCCESS }); + }); + + it("Should return ping error and status code 400 if node exporter returns an error", async () => { + axiosMock.get.mockResolvedValue(emptyAxiosResponse()); + const res = await request(app).post(endpoint).send({ url: "test_url.com" }); + expect(res.statusCode).toEqual(400); + expect(res.body).toEqual({ result: PingStatus.ERROR }); + }); + + it("Should return ping timeout and status code 408 if node exporter returns a time out error", async () => { + axiosMock.get.mockRejectedValue({ code: "ECONNABORTED" }); + const res = await request(app).post(endpoint).send({ url: "test_url.com" }); + 
expect(res.statusCode).toEqual(408); + expect(res.body).toEqual({ result: PingStatus.TIMEOUT }); + }); }); -describe('Ping Common Service - Prometheus POST Route', () => { - const endpoint = '/server/common/prometheus'; - - it.each([ - [532, 'url not specified', {'baseChain': 'test'}, - {error: 'Error: Missing key(s) url in body.'}], - [532, 'baseChain not specified', {'url': 'test'}, - {error: 'Error: Missing key(s) baseChain in body.'}], - [532, 'url and baseChain not specified', {}, - {error: 'Error: Missing key(s) url, baseChain in body.'}], - ])('Should return error and %s status code if %s', - async (statusCode: number, _: string, body: any, - endpointRet: any) => { - const res = await request(app).post(endpoint).send(body); - expect(res.statusCode).toEqual(statusCode); - expect(res.body).toEqual(endpointRet); - }); - - it.each([ - ['cosmos'], - ['chainlink'], - ['test chain'], - ])('Should return correct result and a successful status code if prometheus does not return an error for % baseChain', - async (baseChain: string) => { - const resp: AxiosResponse = emptyAxiosResponse(getPrometheusMetricFromBaseChain(baseChain)); - axiosMock.get.mockResolvedValue(resp); - const res = await request(app).post(endpoint).send({ - 'url': 'test_url.com', - 'baseChain': baseChain - }); - expect(res.statusCode).toEqual(200); - expect(res.body).toEqual({result: PingStatus.SUCCESS}); - }); - - it('Should return ping error and status code 400 if prometheus returns an error', - async () => { - axiosMock.get.mockResolvedValue(emptyAxiosResponse()); - const res = await request(app).post(endpoint).send({'url': 'test_url.com', 'baseChain': 'cosmos'}); - expect(res.statusCode).toEqual(400); - expect(res.body).toEqual({result: PingStatus.ERROR}); - }); - - it('Should return ping timeout and status code 408 if prometheus returns a time out error', - async () => { - axiosMock.get.mockRejectedValue({code: 'ECONNABORTED'}) - const res = await request(app).post(endpoint).send({'url': 'test_url.com', 'baseChain': 'cosmos'}); - expect(res.statusCode).toEqual(408); - expect(res.body).toEqual({result: PingStatus.TIMEOUT}); - }); +describe("Ping Common Service - Prometheus POST Route", () => { + const endpoint = "/server/common/prometheus"; + + it.each([ + [ + 532, + "url not specified", + { baseChain: "test" }, + { error: "Error: Missing key(s) url in body." }, + ], + [ + 532, + "baseChain not specified", + { url: "test" }, + { error: "Error: Missing key(s) baseChain in body." }, + ], + [ + 532, + "url and baseChain not specified", + {}, + { error: "Error: Missing key(s) url, baseChain in body." 
},
+ ],
+ ])(
+ "Should return error and %s status code if %s",
+ async (statusCode: number, _: string, body: any, endpointRet: any) => {
+ const res = await request(app).post(endpoint).send(body);
+ expect(res.statusCode).toEqual(statusCode);
+ expect(res.body).toEqual(endpointRet);
+ }
+ );
+
+ it.each([["cosmos"], ["chainlink"], ["test chain"]])(
+ "Should return correct result and a successful status code if prometheus does not return an error for %s baseChain",
+ async (baseChain: string) => {
+ const resp: AxiosResponse = emptyAxiosResponse(
+ getPrometheusMetricFromBaseChain(baseChain)
+ );
+ axiosMock.get.mockResolvedValue(resp);
+ const res = await request(app).post(endpoint).send({
+ url: "test_url.com",
+ baseChain: baseChain,
+ });
+ expect(res.statusCode).toEqual(200);
+ expect(res.body).toEqual({ result: PingStatus.SUCCESS });
+ }
+ );
+
+ it("Should return ping error and status code 400 if prometheus returns an error", async () => {
+ axiosMock.get.mockResolvedValue(emptyAxiosResponse());
+ const res = await request(app)
+ .post(endpoint)
+ .send({ url: "test_url.com", baseChain: "cosmos" });
+ expect(res.statusCode).toEqual(400);
+ expect(res.body).toEqual({ result: PingStatus.ERROR });
+ });
+
+ it("Should return ping timeout and status code 408 if prometheus returns a time out error", async () => {
+ axiosMock.get.mockRejectedValue({ code: "ECONNABORTED" });
+ const res = await request(app)
+ .post(endpoint)
+ .send({ url: "test_url.com", baseChain: "cosmos" });
+ expect(res.statusCode).toEqual(408);
+ expect(res.body).toEqual({ result: PingStatus.TIMEOUT });
+ });
 });
 
-describe('Ping Cosmos Service - Cosmos REST POST Route', () => {
- const endpoint = '/server/cosmos/rest';
-
- it('Should return error and status code if missing url in body', async () => {
- let expectedReturn = new MissingKeysInBody('url');
- const res = await request(app).post(endpoint).send({});
- expect(res.statusCode).toEqual(532);
- expect(res.body).toEqual({error: expectedReturn.message});
- });
-
- it('Should return correct result and a successful status code if cosmos rest does not return an error',
- async () => {
- const resp: AxiosResponse = emptyAxiosResponse({node_info: 'test'});
- axiosMock.get.mockResolvedValue(resp);
- const res = await request(app).post(endpoint).send({'url': 'test_url.com'});
- expect(res.statusCode).toEqual(200);
- expect(res.body).toEqual({result: PingStatus.SUCCESS});
- });
-
- it('Should return ping error and status code 400 if cosmos rest returns an error',
- async () => {
- axiosMock.get.mockResolvedValue(emptyAxiosResponse());
- const res = await request(app).post(endpoint).send({'url': 'test_url.com'});
- expect(res.statusCode).toEqual(400);
- expect(res.body).toEqual({result: PingStatus.ERROR});
- });
-
- it('Should return ping timeout and status code 408 if cosmos rest returns a time out error',
- async () => {
- axiosMock.get.mockRejectedValue({code: 'ECONNABORTED'})
- const res = await request(app).post(endpoint).send({'url': 'test_url.com'});
- expect(res.statusCode).toEqual(408);
- expect(res.body).toEqual({result: PingStatus.TIMEOUT});
- });
+describe("Ping Cosmos Service - Cosmos REST POST Route", () => {
+ const endpoint = "/server/cosmos/rest";
+
+ it("Should return error and status code if missing url in body", async () => {
+ let expectedReturn = new MissingKeysInBody("url");
+ const res = await request(app).post(endpoint).send({});
+ expect(res.statusCode).toEqual(532);
+ expect(res.body).toEqual({ error: expectedReturn.message });
+ });
+
+ 
it("Should return correct result and a successful status code if cosmos rest does not return an error", async () => { + const resp: AxiosResponse = emptyAxiosResponse({ node_info: "test" }); + axiosMock.get.mockResolvedValue(resp); + const res = await request(app).post(endpoint).send({ url: "test_url.com" }); + expect(res.statusCode).toEqual(200); + expect(res.body).toEqual({ result: PingStatus.SUCCESS }); + }); + + it("Should return ping error and status code 400 if cosmos rest returns an error", async () => { + axiosMock.get.mockResolvedValue(emptyAxiosResponse()); + const res = await request(app).post(endpoint).send({ url: "test_url.com" }); + expect(res.statusCode).toEqual(400); + expect(res.body).toEqual({ result: PingStatus.ERROR }); + }); + + it("Should return ping timeout and status code 408 if cosmos rest returns a time out error", async () => { + axiosMock.get.mockRejectedValue({ code: "ECONNABORTED" }); + const res = await request(app).post(endpoint).send({ url: "test_url.com" }); + expect(res.statusCode).toEqual(408); + expect(res.body).toEqual({ result: PingStatus.TIMEOUT }); + }); }); -describe('Ping Cosmos Service - Tendermint RPC POST Route', () => { - const endpoint = '/server/cosmos/tendermint-rpc'; - - it('Should return error and status code if missing url in body', async () => { - let expectedReturn = new MissingKeysInBody('url'); - const res = await request(app).post(endpoint).send({}); - expect(res.statusCode).toEqual(532); - expect(res.body).toEqual({error: expectedReturn.message}); - }); - - it('Should return correct result and a successful status code if tendermint rpc does not return an error', - async () => { - const resp: AxiosResponse = emptyAxiosResponse({jsonrpc: 'test'}); - axiosMock.get.mockResolvedValue(resp); - const res = await request(app).post(endpoint).send({'url': 'test_url.com'}); - expect(res.statusCode).toEqual(200); - expect(res.body).toEqual({result: PingStatus.SUCCESS}); - }); - - it('Should return ping error and status code 400 if tendermint rpc returns an error', - async () => { - axiosMock.get.mockResolvedValue(emptyAxiosResponse()); - const res = await request(app).post(endpoint).send({'url': 'test_url.com'}); - expect(res.statusCode).toEqual(400); - expect(res.body).toEqual({result: PingStatus.ERROR}); - }); - - it('Should return ping timeout and status code 408 if tendermint rpc returns a time out error', - async () => { - axiosMock.get.mockRejectedValue({code: 'ECONNABORTED'}) - const res = await request(app).post(endpoint).send({'url': 'test_url.com'}); - expect(res.statusCode).toEqual(408); - expect(res.body).toEqual({result: PingStatus.TIMEOUT}); - }); +describe("Ping Cosmos Service - Tendermint RPC POST Route", () => { + const endpoint = "/server/cosmos/tendermint-rpc"; + + it("Should return error and status code if missing url in body", async () => { + let expectedReturn = new MissingKeysInBody("url"); + const res = await request(app).post(endpoint).send({}); + expect(res.statusCode).toEqual(532); + expect(res.body).toEqual({ error: expectedReturn.message }); + }); + + it("Should return correct result and a successful status code if tendermint rpc does not return an error", async () => { + const resp: AxiosResponse = emptyAxiosResponse({ jsonrpc: "test" }); + axiosMock.get.mockResolvedValue(resp); + const res = await request(app).post(endpoint).send({ url: "test_url.com" }); + expect(res.statusCode).toEqual(200); + expect(res.body).toEqual({ result: PingStatus.SUCCESS }); + }); + + it("Should return ping error and status code 400 if 
tendermint rpc returns an error", async () => { + axiosMock.get.mockResolvedValue(emptyAxiosResponse()); + const res = await request(app).post(endpoint).send({ url: "test_url.com" }); + expect(res.statusCode).toEqual(400); + expect(res.body).toEqual({ result: PingStatus.ERROR }); + }); + + it("Should return ping timeout and status code 408 if tendermint rpc returns a time out error", async () => { + axiosMock.get.mockRejectedValue({ code: "ECONNABORTED" }); + const res = await request(app).post(endpoint).send({ url: "test_url.com" }); + expect(res.statusCode).toEqual(408); + expect(res.body).toEqual({ result: PingStatus.TIMEOUT }); + }); }); -describe('Ping Substrate Service - Websocket POST Route', () => { - beforeEach(() => { - process.env.SUBSTRATE_API_IP = '0.0.0.0'; - process.env.SUBSTRATE_API_PORT = '0000'; - }); - - const endpoint = '/server/substrate/websocket'; - - it('Should return error and status code if missing url in body', - async () => { - let expectedReturn = new MissingKeysInBody('url'); - const res = await request(app).post(endpoint).send({}); - expect(res.statusCode).toEqual(532); - expect(res.body).toEqual({error: expectedReturn.message}); - }); - - it('Should return error if Substrate env variables are not available', - async () => { - delete process.env.SUBSTRATE_API_IP; - delete process.env.SUBSTRATE_API_PORT; - const expectedReturn = new EnvVariablesNotAvailable('Substrate IP or Substrate API'); - const res = await request(app).post(endpoint).send({'url': 'test_url.com'}); - expect(res.statusCode).toEqual(expectedReturn.code); - expect(res.body).toEqual({error: expectedReturn.message}); - }); - - it('Should return correct result and a successful status code if request does not return an error', - async () => { - axiosMock.get.mockResolvedValue(emptyAxiosResponse()); - const res = await request(app).post(endpoint).send({'url': 'test_url.com'}); - expect(res.statusCode).toEqual(200); - expect(res.body).toEqual({result: PingStatus.SUCCESS}); - }); - - it('Should return ping error and status code 400 if request returns an error', - async () => { - axiosMock.get.mockRejectedValue({code: 'RANDOMERROR'}) - const res = await request(app).post(endpoint).send({'url': 'test_url.com'}); - expect(res.statusCode).toEqual(400); - expect(res.body).toEqual({result: PingStatus.ERROR}); - }); - - it('Should return ping timeout and status code 408 if request times out', - async () => { - axiosMock.get.mockRejectedValue({code: 'ECONNABORTED'}) - const res = await request(app).post(endpoint).send({'url': 'test_url.com'}); - expect(res.statusCode).toEqual(408); - expect(res.body).toEqual({result: PingStatus.TIMEOUT}); - }); +describe("Ping Substrate Service - Websocket POST Route", () => { + beforeEach(() => { + process.env.SUBSTRATE_API_IP = "0.0.0.0"; + process.env.SUBSTRATE_API_PORT = "0000"; + }); + + const endpoint = "/server/substrate/websocket"; + + it("Should return error and status code if missing url in body", async () => { + let expectedReturn = new MissingKeysInBody("url"); + const res = await request(app).post(endpoint).send({}); + expect(res.statusCode).toEqual(532); + expect(res.body).toEqual({ error: expectedReturn.message }); + }); + + it("Should return error if Substrate env variables are not available", async () => { + delete process.env.SUBSTRATE_API_IP; + delete process.env.SUBSTRATE_API_PORT; + const expectedReturn = new EnvVariablesNotAvailable( + "Substrate IP or Substrate API" + ); + const res = await request(app).post(endpoint).send({ url: "test_url.com" }); + 
expect(res.statusCode).toEqual(expectedReturn.code); + expect(res.body).toEqual({ error: expectedReturn.message }); + }); + + it("Should return correct result and a successful status code if request does not return an error", async () => { + axiosMock.get.mockResolvedValue(emptyAxiosResponse()); + const res = await request(app).post(endpoint).send({ url: "test_url.com" }); + expect(res.statusCode).toEqual(200); + expect(res.body).toEqual({ result: PingStatus.SUCCESS }); + }); + + it("Should return ping error and status code 400 if request returns an error", async () => { + axiosMock.get.mockRejectedValue({ code: "RANDOMERROR" }); + const res = await request(app).post(endpoint).send({ url: "test_url.com" }); + expect(res.statusCode).toEqual(400); + expect(res.body).toEqual({ result: PingStatus.ERROR }); + }); + + it("Should return ping timeout and status code 408 if request times out", async () => { + axiosMock.get.mockRejectedValue({ code: "ECONNABORTED" }); + const res = await request(app).post(endpoint).send({ url: "test_url.com" }); + expect(res.statusCode).toEqual(408); + expect(res.body).toEqual({ result: PingStatus.TIMEOUT }); + }); }); -describe('Ping Ethereum Service - RPC POST Route', () => { - const endpoint = '/server/ethereum/rpc'; - - it('Should return error and status code if missing url in body', async () => { - let expectedReturn = new MissingKeysInBody('url'); - const res = await request(app).post(endpoint).send({}); - expect(res.statusCode).toEqual(532); - expect(res.body).toEqual({error: expectedReturn.message}); - }); - - it('Should return correct result and a successful status code if web3 call does not return an error', - async () => { - web3EthGetBlockNumberPromiseResolve = 123; - const res = await request(app).post(endpoint).send({'url': 'test_url.com'}); - expect(res.statusCode).toEqual(200); - expect(res.body).toEqual({result: PingStatus.SUCCESS}); - }); - - it('Should return ping error and status code 400 if web3 call returns an error', - async () => { - const res = await request(app).post(endpoint).send({'url': 'test_url.com'}); - expect(res.statusCode).toEqual(400); - expect(res.body).toEqual({result: PingStatus.ERROR}); - }); - - it('Should return ping timeout and status code 408 if web3 call times out', - async () => { - // While the web3 eth call will not return -1 in case of a timeout, this will mock the behaviour of the - // fulfillWithTimeLimit function, which will return the failure value (-1) if the call times out. The - // behaviour of this function is assumed to be valid and is tested in its respective unit tests. 
- web3EthGetBlockNumberPromiseResolve = -1; - const res = await request(app).post(endpoint).send({'url': 'test_url.com'}); - expect(res.statusCode).toEqual(408); - expect(res.body).toEqual({result: PingStatus.TIMEOUT}); - }); +describe("Ping Ethereum Service - RPC POST Route", () => { + const endpoint = "/server/ethereum/rpc"; + + it("Should return error and status code if missing url in body", async () => { + let expectedReturn = new MissingKeysInBody("url"); + const res = await request(app).post(endpoint).send({}); + expect(res.statusCode).toEqual(532); + expect(res.body).toEqual({ error: expectedReturn.message }); + }); + + it("Should return correct result and a successful status code if web3 call does not return an error", async () => { + web3EthGetBlockNumberPromiseResolve = 123; + const res = await request(app).post(endpoint).send({ url: "test_url.com" }); + expect(res.statusCode).toEqual(200); + expect(res.body).toEqual({ result: PingStatus.SUCCESS }); + }); + + it("Should return ping error and status code 400 if web3 call returns an error", async () => { + const res = await request(app).post(endpoint).send({ url: "test_url.com" }); + expect(res.statusCode).toEqual(400); + expect(res.body).toEqual({ result: PingStatus.ERROR }); + }); + + it("Should return ping timeout and status code 408 if web3 call times out", async () => { + // While the web3 eth call will not return -1 in case of a timeout, this will mock the behaviour of the + // fulfillWithTimeLimit function, which will return the failure value (-1) if the call times out. The + // behaviour of this function is assumed to be valid and is tested in its respective unit tests. + web3EthGetBlockNumberPromiseResolve = -1; + const res = await request(app).post(endpoint).send({ url: "test_url.com" }); + expect(res.statusCode).toEqual(408); + expect(res.body).toEqual({ result: PingStatus.TIMEOUT }); + }); }); -describe('Send Test Alert Channels Opsgenie POST Route', () => { - const endpoint = '/server/channels/opsgenie'; - - it('Should return correct result and a successful status code if opsgenie does not return an error', - async () => { - const res = await request(app).post(endpoint) - .send({'eu': true, 'apiKey': 'test'}); - expect(res.statusCode).toEqual(200); - expect(res.body).toEqual({ - result: PingStatus.SUCCESS - }); - }); - - it.each([ - [532, 'apiKey not specified', {'eu': true}, - {error: 'Error: Missing key(s) apiKey in body.'}], - [532, 'eu not specified', {'apiKey': 'test'}, - {error: 'Error: Missing key(s) eu in body.'}], - [532, 'apiKey and eu not specified', {}, - {error: 'Error: Missing key(s) apiKey, eu in body.'}], - ])('Should return error and %s status code if %s', - async (statusCode: number, _: string, body: any, - endpointRet: any) => { - const res = await request(app).post(endpoint).send(body); - expect(res.statusCode).toEqual(statusCode); - expect(res.body).toEqual(endpointRet); - }); +describe("Send Test Alert Channels Opsgenie POST Route", () => { + const endpoint = "/server/channels/opsgenie"; - it('Should return ping error and status code 400 if opsgenie returns an error', - async () => { - alertV2CreateMockCallbackError = Error(); - const res = await request(app).post(endpoint) - .send({'eu': true, 'apiKey': 'test'}); - expect(res.statusCode).toEqual(400); - expect(res.body) - .toEqual({ - result: PingStatus.ERROR - }); - }); + it("Should return correct result and a successful status code if opsgenie does not return an error", async () => { + const res = await request(app) + .post(endpoint) + .send({ 
eu: true, apiKey: "test" }); + expect(res.statusCode).toEqual(200); + expect(res.body).toEqual({ + result: PingStatus.SUCCESS, + }); + }); + + it.each([ + [ + 532, + "apiKey not specified", + { eu: true }, + { error: "Error: Missing key(s) apiKey in body." }, + ], + [ + 532, + "eu not specified", + { apiKey: "test" }, + { error: "Error: Missing key(s) eu in body." }, + ], + [ + 532, + "apiKey and eu not specified", + {}, + { error: "Error: Missing key(s) apiKey, eu in body." }, + ], + ])( + "Should return error and %s status code if %s", + async (statusCode: number, _: string, body: any, endpointRet: any) => { + const res = await request(app).post(endpoint).send(body); + expect(res.statusCode).toEqual(statusCode); + expect(res.body).toEqual(endpointRet); + } + ); + + it("Should return ping error and status code 400 if opsgenie returns an error", async () => { + alertV2CreateMockCallbackError = Error(); + const res = await request(app) + .post(endpoint) + .send({ eu: true, apiKey: "test" }); + expect(res.statusCode).toEqual(400); + expect(res.body).toEqual({ + result: PingStatus.ERROR, + }); + }); }); -describe('Send Test Alert Channels PagerDuty POST Route', () => { - const endpoint = '/server/channels/pagerduty'; - - it('Should return correct result and a successful status code if pagerduty does not return an error', - async () => { - // @ts-ignore - jest.spyOn(pagerDutyApi, 'event').mockResolvedValue('result'); - const res = await request(app).post(endpoint).send({'integrationKey': 'test'}); - expect(res.statusCode).toEqual(200); - expect(res.body).toEqual({ - result: PingStatus.SUCCESS - }); - }); - - it('Should return error and %s status code if %s', - async () => { - const res = await request(app).post(endpoint).send({}); - expect(res.statusCode).toEqual(532); - expect(res.body).toEqual({error: 'Error: Missing key(s) integrationKey in body.'}); - }); +describe("Send Test Alert Channels PagerDuty POST Route", () => { + const endpoint = "/server/channels/pagerduty"; + + it("Should return correct result and a successful status code if pagerduty does not return an error", async () => { + // @ts-ignore + jest.spyOn(pagerDutyApi, "event").mockResolvedValue("result"); + const res = await request(app) + .post(endpoint) + .send({ integrationKey: "test" }); + expect(res.statusCode).toEqual(200); + expect(res.body).toEqual({ + result: PingStatus.SUCCESS, + }); + }); - it('Should return ping error and status code 400 if pagerduty returns an error', - async () => { - jest.spyOn(pagerDutyApi, 'event').mockRejectedValue('error'); - const res = await request(app).post(endpoint).send({'integrationKey': 'test'}); - expect(res.statusCode).toEqual(400); - expect(res.body).toEqual({ - result: PingStatus.ERROR - }); - }); + it("Should return error and %s status code if %s", async () => { + const res = await request(app).post(endpoint).send({}); + expect(res.statusCode).toEqual(532); + expect(res.body).toEqual({ + error: "Error: Missing key(s) integrationKey in body.", + }); + }); + + it("Should return ping error and status code 400 if pagerduty returns an error", async () => { + jest.spyOn(pagerDutyApi, "event").mockRejectedValue("error"); + const res = await request(app) + .post(endpoint) + .send({ integrationKey: "test" }); + expect(res.statusCode).toEqual(400); + expect(res.body).toEqual({ + result: PingStatus.ERROR, + }); + }); }); -describe('Send Test Alert Channels Slack POST Route', () => { - const endpoint = '/server/channels/slack'; - - it('Should return correct result and a successful status code 
if slack does not raise an error', - async () => { - const res = await request(app).post(endpoint) - .send({'botToken': 'test', 'botChannelId': 'test'}); - expect(res.statusCode).toEqual(200); - expect(res.body).toEqual({ - result: PingStatus.SUCCESS - }); - }); +describe("Send Test Alert Channels Slack POST Route", () => { + const endpoint = "/server/channels/slack"; - it.each([ - [532, 'botToken not specified', {'botChannelId': 'test'}, - {error: 'Error: Missing key(s) botToken in body.'}], - [532, 'botChannelId not specified', {'botToken': 'test'}, - {error: 'Error: Missing key(s) botChannelId in body.'}], - [532, 'botToken and botChannelId not specified', {}, - {error: 'Error: Missing key(s) botToken, botChannelId in body.'}], - ])('Should return error and %s status code if %s', - async (statusCode: number, _: string, body: any, - endpointRet: any) => { - const res = await request(app).post(endpoint).send(body); - expect(res.statusCode).toEqual(statusCode); - expect(res.body).toEqual(endpointRet); - }); + it("Should return correct result and a successful status code if slack does not raise an error", async () => { + const res = await request(app) + .post(endpoint) + .send({ botToken: "test", botChannelId: "test" }); + expect(res.statusCode).toEqual(200); + expect(res.body).toEqual({ + result: PingStatus.SUCCESS, + }); + }); + + it.each([ + [ + 532, + "botToken not specified", + { botChannelId: "test" }, + { error: "Error: Missing key(s) botToken in body." }, + ], + [ + 532, + "botChannelId not specified", + { botToken: "test" }, + { error: "Error: Missing key(s) botChannelId in body." }, + ], + [ + 532, + "botToken and botChannelId not specified", + {}, + { error: "Error: Missing key(s) botToken, botChannelId in body." }, + ], + ])( + "Should return error and %s status code if %s", + async (statusCode: number, _: string, body: any, endpointRet: any) => { + const res = await request(app).post(endpoint).send(body); + expect(res.statusCode).toEqual(statusCode); + expect(res.body).toEqual(endpointRet); + } + ); - it('Should return ping error and status code 400 if slack raises an error', - async () => { - slackMock.chat.postMessage.mockImplementation(() => { - throw new Error(); - }); - const res = await request(app).post(endpoint) - .send({'botToken': 'test', 'botChannelId': 'test'}); - expect(res.statusCode).toEqual(400); - expect(res.body) - .toEqual({ - result: PingStatus.ERROR - }); - }); + it("Should return ping error and status code 400 if slack raises an error", async () => { + slackMock.chat.postMessage.mockImplementation(() => { + throw new Error(); + }); + const res = await request(app) + .post(endpoint) + .send({ botToken: "test", botChannelId: "test" }); + expect(res.statusCode).toEqual(400); + expect(res.body).toEqual({ + result: PingStatus.ERROR, + }); + }); }); -describe('Send Test Alert Channels Telegram POST Route', () => { - const endpoint = '/server/channels/telegram'; - - it('Should return correct result and a successful status code if telegram does not return an error', - async () => { - axiosMock.get.mockResolvedValue(emptyAxiosResponse()); - const res = await request(app).post(endpoint).send({'botToken': 'test', 'botChatId': 'test'}); - expect(res.statusCode).toEqual(200); - expect(res.body).toEqual({result: PingStatus.SUCCESS}); - }); - - it.each([ - [532, 'botToken not specified', {'botChatId': 'test'}, - {error: 'Error: Missing key(s) botToken in body.'}], - [532, 'botChatId not specified', {'botToken': 'test'}, - {error: 'Error: Missing key(s) botChatId in 
body.'}], - [532, 'botToken and botChatId not specified', {}, - {error: 'Error: Missing key(s) botToken, botChatId in body.'}], - ])('Should return error and %s status code if %s', - async (statusCode: number, _: string, body: any, - endpointRet: any) => { - const res = await request(app).post(endpoint).send(body); - expect(res.statusCode).toEqual(statusCode); - expect(res.body).toEqual(endpointRet); - }); - - it('Should return ping error and status code 408 if telegram returns a time out error', - async () => { - axiosMock.get.mockRejectedValue({code: 'ECONNABORTED'}); - const res = await request(app).post(endpoint).send({'botToken': 'test', 'botChatId': 'test'}); - expect(res.statusCode).toEqual(408); - expect(res.body).toEqual({result: PingStatus.TIMEOUT}); - }); - - it('Should return ping error and status code 400 if telegram returns an error', - async () => { - axiosMock.get.mockRejectedValue({code: 'RANDOMERROR'}); - const res = await request(app).post(endpoint).send({'botToken': 'test', 'botChatId': 'test'}); - expect(res.statusCode).toEqual(400); - expect(res.body).toEqual({result: PingStatus.ERROR}); - }); +describe("Send Test Alert Channels Telegram POST Route", () => { + const endpoint = "/server/channels/telegram"; + + it("Should return correct result and a successful status code if telegram does not return an error", async () => { + axiosMock.get.mockResolvedValue(emptyAxiosResponse()); + const res = await request(app) + .post(endpoint) + .send({ botToken: "test", botChatId: "test" }); + expect(res.statusCode).toEqual(200); + expect(res.body).toEqual({ result: PingStatus.SUCCESS }); + }); + + it.each([ + [ + 532, + "botToken not specified", + { botChatId: "test" }, + { error: "Error: Missing key(s) botToken in body." }, + ], + [ + 532, + "botChatId not specified", + { botToken: "test" }, + { error: "Error: Missing key(s) botChatId in body." }, + ], + [ + 532, + "botToken and botChatId not specified", + {}, + { error: "Error: Missing key(s) botToken, botChatId in body." 
}, + ], + ])( + "Should return error and %s status code if %s", + async (statusCode: number, _: string, body: any, endpointRet: any) => { + const res = await request(app).post(endpoint).send(body); + expect(res.statusCode).toEqual(statusCode); + expect(res.body).toEqual(endpointRet); + } + ); + + it("Should return ping error and status code 408 if telegram returns a time out error", async () => { + axiosMock.get.mockRejectedValue({ code: "ECONNABORTED" }); + const res = await request(app) + .post(endpoint) + .send({ botToken: "test", botChatId: "test" }); + expect(res.statusCode).toEqual(408); + expect(res.body).toEqual({ result: PingStatus.TIMEOUT }); + }); + + it("Should return ping error and status code 400 if telegram returns an error", async () => { + axiosMock.get.mockRejectedValue({ code: "RANDOMERROR" }); + const res = await request(app) + .post(endpoint) + .send({ botToken: "test", botChatId: "test" }); + expect(res.statusCode).toEqual(400); + expect(res.body).toEqual({ result: PingStatus.ERROR }); + }); }); -describe('Send Test Alert Channels PagerDuty POST Route', () => { - const endpoint = '/server/channels/pagerduty'; - - it('Should return correct result and a successful status code if pagerduty does not return an error', - async () => { - // @ts-ignore - jest.spyOn(pagerDutyApi, 'event').mockResolvedValue('result'); - const res = await request(app).post(endpoint).send({'integrationKey': 'test'}); - expect(res.statusCode).toEqual(200); - expect(res.body).toEqual({ - result: PingStatus.SUCCESS - }); - }); - - it('Should return error and %s status code if %s', - async () => { - const res = await request(app).post(endpoint).send({}); - expect(res.statusCode).toEqual(532); - expect(res.body).toEqual({error: 'Error: Missing key(s) integrationKey in body.'}); - }); +describe("Send Test Alert Channels PagerDuty POST Route", () => { + const endpoint = "/server/channels/pagerduty"; + + it("Should return correct result and a successful status code if pagerduty does not return an error", async () => { + // @ts-ignore + jest.spyOn(pagerDutyApi, "event").mockResolvedValue("result"); + const res = await request(app) + .post(endpoint) + .send({ integrationKey: "test" }); + expect(res.statusCode).toEqual(200); + expect(res.body).toEqual({ + result: PingStatus.SUCCESS, + }); + }); - it('Should return ping error and status code 400 if pagerduty returns an error', - async () => { - jest.spyOn(pagerDutyApi, 'event').mockRejectedValue('error'); - const res = await request(app).post(endpoint).send({'integrationKey': 'test'}); - expect(res.statusCode).toEqual(400); - expect(res.body).toEqual({ - result: PingStatus.ERROR - }); - }); + it("Should return error and %s status code if %s", async () => { + const res = await request(app).post(endpoint).send({}); + expect(res.statusCode).toEqual(532); + expect(res.body).toEqual({ + error: "Error: Missing key(s) integrationKey in body.", + }); + }); + + it("Should return ping error and status code 400 if pagerduty returns an error", async () => { + jest.spyOn(pagerDutyApi, "event").mockRejectedValue("error"); + const res = await request(app) + .post(endpoint) + .send({ integrationKey: "test" }); + expect(res.statusCode).toEqual(400); + expect(res.body).toEqual({ + result: PingStatus.ERROR, + }); + }); }); -describe('Send Test Alert Channels Twilio POST Route', () => { - const endpoint = '/server/channels/twilio'; - - it('Should return correct result and a successful status code if twilio does not return an error', - async () => { - 
twilioClientCallsCreateResolve = {test: 'test'} - const res = await request(app).post(endpoint).send({ - accountSid: 'test', authToken: 'test', - twilioPhoneNumber: 'test', phoneNumberToDial: 'test' - }); - expect(res.statusCode).toEqual(200); - expect(res.body).toEqual({result: PingStatus.SUCCESS}); - }); - - it.each([ - [532, 'accountSid not specified', - {authToken: 'test', twilioPhoneNumber: 'test', phoneNumberToDial: 'test'}, - {error: 'Error: Missing key(s) accountSid in body.'}], - [532, 'authToken not specified', - {accountSid: 'test', twilioPhoneNumber: 'test', phoneNumberToDial: 'test'}, - {error: 'Error: Missing key(s) authToken in body.'}], - [532, 'twilioPhoneNumber not specified', - {accountSid: 'test', authToken: 'test', phoneNumberToDial: 'test'}, - {error: 'Error: Missing key(s) twilioPhoneNumber in body.'}], - [532, 'phoneNumberToDial not specified', - {accountSid: 'test', authToken: 'test', twilioPhoneNumber: 'test'}, - {error: 'Error: Missing key(s) phoneNumberToDial in body.'}], - [532, 'accountSid and authToken not specified', - {twilioPhoneNumber: 'test', phoneNumberToDial: 'test'}, - {error: 'Error: Missing key(s) accountSid, authToken in body.'}], - [532, 'twilioPhoneNumber and phoneNumberToDial not specified', - {accountSid: 'test', authToken: 'test'}, - {error: 'Error: Missing key(s) twilioPhoneNumber, phoneNumberToDial in body.'}], - [532, 'accountSid, authToken, twilioPhoneNumber, and phoneNumberToDial not specified', {}, - {error: 'Error: Missing key(s) accountSid, authToken, twilioPhoneNumber, phoneNumberToDial in body.'}], - ])('Should return error and %s status code if %s', - async (statusCode: number, _: string, body: any, - endpointRet: any) => { - const res = await request(app).post(endpoint).send(body); - expect(res.statusCode).toEqual(statusCode); - expect(res.body).toEqual(endpointRet); - }); +describe("Send Test Alert Channels Twilio POST Route", () => { + const endpoint = "/server/channels/twilio"; - it('Should return ping error and status code 400 if twilio returns an error', - async () => { - const res = await request(app).post(endpoint).send({ - accountSid: 'test', authToken: 'test', - twilioPhoneNumber: 'test', phoneNumberToDial: 'test' - }); - expect(res.statusCode).toEqual(400); - expect(res.body).toEqual({result: PingStatus.ERROR}); - }); + it("Should return correct result and a successful status code if twilio does not return an error", async () => { + twilioClientCallsCreateResolve = { test: "test" }; + const res = await request(app).post(endpoint).send({ + accountSid: "test", + authToken: "test", + twilioPhoneNumber: "test", + phoneNumberToDial: "test", + }); + expect(res.statusCode).toEqual(200); + expect(res.body).toEqual({ result: PingStatus.SUCCESS }); + }); + + it.each([ + [ + 532, + "accountSid not specified", + { + authToken: "test", + twilioPhoneNumber: "test", + phoneNumberToDial: "test", + }, + { error: "Error: Missing key(s) accountSid in body." }, + ], + [ + 532, + "authToken not specified", + { + accountSid: "test", + twilioPhoneNumber: "test", + phoneNumberToDial: "test", + }, + { error: "Error: Missing key(s) authToken in body." }, + ], + [ + 532, + "twilioPhoneNumber not specified", + { accountSid: "test", authToken: "test", phoneNumberToDial: "test" }, + { error: "Error: Missing key(s) twilioPhoneNumber in body." }, + ], + [ + 532, + "phoneNumberToDial not specified", + { accountSid: "test", authToken: "test", twilioPhoneNumber: "test" }, + { error: "Error: Missing key(s) phoneNumberToDial in body." 
}, + ], + [ + 532, + "accountSid and authToken not specified", + { twilioPhoneNumber: "test", phoneNumberToDial: "test" }, + { error: "Error: Missing key(s) accountSid, authToken in body." }, + ], + [ + 532, + "twilioPhoneNumber and phoneNumberToDial not specified", + { accountSid: "test", authToken: "test" }, + { + error: + "Error: Missing key(s) twilioPhoneNumber, phoneNumberToDial in body.", + }, + ], + [ + 532, + "accountSid, authToken, twilioPhoneNumber, and phoneNumberToDial not specified", + {}, + { + error: + "Error: Missing key(s) accountSid, authToken, twilioPhoneNumber, phoneNumberToDial in body.", + }, + ], + ])( + "Should return error and %s status code if %s", + async (statusCode: number, _: string, body: any, endpointRet: any) => { + const res = await request(app).post(endpoint).send(body); + expect(res.statusCode).toEqual(statusCode); + expect(res.body).toEqual(endpointRet); + } + ); + + it("Should return ping error and status code 400 if twilio returns an error", async () => { + const res = await request(app).post(endpoint).send({ + accountSid: "test", + authToken: "test", + twilioPhoneNumber: "test", + phoneNumberToDial: "test", + }); + expect(res.statusCode).toEqual(400); + expect(res.body).toEqual({ result: PingStatus.ERROR }); + }); }); -describe('Send Test Alert Channels Email POST Route', () => { - const endpoint = '/server/channels/email'; - - it('Should return correct result and a successful status code if email does not return an error', - async () => { - const res = await request(app).post(endpoint).send({ - smtp: 'test', port: 123, from: 'test', to: 'test', - username: 'test', password: 'test' - }); - expect(res.statusCode).toEqual(200); - expect(res.body).toEqual({result: PingStatus.SUCCESS}); - }); - - it.each([ - [532, 'smtp not specified', - {port: 123, from: 'test', to: 'test', username: '', password: ''}, - {error: 'Error: Missing key(s) smtp in body.'}], - [532, 'port not specified', - {smtp: 'test', from: 'test', to: 'test', username: '', password: ''}, - {error: 'Error: Missing key(s) port in body.'}], - [532, 'from not specified', - {smtp: 'test', port: 123, to: 'test', username: '', password: ''}, - {error: 'Error: Missing key(s) from in body.'}], - [532, 'to not specified', - {smtp: 'test', port: 123, from: 'test', username: '', password: ''}, - {error: 'Error: Missing key(s) to in body.'}], - [532, 'username not specified', - {smtp: 'test', port: 123, from: 'test', to: 'test', password: ''}, - {error: 'Error: Missing key(s) username in body.'}], - [532, 'password not specified', - {smtp: 'test', port: 123, from: 'test', to: 'test', username: ''}, - {error: 'Error: Missing key(s) password in body.'}], - [532, 'smtp and port not specified', - {from: 'test', to: 'test', username: '', password: ''}, - {error: 'Error: Missing key(s) smtp, port in body.'}], - [532, 'username and password not specified', - {smtp: 'test', port: 123, from: 'test', to: 'test'}, - {error: 'Error: Missing key(s) username, password in body.'}], - [532, 'smtp, port, from, to, username, and password not specified', {}, - {error: 'Error: Missing key(s) smtp, port, from, to, username, password in body.'}], - ])('Should return error and %s status code if %s', - async (statusCode: number, _: string, body: any, - endpointRet: any) => { - const res = await request(app).post(endpoint).send(body); - expect(res.statusCode).toEqual(statusCode); - expect(res.body).toEqual(endpointRet); - }); - - it('Should return ping error and status code 400 if email client returns an error', - async () => 
{ - nodemailerVerifyCallbackError = Error(); - const res = await request(app).post(endpoint).send({ - smtp: 'test', port: 123, from: 'test', to: 'test', - username: 'test', password: 'test' - }); - expect(res.statusCode).toEqual(400); - expect(res.body).toEqual({result: PingStatus.ERROR}); - }); - - it('Should return ping error and status code 400 if sending email returns an error', - async () => { - nodemailerSendMailCallbackError = Error(); - const res = await request(app).post(endpoint).send({ - smtp: 'test', port: 123, from: 'test', to: 'test', - username: 'test', password: 'test' - }); - expect(res.statusCode).toEqual(400); - expect(res.body).toEqual({result: PingStatus.ERROR}); - }); +describe("Send Test Alert Channels Email POST Route", () => { + const endpoint = "/server/channels/email"; + + it("Should return correct result and a successful status code if email does not return an error", async () => { + const res = await request(app).post(endpoint).send({ + smtp: "test", + port: 123, + from: "test", + to: "test", + username: "test", + password: "test", + }); + expect(res.statusCode).toEqual(200); + expect(res.body).toEqual({ result: PingStatus.SUCCESS }); + }); + + it.each([ + [ + 532, + "smtp not specified", + { port: 123, from: "test", to: "test", username: "", password: "" }, + { error: "Error: Missing key(s) smtp in body." }, + ], + [ + 532, + "port not specified", + { smtp: "test", from: "test", to: "test", username: "", password: "" }, + { error: "Error: Missing key(s) port in body." }, + ], + [ + 532, + "from not specified", + { smtp: "test", port: 123, to: "test", username: "", password: "" }, + { error: "Error: Missing key(s) from in body." }, + ], + [ + 532, + "to not specified", + { smtp: "test", port: 123, from: "test", username: "", password: "" }, + { error: "Error: Missing key(s) to in body." }, + ], + [ + 532, + "username not specified", + { smtp: "test", port: 123, from: "test", to: "test", password: "" }, + { error: "Error: Missing key(s) username in body." }, + ], + [ + 532, + "password not specified", + { smtp: "test", port: 123, from: "test", to: "test", username: "" }, + { error: "Error: Missing key(s) password in body." }, + ], + [ + 532, + "smtp and port not specified", + { from: "test", to: "test", username: "", password: "" }, + { error: "Error: Missing key(s) smtp, port in body." }, + ], + [ + 532, + "username and password not specified", + { smtp: "test", port: 123, from: "test", to: "test" }, + { error: "Error: Missing key(s) username, password in body." 
}, + ], + [ + 532, + "smtp, port, from, to, username, and password not specified", + {}, + { + error: + "Error: Missing key(s) smtp, port, from, to, username, password in body.", + }, + ], + ])( + "Should return error and %s status code if %s", + async (statusCode: number, _: string, body: any, endpointRet: any) => { + const res = await request(app).post(endpoint).send(body); + expect(res.statusCode).toEqual(statusCode); + expect(res.body).toEqual(endpointRet); + } + ); + + it("Should return ping error and status code 400 if email client returns an error", async () => { + nodemailerVerifyCallbackError = Error(); + const res = await request(app).post(endpoint).send({ + smtp: "test", + port: 123, + from: "test", + to: "test", + username: "test", + password: "test", + }); + expect(res.statusCode).toEqual(400); + expect(res.body).toEqual({ result: PingStatus.ERROR }); + }); + + it("Should return ping error and status code 400 if sending email returns an error", async () => { + nodemailerSendMailCallbackError = Error(); + const res = await request(app).post(endpoint).send({ + smtp: "test", + port: 123, + from: "test", + to: "test", + username: "test", + password: "test", + }); + expect(res.statusCode).toEqual(400); + expect(res.body).toEqual({ result: PingStatus.ERROR }); + }); }); -describe('Server Defaults', () => { - it.each([ - ['/server/'], ['/server/bad-endpoint'] - ])('Should return status code 531 and an error if GET request %s', - async (endpoint: string) => { - let expectedReturn = new InvalidEndpoint(endpoint); - const res = await request(app).get(endpoint); - expect(res.statusCode).toEqual(expectedReturn.code); - expect(res.body).toEqual({ - 'error': expectedReturn.message - }); - }); - - it.each([ - ['/server/'], ['/server/bad-endpoint'] - ])('Should return status code 531 and an error if POST request %s', - async (endpoint: string) => { - let expectedReturn = new InvalidEndpoint(endpoint); - const res = await request(app).post(endpoint); - expect(res.statusCode).toEqual(expectedReturn.code); - expect(res.body).toEqual({ - 'error': expectedReturn.message - }); - }); +describe("Server Defaults", () => { + it.each([["/server/"], ["/server/bad-endpoint"]])( + "Should return status code 531 and an error if GET request %s", + async (endpoint: string) => { + let expectedReturn = new InvalidEndpoint(endpoint); + const res = await request(app).get(endpoint); + expect(res.statusCode).toEqual(expectedReturn.code); + expect(res.body).toEqual({ + error: expectedReturn.message, + }); + } + ); + + it.each([["/server/"], ["/server/bad-endpoint"]])( + "Should return status code 531 and an error if POST request %s", + async (endpoint: string) => { + let expectedReturn = new InvalidEndpoint(endpoint); + const res = await request(app).post(endpoint); + expect(res.statusCode).toEqual(expectedReturn.code); + expect(res.body).toEqual({ + error: expectedReturn.message, + }); + } + ); }); -describe('Server Redirects', () => { - const expectedReturnedText: string = 'Found. 
Redirecting to /api-docs'; - const expectedStatusCode: number = 302; - it.each([ - ['/'], ['/bad-endpoint'] - ])('Should return status code 302 and an error if GET request %s', - async (endpoint: string) => { - const res = await request(app).get(endpoint); - expect(res.statusCode).toEqual(expectedStatusCode); - expect(res.text).toEqual(expectedReturnedText); - }); - - it.each([ - ['/'], ['/bad-endpoint'] - ])('Should return status code 302 and an error if POST request %s', - async (endpoint: string) => { - const res = await request(app).post(endpoint); - expect(res.statusCode).toEqual(expectedStatusCode); - expect(res.text).toEqual(expectedReturnedText); - }); +describe("Server Redirects", () => { + const expectedReturnedText: string = "Found. Redirecting to /api-docs"; + const expectedStatusCode: number = 302; + it.each([["/"], ["/bad-endpoint"]])( + "Should return status code 302 and an error if GET request %s", + async (endpoint: string) => { + const res = await request(app).get(endpoint); + expect(res.statusCode).toEqual(expectedStatusCode); + expect(res.text).toEqual(expectedReturnedText); + } + ); + + it.each([["/"], ["/bad-endpoint"]])( + "Should return status code 302 and an error if POST request %s", + async (endpoint: string) => { + const res = await request(app).post(endpoint); + expect(res.statusCode).toEqual(expectedStatusCode); + expect(res.text).toEqual(expectedReturnedText); + } + ); }); diff --git a/docker-compose.yml b/docker-compose.yml index 18274008..9da871aa 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -243,7 +243,7 @@ services: - 'DB_IP=${DB_IP_REPLICA_1}' - 'DB_PORT=${DB_PORT}' - 'DB_NAME=${DB_NAME}' - - 'WAIT_HOSTS=${DB_IP_REPLICA_1}:${DB_PORT}, ${REDIS_IP}:${REDIS_PORT}' + - 'WAIT_HOSTS=${DB_IP_REPLICA_1}:${DB_PORT}, ${REDIS_IP}:${REDIS_PORT}, ${SUBSTRATE_API_IP}:${SUBSTRATE_API_PORT}' - 'UI_DASHBOARD_PORT=${UI_DASHBOARD_PORT}' - 'DEV_MODE=${DEV_MODE}' - 'UI_ACCESS_IP=${UI_ACCESS_IP}' @@ -265,6 +265,7 @@ services: depends_on: - redis - rs1 + - substrate-api ui: build: @@ -285,6 +286,20 @@ services: depends_on: - api + substrate-api: + environment: + - 'SUBSTRATE_API_PORT=${SUBSTRATE_API_PORT}' + build: + context: './' + dockerfile: './substrate-api/Dockerfile' + image: 'simplyvc/panic_substrate_api:1.3.0' + volumes: + - './certificates:/opt/panic/certificates' + restart: always + networks: + panic_net: + ipv4_address: '${SUBSTRATE_API_IP}' + migration: container_name: migration build: diff --git a/substrate-api/Dockerfile b/substrate-api/Dockerfile index f1fa08cd..a7c84831 100644 --- a/substrate-api/Dockerfile +++ b/substrate-api/Dockerfile @@ -1,11 +1,12 @@ -FROM node:16 +FROM node:23-alpine +RUN apk add bash # Create app directory WORKDIR /opt/panic # Change directory, and copy all substrate-api contents from the host to the # container. 
-WORKDIR ./substrate-api
+WORKDIR /opt/panic/substrate-api
 COPY ./substrate-api ./

 # RUN npm install
@@ -17,4 +18,4 @@ RUN npm run build

 # Expose port
 EXPOSE 8080

-CMD bash run_server.sh
+CMD ["node", "/opt/panic/substrate-api/src/server.js"]
diff --git a/substrate-api/run_server.sh b/substrate-api/run_server.sh
index 81af90eb..f8058620 100644
--- a/substrate-api/run_server.sh
+++ b/substrate-api/run_server.sh
@@ -1,2 +1,2 @@
 #!/bin/bash
-node src/server.js
+node /opt/panic/substrate-api/src/server.js
diff --git a/ui/Dockerfile b/ui/Dockerfile
index 407347b4..eae940ef 100644
--- a/ui/Dockerfile
+++ b/ui/Dockerfile
@@ -1,4 +1,6 @@
-FROM node:14
+FROM node:23-alpine
+
+RUN apk add bash

 # Pass args from docker-compose since ENV variables are required in build step.
 ARG API_PORT
@@ -13,15 +15,15 @@ ENV PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=true
 WORKDIR /opt/panic

 # Change directory, and copy all required types from the host to the container.
-WORKDIR ./entities
+WORKDIR /opt/panic/entities
 COPY ./entities ./

 # Change directory, and copy all required api contents from the host to the container.
-WORKDIR ../api/src/util
+WORKDIR /opt/panic/api/src/util
 COPY ./api/src/util ./

 # Change directory, and copy all ui contents from the host to the container.
-WORKDIR ../../../ui
+WORKDIR /opt/panic/ui
 COPY ./ui ./

 # Clean any old build directories
diff --git a/web-installer/Dockerfile b/web-installer/Dockerfile
index a99aeacf..27020924 100644
--- a/web-installer/Dockerfile
+++ b/web-installer/Dockerfile
@@ -1,11 +1,13 @@
-FROM node:16
+FROM node:23-alpine
+
+RUN apk add bash

 # Create app directory
 WORKDIR /opt/panic

 # Change directory, and copy all installer contents from the host to the
 # container.
-WORKDIR ./web-installer
+WORKDIR /opt/panic/web-installer
 COPY ./web-installer ./

 ENV GENERATE_SOURCEMAP=false

From 77647ba29f8c3c8abfadd54742dda9b4eb833036 Mon Sep 17 00:00:00 2001
From: Eyal Alsheich
Date: Wed, 21 May 2025 15:16:31 +0300
Subject: [PATCH 9/9] Update cosmos.py: add tendermint namespace to Prometheus
 metric names

---
 alerter/src/monitors/node/cosmos.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/alerter/src/monitors/node/cosmos.py b/alerter/src/monitors/node/cosmos.py
index b775f3a2..faeeddf4 100644
--- a/alerter/src/monitors/node/cosmos.py
+++ b/alerter/src/monitors/node/cosmos.py
@@ -87,8 +87,8 @@ def __init__(self, monitor_name: str, node_config: CosmosNodeConfig,
         # because it is non-existent for nodes which are not in the validator
         # set.
         self._prometheus_metrics = {
-            'consensus_latest_block_height': 'strict',
-            'consensus_validator_power': 'optional',
+            'tendermint_consensus_latest_block_height': 'strict',
+            'tendermint_consensus_validator_power': 'optional',
         }

         # -------------------------- TENDERMINT RPC ---------------------------
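
Note on the final patch above: Tendermint/CometBFT composes its Prometheus metric names as namespace_subsystem_name, and the namespace defaults to "tendermint", so the consensus metrics scraped by the node monitor are exposed as tendermint_consensus_latest_block_height and tendermint_consensus_validator_power on current nodes. The patch simply renames the two dictionary keys to match. If a deployment also monitors older nodes that still expose the un-prefixed names, a version-tolerant lookup along the following lines could be used instead; this is a minimal sketch only, and the resolve_metric helper and the scraped dictionary are illustrative, not part of this patch.

    from typing import Dict, Optional

    _NAMESPACE = 'tendermint'


    def resolve_metric(scraped: Dict[str, float],
                       name: str) -> Optional[float]:
        """
        Return a metric value whether or not the monitored node prefixes its
        Prometheus metrics with the 'tendermint' namespace.
        """
        # Prefer the namespaced form emitted by current Tendermint/CometBFT
        # releases, e.g. 'tendermint_consensus_latest_block_height'.
        namespaced = '{}_{}'.format(_NAMESPACE, name)
        if namespaced in scraped:
            return scraped[namespaced]
        # Fall back to the legacy un-prefixed form used by older nodes.
        return scraped.get(name)


    # Example usage, valid for both old and new metric names:
    # height = resolve_metric(prometheus_data, 'consensus_latest_block_height')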