diff --git a/.env b/.env
index 6f6060b1..58126986 100644
--- a/.env
+++ b/.env
@@ -8,10 +8,10 @@
# make sure that it obeys the subnet rules inside the docker-compose.yml file
# UI IP configuration
-UI_ACCESS_IP=X.X.X.X
+UI_ACCESS_IP=1.1.1.1
# Development configuration
-DEV_MODE=false
+DEV_MODE=true
# Mongo configuration
DB_NAME=panicdb
@@ -19,16 +19,16 @@ DB_NAME=panicdb
DB_PORT=27017
DB_IP_REPLICA_1=172.18.0.2
-DB_IP_REPLICA_1_TEST=172.19.0.2
+DB_IP_REPLICA_1_TEST=172.18.0.2
DB_IP_REPLICA_2=172.18.0.3
-DB_IP_REPLICA_2_TEST=172.19.0.3
+DB_IP_REPLICA_2_TEST=172.18.0.3
DB_IP_REPLICA_3=172.18.0.4
-DB_IP_REPLICA_3_TEST=172.19.0.4
+DB_IP_REPLICA_3_TEST=172.18.0.4
DB_IP_REPLICA_STARTUP=172.18.0.5
-DB_IP_REPLICA_STARTUP_TEST=172.19.0.5
+DB_IP_REPLICA_STARTUP_TEST=172.18.0.5
# Alerter configuration
ALERTER_IP=172.18.0.7
@@ -36,21 +36,21 @@ UNIQUE_ALERTER_IDENTIFIER=panic_alerter
# Redis configuration
REDIS_IP=172.18.0.8
-REDIS_IP_TEST=172.19.0.8
+REDIS_IP_TEST=172.18.0.8
REDIS_PORT=6379
REDIS_DB=10
REDIS_DB_TEST=11
# RabbitMQ configuration
RABBIT_IP=172.18.0.9
-RABBIT_IP_TEST=172.19.0.9
+RABBIT_IP_TEST=172.18.0.9
RABBIT_PORT=5672
# Health Checker configuration
HEALTH_CHECKER_IP=172.18.0.10
# Tests configuration
-TESTS_IP=172.19.0.11
+TESTS_IP=172.18.0.11
# UI configuration
UI_DASHBOARD_IP=172.18.0.12
@@ -58,7 +58,7 @@ UI_DASHBOARD_PORT=3333
# API configuration
API_IP=172.18.0.13
-API_IP_TEST=172.19.0.13
+API_IP_TEST=172.18.0.13
API_PORT=9000
API_PORT_TEST=9001
@@ -122,4 +122,4 @@ ENABLE_LOG_ALERTS=True
# Twilio Preferences
TWIML=
-TWIML_IS_URL=false
\ No newline at end of file
+TWIML_IS_URL=false
diff --git a/alerter/src/alerter/alerts/network/cosmos.py b/alerter/src/alerter/alerts/network/cosmos.py
index 984d2e4a..667471c6 100644
--- a/alerter/src/alerter/alerts/network/cosmos.py
+++ b/alerter/src/alerter/alerts/network/cosmos.py
@@ -42,8 +42,8 @@ def __init__(self, origin_name: str, proposal_id: int, title: str,
"No votes: {}\n"
"No with veto votes: {}\n"
).format(proposal_id, status, origin_name, title,
- final_tally_result['yes'], final_tally_result['abstain'],
- final_tally_result['no'], final_tally_result['no_with_veto'])
+                 final_tally_result['yes_count'],
+                 final_tally_result['abstain_count'],
+                 final_tally_result['no_count'],
+                 final_tally_result['no_with_veto_count'])
super().__init__(
AlertCode.ProposalConcludedAlert, alert_msg,
severity, timestamp, parent_id, origin_id,
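
For reference, gov v1 (used from Cosmos SDK v0.46 onwards, including v0.50.1) renames the `final_tally_result` fields to `yes_count`, `abstain_count`, `no_count` and `no_with_veto_count`, which is why the alert message now indexes those keys. A minimal sketch of the formatting with a made-up tally; the message lines not visible in this hunk are illustrative, not the exact PANIC template:

```python
# Hypothetical gov v1 style tally values.
final_tally_result = {'yes_count': 100.0, 'abstain_count': 5.0,
                      'no_count': 20.0, 'no_with_veto_count': 2.0}
alert_msg = (
    "Yes votes: {}\n"
    "Abstain votes: {}\n"
    "No votes: {}\n"
    "No with veto votes: {}\n"
).format(final_tally_result['yes_count'],
         final_tally_result['abstain_count'],
         final_tally_result['no_count'],
         final_tally_result['no_with_veto_count'])
print(alert_msg)
```
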
diff --git a/alerter/src/api_wrappers/cosmos.py b/alerter/src/api_wrappers/cosmos.py
index ce30cd05..f62900c0 100644
--- a/alerter/src/api_wrappers/cosmos.py
+++ b/alerter/src/api_wrappers/cosmos.py
@@ -30,11 +30,12 @@ def __init__(self, logger: logging.Logger, verify: bool = False,
def get_syncing(self, cosmos_rest_url: str) -> Dict:
"""
This function retrieves data from the cosmos_rest_url/syncing endpoint,
- and is compatible with both v0.39.2 and v0.42.6 of the Cosmos SDK
+        and is *NOT* compatible with v0.39.2 or v0.42.6 of the Cosmos SDK;
+        it is *ONLY* compatible with v0.50.1
:param cosmos_rest_url: The Cosmos REST url of the data source
:return: Retrieves data from the cosmos_rest_url/syncing endpoint
"""
- endpoint = cosmos_rest_url + '/syncing'
+ endpoint = cosmos_rest_url + '/cosmos/base/tendermint/v1beta1/syncing'
return get_cosmos_json(endpoint=endpoint, logger=self.logger,
verify=self.verify, timeout=self.timeout)
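
The legacy `/syncing` route is served by the pre-v0.40 LCD server; on v0.50.x nodes the equivalent data is exposed through the gRPC-gateway route used above. A self-contained sketch of the same check, assuming a local REST endpoint and using `requests` directly in place of `get_cosmos_json`:

```python
import requests

def is_syncing(cosmos_rest_url: str, timeout: int = 10) -> bool:
    # gRPC-gateway syncing route exposed by Cosmos SDK v0.50.x REST servers.
    endpoint = cosmos_rest_url + '/cosmos/base/tendermint/v1beta1/syncing'
    response = requests.get(endpoint, timeout=timeout)
    response.raise_for_status()
    # Expected payload shape: {"syncing": false}
    return response.json()['syncing']

# Example (URL is a placeholder):
# print(is_syncing('http://localhost:1317'))
```
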
@@ -87,6 +88,34 @@ def get_staking_validators_v0_42_6(
return get_cosmos_json(endpoint=endpoint, logger=self.logger,
params=params, verify=self.verify,
timeout=self.timeout)
+
+ def get_staking_validators_v0_50_1(
+ self, cosmos_rest_url: str, validator_address: str = None,
+ params: Dict = None) -> Dict:
+ """
+ This function retrieves data from the
+ cosmos_rest_url/cosmos/staking/v1beta1/validators and
+ cosmos_rest_url/cosmos/staking/v1beta1/validators/{validatorAddr}
+ endpoints, depending on the inputted function parameters. Note that this
+ function is only compatible with v0.50.1 of the Cosmos SDK, for other
+ versions unexpected behaviour might occur.
+ :param cosmos_rest_url: The Cosmos REST url of the data source
+ :param params: Parameters that need to be added to the endpoint
+ :param validator_address: The address of the validator you want to query
+ :return: Retrieves data from the
+ : cosmos_rest_url/cosmos/staking/v1beta1/validators or
+ : cosmos_rest_url/cosmos/staking/v1beta1/validators/{
+ : validatorAddr} endpoints
+ """
+ cosmos_fn = (
+ '/cosmos/staking/v1beta1/validators' if validator_address is None
+ else '/cosmos/staking/v1beta1/validators/{}'.format(
+ validator_address)
+ )
+ endpoint = cosmos_rest_url + cosmos_fn
+ return get_cosmos_json(endpoint=endpoint, logger=self.logger,
+ params=params, verify=self.verify,
+ timeout=self.timeout)
def get_proposals_v0_39_2(
self, cosmos_rest_url: str, proposal_id: int = None,
@@ -137,6 +166,34 @@ def get_proposals_v0_42_6(
return get_cosmos_json(endpoint=endpoint, logger=self.logger,
params=params, verify=self.verify,
timeout=self.timeout)
+
+ def get_proposals_v0_50_1(
+ self, cosmos_rest_url: str, proposal_id: int = None,
+ params: Dict = None) -> Dict:
+ """
+ This function retrieves data from the
+        cosmos_rest_url/cosmos/gov/v1/proposals and
+        cosmos_rest_url/cosmos/gov/v1/proposals/{proposalId}
+ endpoints, depending on the inputted function parameters. Note that this
+ function is only compatible with v0.50.1 of the Cosmos SDK, for other
+ versions unexpected behaviour might occur.
+ :param cosmos_rest_url: The Cosmos REST url of the data source
+ :param params: Parameters that need to be added to the endpoint
+ :param proposal_id: The ID of the proposal you want to query
+ :return: Retrieves data from the
+        :        cosmos_rest_url/cosmos/gov/v1/proposals or
+        :        cosmos_rest_url/cosmos/gov/v1/proposals/{
+        :        proposalId} endpoints
+ """
+ cosmos_fn = (
+ '/cosmos/gov/v1/proposals' if proposal_id is None
+ else '/cosmos/gov/v1/proposals/{}'.format(
+ proposal_id)
+ )
+ endpoint = cosmos_rest_url + cosmos_fn
+ return get_cosmos_json(endpoint=endpoint, logger=self.logger,
+ params=params, verify=self.verify,
+ timeout=self.timeout)
def execute_with_checks(self, function, args: List[Any],
node_name: str, sdk_version: str) -> Any:
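
Put together, the wrapper now has one retrieval method per supported SDK line. A hedged usage sketch of the two new methods (logger, URL and address are placeholders; the pagination parameter follows the gRPC-gateway convention):

```python
import logging

# Import path as used elsewhere in the alerter package.
from src.api_wrappers.cosmos import CosmosRestServerApiWrapper

wrapper = CosmosRestServerApiWrapper(logging.getLogger(__name__))

# One page of up to 100 proposals from a v0.50.1 node:
proposals = wrapper.get_proposals_v0_50_1(
    'http://localhost:1317', params={'pagination.limit': 100})

# A single validator by operator address (the address is made up):
validator = wrapper.get_staking_validators_v0_50_1(
    'http://localhost:1317', validator_address='cosmosvaloper1exampleaddr')
```
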
diff --git a/alerter/src/monitorables/networks/cosmos.py b/alerter/src/monitorables/networks/cosmos.py
index 6e200626..8d3c72f9 100644
--- a/alerter/src/monitorables/networks/cosmos.py
+++ b/alerter/src/monitorables/networks/cosmos.py
@@ -31,10 +31,10 @@ def _are_new_proposals_valid(new_proposals: List[Dict]) -> bool:
'description': str,
'status': str,
'final_tally_result': {
- 'yes': Or(float, None),
- 'abstain': Or(float, None),
- 'no': Or(float, None),
- 'no_with_veto': Or(float, None),
+ 'yes_count': Or(float, None),
+ 'abstain_count': Or(float, None),
+ 'no_count': Or(float, None),
+ 'no_with_veto_count': Or(float, None),
},
'submit_time': float,
'deposit_end_time': float,
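
The schema change mirrors the gov v1 field names end to end. A small sketch with the `schema` package showing that only the new `*_count` keys validate (sample values are made up):

```python
from schema import Or, Schema

final_tally_schema = Schema({
    'yes_count': Or(float, None),
    'abstain_count': Or(float, None),
    'no_count': Or(float, None),
    'no_with_veto_count': Or(float, None),
})

# A gov v1 style tally passes:
print(final_tally_schema.is_valid({'yes_count': 100.0, 'abstain_count': 0.0,
                                   'no_count': 2.0,
                                   'no_with_veto_count': None}))  # True
# The legacy v0.39.2/v0.42.6 keys no longer do:
print(final_tally_schema.is_valid({'yes': 100.0, 'abstain': 0.0,
                                   'no': 2.0, 'no_with_veto': None}))  # False
```
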
diff --git a/alerter/src/monitors/cosmos.py b/alerter/src/monitors/cosmos.py
index f7742fd7..0d36ed8f 100644
--- a/alerter/src/monitors/cosmos.py
+++ b/alerter/src/monitors/cosmos.py
@@ -25,6 +25,7 @@
_REST_VERSION_COSMOS_SDK_0_39_2 = 'v0.39.2'
_REST_VERSION_COSMOS_SDK_0_42_6 = 'v0.42.6'
+_REST_VERSION_COSMOS_SDK_0_50_1 = 'v0.50.1'
_VERSION_INCOMPATIBILITY_EXCEPTIONS = [
IncorrectJSONRetrievedException, CosmosSDKVersionIncompatibleException,
TendermintRPCIncompatibleException
@@ -48,7 +49,7 @@ def __init__(self, monitor_name: str, data_sources: List[CosmosNodeConfig],
# This variable stores the latest REST version used to retrieve the
-        # data. By default, it is set to v0.42.6 of the Cosmos SDK.
+        # data. By default, it is set to v0.50.1 of the Cosmos SDK.
- self._last_rest_retrieval_version = _REST_VERSION_COSMOS_SDK_0_42_6
+ self._last_rest_retrieval_version = _REST_VERSION_COSMOS_SDK_0_50_1
@property
def data_sources(self) -> List[CosmosNodeConfig]:
diff --git a/alerter/src/monitors/network/cosmos.py b/alerter/src/monitors/network/cosmos.py
index c8ceed40..e143d75d 100644
--- a/alerter/src/monitors/network/cosmos.py
+++ b/alerter/src/monitors/network/cosmos.py
@@ -8,7 +8,7 @@
from src.configs.nodes.cosmos import CosmosNodeConfig
from src.message_broker.rabbitmq import RabbitMQApi
from src.monitors.cosmos import (
- CosmosMonitor, _REST_VERSION_COSMOS_SDK_0_42_6,
+    CosmosMonitor, _REST_VERSION_COSMOS_SDK_0_42_6,
+    _REST_VERSION_COSMOS_SDK_0_50_1,
_REST_VERSION_COSMOS_SDK_0_39_2, _VERSION_INCOMPATIBILITY_EXCEPTIONS)
from src.utils.constants.cosmos import (
PROPOSAL_STATUS_UNSPECIFIED, PROPOSAL_STATUS_DEPOSIT_PERIOD,
@@ -59,65 +59,69 @@ def chain_name(self) -> str:
@staticmethod
def _parse_proposal(proposal: Dict) -> Dict:
- """
- This function parses the proposal retrieved from the source node and
- returns the corresponding value to be used by the PANIC components.
- Note that this function is compatible with both v0.39.2 and v0.42.6 of
- the Cosmos SDK.
- :param proposal: The proposal retrieved from the source node
- :return: The corresponding proposal to be used by the PANIC components
- :raises: KeyError if the structure of the proposal returned by the
- endpoints is not as expected.
- """
- parsed_proposal = {
- 'proposal_id': (
- proposal['proposal_id']
- if 'proposal_id' in proposal
- else proposal['id']
- ),
- 'title': (
- proposal['content']['value']['title']
- if 'value' in proposal['content']
- else proposal['content']['title']
- ),
- 'description': (
- proposal['content']['value']['description']
- if 'value' in proposal['content']
- else proposal['content']['description']
+ """
+ This function parses the proposal retrieved from the source node and
+ returns the corresponding value to be used by the PANIC components.
+        Note that this function is compatible with v0.39.2, v0.42.6 and
+        v0.50.1 of the Cosmos SDK.
+ :param proposal: The proposal retrieved from the source node
+ :return: The corresponding proposal to be used by the PANIC components
+ :raises: KeyError if the structure of the proposal returned by the
+ endpoints is not as expected.
+ """
+ parsed_proposal = {
+ 'proposal_id': (
+ proposal['proposal_id']
+ if 'proposal_id' in proposal
+ else proposal['id']
+ ),
+ 'title': (
+ proposal['content']['value']['title']
+ if 'content' in proposal and 'value' in proposal['content']
+ else proposal['content']['title']
+ if 'content' in proposal
+ else proposal['title']
+ ),
+ 'description': (
+ proposal['content']['value']['description']
+ if 'content' in proposal and 'value' in proposal['content']
+ else proposal['content']['description']
+ if 'content' in proposal
+ else proposal['summary']
+ )
+ }
+
+ status = (
+ proposal['status']
+ if 'status' in proposal
+ else proposal['proposal_status']
)
- }
- status = (
- proposal['status']
- if 'status' in proposal
- else proposal['proposal_status']
- )
-
- if type(status) == str:
- status = status.lower()
- if status in [0, "proposal_status_unspecified", "unspecified"]:
- parsed_proposal['status'] = PROPOSAL_STATUS_UNSPECIFIED
- elif status in [1, "proposal_status_deposit_period", "deposit_period"]:
- parsed_proposal['status'] = PROPOSAL_STATUS_DEPOSIT_PERIOD
- elif status in [2, "proposal_status_voting_period", "voting_period"]:
- parsed_proposal['status'] = PROPOSAL_STATUS_VOTING_PERIOD
- elif status in [3, "proposal_status_passed", "passed"]:
- parsed_proposal['status'] = PROPOSAL_STATUS_PASSED
- elif status in [4, "proposal_status_rejected", "rejected"]:
- parsed_proposal['status'] = PROPOSAL_STATUS_REJECTED
- elif status in [5, "proposal_status_failed", "failed"]:
- parsed_proposal['status'] = PROPOSAL_STATUS_FAILED
- else:
- parsed_proposal['status'] = PROPOSAL_STATUS_INVALID
-
- parsed_proposal['final_tally_result'] = proposal['final_tally_result']
- parsed_proposal['submit_time'] = proposal['submit_time']
- parsed_proposal['deposit_end_time'] = proposal['deposit_end_time']
- parsed_proposal['total_deposit'] = proposal['total_deposit']
- parsed_proposal['voting_start_time'] = proposal['voting_start_time']
- parsed_proposal['voting_end_time'] = proposal['voting_end_time']
-
- return parsed_proposal
+        if isinstance(status, str):
+ status = status.lower()
+ if status in [0, "proposal_status_unspecified", "unspecified"]:
+ parsed_proposal['status'] = PROPOSAL_STATUS_UNSPECIFIED
+ elif status in [1, "proposal_status_deposit_period", "deposit_period"]:
+ parsed_proposal['status'] = PROPOSAL_STATUS_DEPOSIT_PERIOD
+ elif status in [2, "proposal_status_voting_period", "voting_period"]:
+ parsed_proposal['status'] = PROPOSAL_STATUS_VOTING_PERIOD
+ elif status in [3, "proposal_status_passed", "passed"]:
+ parsed_proposal['status'] = PROPOSAL_STATUS_PASSED
+ elif status in [4, "proposal_status_rejected", "rejected"]:
+ parsed_proposal['status'] = PROPOSAL_STATUS_REJECTED
+ elif status in [5, "proposal_status_failed", "failed"]:
+ parsed_proposal['status'] = PROPOSAL_STATUS_FAILED
+ else:
+ parsed_proposal['status'] = PROPOSAL_STATUS_INVALID
+
+ parsed_proposal['final_tally_result'] = proposal['final_tally_result']
+ parsed_proposal['submit_time'] = proposal['submit_time']
+ parsed_proposal['deposit_end_time'] = proposal['deposit_end_time']
+ parsed_proposal['total_deposit'] = proposal['total_deposit']
+ parsed_proposal['voting_start_time'] = proposal['voting_start_time']
+ parsed_proposal['voting_end_time'] = proposal['voting_end_time']
+
+ return parsed_proposal
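
The chained conditionals above now cover three proposal shapes: v0.39.2 nests the metadata under `content.value`, v0.42.6 under `content`, and gov v1 (v0.50.1) exposes `title`/`summary` at the top level. A condensed sketch of just the title resolution (sample values are made up):

```python
# The three proposal shapes the parser has to handle:
v0_39_2 = {'proposal_id': '1',
           'content': {'value': {'title': 't', 'description': 'd'}}}
v0_42_6 = {'proposal_id': '1', 'content': {'title': 't', 'description': 'd'}}
v0_50_1 = {'id': '1', 'title': 't', 'summary': 'd'}  # gov v1: top level

def resolve_title(proposal: dict) -> str:
    # Mirrors the conditional expression in _parse_proposal.
    if 'content' in proposal and 'value' in proposal['content']:
        return proposal['content']['value']['title']
    if 'content' in proposal:
        return proposal['content']['title']
    return proposal['title']

for p in (v0_39_2, v0_42_6, v0_50_1):
    assert resolve_title(p) == 't'
```
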
def _get_cosmos_rest_v0_39_2_indirect_data(
self, source: CosmosNodeConfig) -> Dict:
@@ -197,6 +201,47 @@ def retrieval_process() -> Dict:
return self._execute_cosmos_rest_retrieval_with_exceptions(
retrieval_process, source_name, source_url,
_REST_VERSION_COSMOS_SDK_0_42_6)
+
+ def _get_cosmos_rest_v0_50_1_indirect_data(
+ self, source: CosmosNodeConfig) -> Dict:
+ """
+ This function retrieves network specific metrics. To retrieve this
+ data we use version v0.50.1 of the Cosmos SDK for the REST server.
+ :param source: The chosen data source
+ :return: A dict containing all indirect metrics
+ :raises: CosmosSDKVersionIncompatibleException if the Cosmos SDK version
+ of the source is not compatible with v0.50.1
+ : CosmosRestServerApiCallException if an error occurs during an
+ API call
+ : DataReadingException if data cannot be read from the source
+ : CannotConnectWithDataSourceException if we cannot connect with
+ the data source
+ : InvalidUrlException if the URL of the data source does not have
+ a valid schema
+ : IncorrectJSONRetrievedException if the structure of the data
+ returned by the endpoints is not as expected. This could be
+ both due to a Tendermint or Cosmos SDK update
+ """
+ source_url = source.cosmos_rest_url
+ source_name = source.node_name
+
+ def retrieval_process() -> Dict:
+ paginated_data = self._get_rest_data_with_pagination_keys(
+ self.cosmos_rest_server_api.get_proposals_v0_50_1,
+ [source_url, None], {}, source_name,
+ _REST_VERSION_COSMOS_SDK_0_50_1)
+
+ parsed_proposals = {'proposals': []}
+ for page in paginated_data:
+ for proposal in page['proposals']:
+ parsed_proposals['proposals'].append(
+ self._parse_proposal(proposal))
+
+ return parsed_proposals
+
+ return self._execute_cosmos_rest_retrieval_with_exceptions(
+ retrieval_process, source_name, source_url,
+ _REST_VERSION_COSMOS_SDK_0_50_1)
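
`_get_rest_data_with_pagination_keys` (defined in the shared `CosmosMonitor`) drives the page walk here. A simplified sketch of what that walk amounts to for this endpoint, assuming the standard gRPC-gateway `pagination.key`/`pagination.next_key` convention:

```python
def fetch_all_proposal_pages(api, source_url: str) -> list:
    # Approximation of _get_rest_data_with_pagination_keys for gov v1:
    # keep requesting pages until no next_key is returned.
    pages, params = [], {}
    while True:
        page = api.get_proposals_v0_50_1(source_url, None, params)
        pages.append(page)
        next_key = page.get('pagination', {}).get('next_key')
        if not next_key:
            return pages
        params = {'pagination.key': next_key}
```
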
def _get_cosmos_rest_indirect_data(self, source: CosmosNodeConfig,
sdk_version: str) -> Dict:
@@ -216,6 +261,9 @@ def _get_cosmos_rest_indirect_data(self, source: CosmosNodeConfig,
return self._get_cosmos_rest_v0_39_2_indirect_data(source)
elif sdk_version == _REST_VERSION_COSMOS_SDK_0_42_6:
return self._get_cosmos_rest_v0_42_6_indirect_data(source)
+ elif sdk_version == _REST_VERSION_COSMOS_SDK_0_50_1:
+ return self._get_cosmos_rest_v0_50_1_indirect_data(source)
+
return {
'proposals': None
@@ -269,6 +317,16 @@ def _get_cosmos_rest_v0_42_6_data(self) -> (
"""
return self._get_cosmos_rest_version_data(
_REST_VERSION_COSMOS_SDK_0_42_6)
+
+ def _get_cosmos_rest_v0_50_1_data(self) -> (
+ Dict, bool, Optional[Exception]):
+ """
+ This function calls self._get_cosmos_rest_version_data with
+ _REST_VERSION_COSMOS_SDK_0_50_1
+ :return: The return of self._get_cosmos_rest_version_data
+ """
+ return self._get_cosmos_rest_version_data(
+ _REST_VERSION_COSMOS_SDK_0_50_1)
def _get_cosmos_rest_data(self) -> (Dict, bool, Optional[Exception]):
"""
@@ -283,6 +341,7 @@ def _get_cosmos_rest_data(self) -> (Dict, bool, Optional[Exception]):
supported_retrievals = {
_REST_VERSION_COSMOS_SDK_0_39_2: self._get_cosmos_rest_v0_39_2_data,
_REST_VERSION_COSMOS_SDK_0_42_6: self._get_cosmos_rest_v0_42_6_data,
+ _REST_VERSION_COSMOS_SDK_0_50_1: self._get_cosmos_rest_v0_50_1_data
}
# First check whether REST data can be obtained using the last REST
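
With a third entry in `supported_retrievals`, the fallback strategy stays the same: try the version that worked last, and only on a version-incompatibility error walk the remaining versions. A simplified sketch of that dispatch (the real method also updates `self._last_rest_retrieval_version` and wraps a total failure in a dedicated exception):

```python
# Import path as used by the monitors in this package.
from src.monitors.cosmos import _VERSION_INCOMPATIBILITY_EXCEPTIONS

def get_rest_data(last_version: str, retrievals: dict):
    # Try the last known-good version first.
    data, error, exc = retrievals[last_version]()
    if not isinstance(exc, tuple(_VERSION_INCOMPATIBILITY_EXCEPTIONS)):
        return last_version, (data, error, exc)
    # Fall back to the other supported versions on incompatibility.
    for version, retrieval_fn in retrievals.items():
        if version == last_version:
            continue
        data, error, exc = retrieval_fn()
        if not isinstance(exc, tuple(_VERSION_INCOMPATIBILITY_EXCEPTIONS)):
            return version, (data, error, exc)
    return last_version, (data, error, exc)
```
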
diff --git a/alerter/src/monitors/node/cosmos.py b/alerter/src/monitors/node/cosmos.py
index b74aa49f..faeeddf4 100644
--- a/alerter/src/monitors/node/cosmos.py
+++ b/alerter/src/monitors/node/cosmos.py
@@ -14,7 +14,7 @@
from src.configs.nodes.cosmos import CosmosNodeConfig
from src.message_broker.rabbitmq import RabbitMQApi
from src.monitors.cosmos import (
- CosmosMonitor, _REST_VERSION_COSMOS_SDK_0_42_6,
+    CosmosMonitor, _REST_VERSION_COSMOS_SDK_0_42_6,
+    _REST_VERSION_COSMOS_SDK_0_50_1,
_REST_VERSION_COSMOS_SDK_0_39_2, _VERSION_INCOMPATIBILITY_EXCEPTIONS)
from src.utils.constants.cosmos import (
BOND_STATUS_BONDED, BOND_STATUS_UNBONDED, BOND_STATUS_UNBONDING,
@@ -83,7 +83,7 @@ def __init__(self, monitor_name: str, node_config: CosmosNodeConfig,
]
# --------------------------- PROMETHEUS -------------------------------
- # tendermint_consensus_validator_power needs to be set as optional
+ # consensus_validator_power needs to be set as optional
# because it is non-existent for nodes which are not in the validator
# set.
self._prometheus_metrics = {
@@ -249,6 +249,58 @@ def retrieval_process() -> Dict:
return self._execute_cosmos_rest_retrieval_with_exceptions(
retrieval_process, source_name, source_url,
_REST_VERSION_COSMOS_SDK_0_42_6)
+
+ def _get_cosmos_rest_v0_50_1_indirect_data_validator(
+ self, source: CosmosNodeConfig) -> Dict:
+ """
+ This function retrieves node specific metrics using a different node as
+ data source. We do not use the node directly since the node may be
+        offline or syncing, thus the data may be corrupt. Note that as a last
+        resort the manager may supply the node itself as data source. To
+ retrieve this data we use version v0.50.1 of the Cosmos SDK for the REST
+ server. NOTE: In this function we are assuming that the node being
+ monitored is a validator.
+ :param source: The chosen data source
+ :return: A dict containing all indirect metrics
+ :raises: CosmosSDKVersionIncompatibleException if the Cosmos SDK version
+ of the source is not compatible with v0.50.1
+ : CosmosRestServerApiCallException if an error occurs during an
+ API call
+ : DataReadingException if data cannot be read from the source
+ : CannotConnectWithDataSourceException if we cannot connect with
+ the data source
+ : InvalidUrlException if the URL of the data source does not have
+ a valid schema
+ : IncorrectJSONRetrievedException if the structure of the data
+ returned by the endpoints is not as expected. This could be
+ both due to a Tendermint or Cosmos SDK update
+ """
+ operator_address = self.node_config.operator_address
+ source_url = source.cosmos_rest_url
+ source_name = source.node_name
+
+ def retrieval_process() -> Dict:
+ staking_validators = \
+ self.cosmos_rest_server_api.execute_with_checks(
+ self.cosmos_rest_server_api.get_staking_validators_v0_50_1,
+ [source_url, operator_address, {}], source_name,
+ _REST_VERSION_COSMOS_SDK_0_50_1)
+ bond_status = self._parse_validator_status(
+ staking_validators['validator']['status'])
+ return {
+ 'bond_status': bond_status,
+
+ # The 'jailed' keyword is normally exposed in
+ # cosmos/staking/v1beta1/validators for v0.50.1 of the Cosmos
+ # SDK. If we encounter nodes on this version which do not
+ # expose it we might need to use
+ # /cosmos/slashing/v1beta1/signing_infos
+ 'jailed': staking_validators['validator']['jailed'],
+ }
+
+ return self._execute_cosmos_rest_retrieval_with_exceptions(
+ retrieval_process, source_name, source_url,
+ _REST_VERSION_COSMOS_SDK_0_50_1)
def _get_cosmos_rest_indirect_data(self, source: CosmosNodeConfig,
sdk_version: str) -> Dict:
@@ -276,6 +328,9 @@ def _get_cosmos_rest_indirect_data(self, source: CosmosNodeConfig,
elif sdk_version == _REST_VERSION_COSMOS_SDK_0_42_6:
return self._get_cosmos_rest_v0_42_6_indirect_data_validator(
source)
+ elif sdk_version == _REST_VERSION_COSMOS_SDK_0_50_1:
+ return self._get_cosmos_rest_v0_50_1_indirect_data_validator(
+ source)
else:
return {
'bond_status': None,
@@ -344,6 +399,16 @@ def _get_cosmos_rest_v0_42_6_data(self) -> (
"""
return self._get_cosmos_rest_version_data(
_REST_VERSION_COSMOS_SDK_0_42_6)
+
+ def _get_cosmos_rest_v0_50_1_data(self) -> (
+ Dict, bool, Optional[Exception]):
+ """
+ This function calls self._get_cosmos_rest_version_data with
+ _REST_VERSION_COSMOS_SDK_0_50_1
+ :return: The return of self._get_cosmos_rest_version_data
+ """
+ return self._get_cosmos_rest_version_data(
+ _REST_VERSION_COSMOS_SDK_0_50_1)
def _get_cosmos_rest_data(self) -> (Dict, bool, Optional[Exception]):
"""
@@ -358,6 +423,7 @@ def _get_cosmos_rest_data(self) -> (Dict, bool, Optional[Exception]):
supported_retrievals = {
_REST_VERSION_COSMOS_SDK_0_39_2: self._get_cosmos_rest_v0_39_2_data,
_REST_VERSION_COSMOS_SDK_0_42_6: self._get_cosmos_rest_v0_42_6_data,
+ _REST_VERSION_COSMOS_SDK_0_50_1: self._get_cosmos_rest_v0_50_1_data,
}
# First check whether REST data can be obtained using the last REST
@@ -533,6 +599,12 @@ def _validator_was_slashed(self, begin_block_events: List[Dict]) -> (
# cannot be active, hence not slashed
return False, None
+        def try_base64_decode(s):
+            # Tendermint v0.34 base64-encodes event attribute keys and
+            # values, while newer CometBFT versions return plain strings;
+            # fall back to the raw value when decoding fails.
+            try:
+                return base64.b64decode(s).decode('utf-8')
+            except (base64.binascii.Error, UnicodeDecodeError):
+                return s
+
slashed = False
slashed_amount = None
for event in begin_block_events:
@@ -546,10 +618,9 @@ def _validator_was_slashed(self, begin_block_events: List[Dict]) -> (
attributes = event['attributes']
for attribute in attributes:
if 'key' in attribute and 'value' in attribute:
- decoded_key = base64.b64decode(attribute['key']).decode(
- 'utf-8')
- decoded_value = base64.b64decode(
- attribute['value']).decode('utf-8')
+ decoded_key = try_base64_decode(attribute['key'])
+ decoded_value = try_base64_decode(attribute['value'])
+
if str.lower(decoded_key) == "address":
event_address = bech32_to_address(decoded_value)
elif str.lower(decoded_key) == "burned_coins":
@@ -617,7 +688,10 @@ def retrieval_process() -> Dict:
[source_url, {'height': height_to_monitor}],
source_name)
slashed, slashed_amount = self._validator_was_slashed(
- block_results_at_height['result']['begin_block_events'])
+ block_results_at_height['result']['begin_block_events']
+ if 'begin_block_events' in block_results_at_height['result']
+ else block_results_at_height['result']['finalize_block_events']
+ )
if validator_was_active:
previous_block_signatures = block_at_height['result'][
@@ -938,8 +1012,8 @@ def _process_retrieved_prometheus_data(self, data: Dict) -> Dict:
# were set to be optional, so first we need to check if the value is
# None.
one_value_subset_metrics = [
- 'tendermint_consensus_latest_block_height',
- 'tendermint_consensus_validator_power',
+ 'consensus_latest_block_height',
+ 'consensus_validator_power',
]
for metric in one_value_subset_metrics:
value = None
@@ -950,17 +1024,17 @@ def _process_retrieved_prometheus_data(self, data: Dict) -> Dict:
self.logger.debug("%s %s: %s", self.node_config, metric, value)
processed_data['result']['data'][metric] = value
- # If the tendermint_consensus_validator_power is None it means that the
+ # If the consensus_validator_power is None it means that the
# metric could not be obtained, hence the node is not in the validator
# set. This means that we can set the metric to 0 as the node has no
# voting power.
voting_power = processed_data['result']['data'][
- 'tendermint_consensus_validator_power']
+ 'consensus_validator_power']
if voting_power is None:
self.logger.debug("%s %s converted to %s", self.node_config,
- 'tendermint_consensus_validator_power', 0)
+ 'consensus_validator_power', 0)
processed_data['result']['data'][
- 'tendermint_consensus_validator_power'] = 0
+ 'consensus_validator_power'] = 0
return processed_data
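
The decode fallback added above exists because Tendermint v0.34 returns block-results event attributes base64-encoded, while newer CometBFT releases return them as plain strings (the test docstring makes the same point). A standalone sketch of the helper:

```python
import base64

def try_base64_decode(s: str) -> str:
    # Tendermint v0.34 base64-encodes event attribute keys/values; newer
    # CometBFT versions return plain strings, so fall back to the raw value.
    try:
        return base64.b64decode(s).decode('utf-8')
    except (base64.binascii.Error, UnicodeDecodeError):
        return s

assert try_base64_decode('YWRkcmVzcw==') == 'address'  # encoded form
assert try_base64_decode('address') == 'address'       # plain form
```
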
diff --git a/alerter/src/utils/constants/data.py b/alerter/src/utils/constants/data.py
index 5ee106ca..1ffbda16 100644
--- a/alerter/src/utils/constants/data.py
+++ b/alerter/src/utils/constants/data.py
@@ -28,8 +28,8 @@
'run_status_update_total_errors': 'total_errored_job_runs',
}
RAW_TO_TRANSFORMED_COSMOS_NODE_PROM_METRICS = {
- 'tendermint_consensus_latest_block_height': 'current_height',
- 'tendermint_consensus_validator_power': 'voting_power',
+ 'consensus_latest_block_height': 'current_height',
+ 'consensus_validator_power': 'voting_power',
}
INT_CHAINLINK_METRICS = ['current_height',
'total_block_headers_received',
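
Renaming the raw Prometheus keys here keeps the transformer mapping in sync with the metric names scraped from the node. A quick sketch of how such a raw-to-transformed mapping is applied (sample values are made up):

```python
RAW_TO_TRANSFORMED = {
    'consensus_latest_block_height': 'current_height',
    'consensus_validator_power': 'voting_power',
}

raw = {'consensus_latest_block_height': 8137538.0,
       'consensus_validator_power': 725315.0}
transformed = {RAW_TO_TRANSFORMED[key]: value for key, value in raw.items()}
print(transformed)  # {'current_height': 8137538.0, 'voting_power': 725315.0}
```
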
diff --git a/alerter/test/data_transformers/node/test_cosmos.py b/alerter/test/data_transformers/node/test_cosmos.py
index d56f6a2e..315a1a0b 100644
--- a/alerter/test/data_transformers/node/test_cosmos.py
+++ b/alerter/test/data_transformers/node/test_cosmos.py
@@ -114,8 +114,8 @@ def setUp(self) -> None:
'operator_address': self.test_operator_address,
},
'data': {
- 'tendermint_consensus_latest_block_height': 10000.0,
- 'tendermint_consensus_validator_power': 345456.0,
+ 'consensus_latest_block_height': 10000.0,
+ 'consensus_validator_power': 345456.0,
},
}
},
@@ -186,7 +186,7 @@ def setUp(self) -> None:
self.raw_data_example_result_options_None = copy.deepcopy(
self.raw_data_example_result_all)
self.raw_data_example_result_options_None['prometheus'][
- 'result']['data']['tendermint_consensus_validator_power'] = None
+ 'result']['data']['consensus_validator_power'] = None
self.raw_data_example_result_options_None['tendermint_rpc']['result'][
'data']['historical'] = None
self.raw_data_example_result_options_None['tendermint_rpc']['result'][
diff --git a/alerter/test/monitors/node/test_cosmos.py b/alerter/test/monitors/node/test_cosmos.py
index 3afdd97a..10cad67c 100644
--- a/alerter/test/monitors/node/test_cosmos.py
+++ b/alerter/test/monitors/node/test_cosmos.py
@@ -60,6 +60,7 @@ def setUp(self) -> None:
self.test_exception_3 = PANICException('test_exception_3', 3)
self.sdk_version_0_39_2 = 'v0.39.2'
self.sdk_version_0_42_6 = 'v0.42.6'
+ self.sdk_version_0_50_1 = 'v0.50.1'
self.test_consensus_address = 'test_consensus_address'
self.test_is_syncing = False
self.test_is_peered_with_sentinel = True
@@ -68,23 +69,23 @@ def setUp(self) -> None:
# --------------- Data retrieval variables and examples ---------------
# Prometheus
self.prometheus_metrics = {
- 'tendermint_consensus_latest_block_height': 'strict',
- 'tendermint_consensus_validator_power': 'optional',
+ 'consensus_latest_block_height': 'strict',
+ 'consensus_validator_power': 'optional',
}
self.retrieved_prometheus_data_example_1 = {
- 'tendermint_consensus_latest_block_height': {
+ 'consensus_latest_block_height': {
'{"chain_id": "cosmoshub-4"}': 8137538.0
},
- 'tendermint_consensus_validator_power': {
+ 'consensus_validator_power': {
'{"chain_id": "cosmoshub-4", "validator_address": '
'"7B3D01F754DFF8474ED0E358812FD437E09389DC"}': 725315.0
}
}
self.retrieved_prometheus_data_example_2 = {
- 'tendermint_consensus_latest_block_height': {
+ 'consensus_latest_block_height': {
'{"chain_id": "cosmoshub-4"}': 538.0
},
- 'tendermint_consensus_validator_power': {
+ 'consensus_validator_power': {
'{"chain_id": "cosmoshub-4", "validator_address": '
'"7B3D01F754DFF8474ED0E358812FD437E09389DC"}': None
}
@@ -153,12 +154,12 @@ def setUp(self) -> None:
# Processed retrieved data example
self.processed_prometheus_data_example_1 = {
- 'tendermint_consensus_latest_block_height': 8137538.0,
- 'tendermint_consensus_validator_power': 725315.0,
+ 'consensus_latest_block_height': 8137538.0,
+ 'consensus_validator_power': 725315.0,
}
self.processed_prometheus_data_example_2 = {
- 'tendermint_consensus_latest_block_height': 538.0,
- 'tendermint_consensus_validator_power': 0,
+ 'consensus_latest_block_height': 538.0,
+ 'consensus_validator_power': 0,
}
# Test monitor instance
@@ -401,6 +402,21 @@ def test_get_cosmos_rest_v0_42_6_indirect_data_validator_return(
self.test_monitor._get_cosmos_rest_v0_42_6_indirect_data_validator(
self.data_sources[0])
self.assertEqual(expected_return, actual_return)
+
+    # The parameterized cases below are illustrative; mirror the data used
+    # for the v0.39.2/v0.42.6 variants of this test.
+    @parameterized.expand([
+        ({'validator': {'status': 'BOND_STATUS_BONDED', 'jailed': False}},
+         {'bond_status': BOND_STATUS_BONDED, 'jailed': False},),
+        ({'validator': {'status': 'BOND_STATUS_UNBONDED', 'jailed': True}},
+         {'bond_status': BOND_STATUS_UNBONDED, 'jailed': True},),
+    ])
+    @mock.patch.object(CosmosRestServerApiWrapper,
+                       'get_staking_validators_v0_50_1')
+ def test_get_cosmos_rest_v0_50_1_indirect_data_validator_return(
+ self, staking_validators_return, expected_return,
+ mock_staking_validators) -> None:
+ """
+ We will check that the return is as expected for all cases
+ """
+ mock_staking_validators.return_value = staking_validators_return
+ actual_return = \
+ self.test_monitor._get_cosmos_rest_v0_50_1_indirect_data_validator(
+ self.data_sources[0])
+ self.assertEqual(expected_return, actual_return)
+
def test_get_cosmos_rest_indirect_data_return_if_empty_source_url(
self) -> None:
@@ -419,6 +435,11 @@ def test_get_cosmos_rest_indirect_data_return_if_empty_source_url(
actual_ret = self.test_monitor._get_cosmos_rest_indirect_data(
self.data_sources[0], self.sdk_version_0_42_6)
self.assertEqual(expected_ret, actual_ret)
+
+ # Test for v0.50.1
+ actual_ret = self.test_monitor._get_cosmos_rest_indirect_data(
+ self.data_sources[0], self.sdk_version_0_50_1)
+ self.assertEqual(expected_ret, actual_ret)
def test_get_cosmos_rest_indirect_data_return_if_non_validator_node(
self) -> None:
@@ -437,17 +458,26 @@ def test_get_cosmos_rest_indirect_data_return_if_non_validator_node(
actual_ret = self.test_monitor._get_cosmos_rest_indirect_data(
self.data_sources[0], self.sdk_version_0_42_6)
self.assertEqual(expected_ret, actual_ret)
+
+ # Test for v0.50.1
+ actual_ret = self.test_monitor._get_cosmos_rest_indirect_data(
+ self.data_sources[0], self.sdk_version_0_50_1)
+ self.assertEqual(expected_ret, actual_ret)
@mock.patch.object(CosmosNodeMonitor,
'_get_cosmos_rest_v0_39_2_indirect_data_validator')
@mock.patch.object(CosmosNodeMonitor,
'_get_cosmos_rest_v0_42_6_indirect_data_validator')
+ @mock.patch.object(CosmosNodeMonitor,
+ '_get_cosmos_rest_v0_50_1_indirect_data_validator')
def test_get_cosmos_rest_indirect_data_return_if_validator_node(
- self, mock_get_42_indirect, mock_get_39_indirect) -> None:
+            self, mock_get_50_indirect, mock_get_42_indirect,
+            mock_get_39_indirect) -> None:
mock_get_42_indirect.return_value = \
self.retrieved_cosmos_rest_indirect_data_1
mock_get_39_indirect.return_value = \
self.retrieved_cosmos_rest_indirect_data_2
+ mock_get_50_indirect.return_value = \
+ self.retrieved_cosmos_rest_indirect_data_2
expected_invalid_version = {
'bond_status': None,
'jailed': None
@@ -486,12 +516,17 @@ def test_get_cosmos_rest_version_data_return_if_no_indirect_source_selected(
self.sdk_version_0_42_6)
actual_ret_v0_39_2 = self.test_monitor._get_cosmos_rest_version_data(
self.sdk_version_0_39_2)
+ actual_ret_v0_50_1 = self.test_monitor._get_cosmos_rest_version_data(
+ self.sdk_version_0_50_1)
self.assertEqual(({}, True, NoSyncedDataSourceWasAccessibleException(
self.monitor_name, 'indirect Cosmos REST node')),
actual_ret_v0_42_6)
self.assertEqual(({}, True, NoSyncedDataSourceWasAccessibleException(
self.monitor_name, 'indirect Cosmos REST node')),
actual_ret_v0_39_2)
+ self.assertEqual(({}, True, NoSyncedDataSourceWasAccessibleException(
+ self.monitor_name, 'indirect Cosmos REST node')),
+ actual_ret_v0_50_1)
@parameterized.expand([
(NodeIsDownException('node_name_1'),),
@@ -517,8 +552,11 @@ def test_get_cosmos_rest_version_data_ret_if_node_not_reachable_at_rest_url(
self.sdk_version_0_42_6)
actual_ret_v0_39_2 = self.test_monitor._get_cosmos_rest_version_data(
self.sdk_version_0_39_2)
+ actual_ret_v0_50_1 = self.test_monitor._get_cosmos_rest_version_data(
+ self.sdk_version_0_50_1)
self.assertEqual(({}, True, err), actual_ret_v0_42_6)
self.assertEqual(({}, True, err), actual_ret_v0_39_2)
+ self.assertEqual(({}, True, err), actual_ret_v0_50_1)
@mock.patch.object(CosmosNodeMonitor, '_get_cosmos_rest_indirect_data')
@mock.patch.object(CosmosNodeMonitor, '_cosmos_rest_reachable')
@@ -544,12 +582,17 @@ def test_get_cosmos_rest_version_data_return_if_data_obtained_successfully(
self.sdk_version_0_42_6)
actual_ret_v0_39_2 = self.test_monitor._get_cosmos_rest_version_data(
self.sdk_version_0_39_2)
+ actual_ret_v0_50_1 = self.test_monitor._get_cosmos_rest_version_data(
+ self.sdk_version_0_50_1)
self.assertEqual(
(self.retrieved_cosmos_rest_indirect_data_1, False, None),
actual_ret_v0_42_6)
self.assertEqual(
(self.retrieved_cosmos_rest_indirect_data_2, False, None),
actual_ret_v0_39_2)
+ self.assertEqual(
+ (self.retrieved_cosmos_rest_indirect_data_2, False, None),
+ actual_ret_v0_50_1)
@parameterized.expand([
(CannotConnectWithDataSourceException('test_monitor', 'node_name_1',
@@ -579,8 +622,12 @@ def test_get_cosmos_rest_version_data_ret_if_expected_err_in_data_retrieval(
self.sdk_version_0_42_6)
actual_ret_v0_39_2 = self.test_monitor._get_cosmos_rest_version_data(
self.sdk_version_0_39_2)
+ actual_ret_v0_50_1 = self.test_monitor._get_cosmos_rest_version_data(
+ self.sdk_version_0_50_1)
self.assertEqual(({}, True, err), actual_ret_v0_42_6)
self.assertEqual(({}, True, err), actual_ret_v0_39_2)
+ self.assertEqual(({}, True, err), actual_ret_v0_50_1)
+
@mock.patch.object(CosmosNodeMonitor, '_get_cosmos_rest_version_data')
def test_get_cosmos_rest_v0_39_2_data_calls_get_cosmos_rest_version_data(
@@ -664,6 +711,47 @@ def test_get_cosmos_rest_v0_42_6_data_returns_get_cosmos_rest_ver_data_ret(
actual_ret = self.test_monitor._get_cosmos_rest_v0_42_6_data()
self.assertEqual(get_rest_version_data_ret, actual_ret)
+ @mock.patch.object(CosmosNodeMonitor, '_get_cosmos_rest_version_data')
+ def test_get_cosmos_rest_v0_50_1_data_calls_get_cosmos_rest_version_data(
+ self, mock_get_rest_version) -> None:
+ """
+ In this test we will be checking that self._get_cosmos_rest_v0_50_1_data
+ calls self._get_cosmos_rest_version_data correctly.
+ """
+ mock_get_rest_version.return_value = None
+ self.test_monitor._get_cosmos_rest_v0_50_1_data()
+ mock_get_rest_version.assert_called_once_with(self.sdk_version_0_50_1)
+
+ @parameterized.expand([
+ (({}, True,
+ CannotConnectWithDataSourceException('test_monitor', 'node_name_1',
+ 'err')),),
+ (({}, True,
+ DataReadingException('test_monitor', 'cosmos_rest_url_1')),),
+ (({}, True, InvalidUrlException('cosmos_rest_url_1')),),
+ (({}, True,
+ CosmosSDKVersionIncompatibleException('node_name_1', 'v0.42.6')),),
+ (({}, True, CosmosRestServerApiCallException('test_call', 'err_msg')),),
+ (({}, True, IncorrectJSONRetrievedException('REST', 'err')),),
+ (({}, True, NoSyncedDataSourceWasAccessibleException(
+ 'test_monitor_name', 'indirect Cosmos REST node')),),
+ (({}, True, NodeIsDownException('node_name_1')),),
+ (({'indirect_key': 34}, False, None),),
+ ])
+ @mock.patch.object(CosmosNodeMonitor, '_get_cosmos_rest_version_data')
+ def test_get_cosmos_rest_v0_50_1_data_returns_get_cosmos_rest_ver_data_ret(
+ self, get_rest_version_data_ret, mock_get_rest_version) -> None:
+ """
+ In this test we will be checking that self._get_cosmos_rest_v0_50_1_data
+ returns the value returned by self._get_cosmos_rest_version_data. We
+ will test for every possible return that
+ self._get_cosmos_rest_version_data might return using
+ parameterized.expand
+ """
+ mock_get_rest_version.return_value = get_rest_version_data_ret
+ actual_ret = self.test_monitor._get_cosmos_rest_v0_50_1_data()
+ self.assertEqual(get_rest_version_data_ret, actual_ret)
+
@parameterized.expand([
(({}, True,
CannotConnectWithDataSourceException('test_monitor', 'node_name_1',
@@ -677,11 +765,12 @@ def test_get_cosmos_rest_v0_42_6_data_returns_get_cosmos_rest_ver_data_ret(
(({}, True, NodeIsDownException('node_name_1')),),
(({'indirect_key': 34}, False, None),),
])
+ @mock.patch.object(CosmosNodeMonitor, '_get_cosmos_rest_v0_50_1_data')
@mock.patch.object(CosmosNodeMonitor, '_get_cosmos_rest_v0_42_6_data')
@mock.patch.object(CosmosNodeMonitor, '_get_cosmos_rest_v0_39_2_data')
def test_get_cosmos_rest_data_uses_last_retrieval_fn_used_first(
self, retrieval_ret, mock_get_cosmos_rest_v0_39_2,
- mock_get_cosmos_rest_v0_42_6) -> None:
+            mock_get_cosmos_rest_v0_42_6,
+            mock_get_cosmos_rest_v0_50_1) -> None:
"""
In this test we will check that first the self._get_cosmos_rest_data
function first attempts to retrieve data using the last used retrieval
@@ -693,31 +782,45 @@ def test_get_cosmos_rest_data_uses_last_retrieval_fn_used_first(
"""
mock_get_cosmos_rest_v0_39_2.return_value = retrieval_ret
mock_get_cosmos_rest_v0_42_6.return_value = retrieval_ret
-
+        mock_get_cosmos_rest_v0_50_1.return_value = retrieval_ret
+
# Test for v0.39.2
self.test_monitor._last_rest_retrieval_version = self.sdk_version_0_39_2
self.test_monitor._get_cosmos_rest_data()
mock_get_cosmos_rest_v0_39_2.assert_called_once()
mock_get_cosmos_rest_v0_42_6.assert_not_called()
+ mock_get_cosmos_rest_v0_50_1.assert_not_called()
mock_get_cosmos_rest_v0_39_2.reset_mock()
mock_get_cosmos_rest_v0_42_6.reset_mock()
+ mock_get_cosmos_rest_v0_50_1.reset_mock()
# Test for v0.42.6
self.test_monitor._last_rest_retrieval_version = self.sdk_version_0_42_6
self.test_monitor._get_cosmos_rest_data()
mock_get_cosmos_rest_v0_39_2.assert_not_called()
mock_get_cosmos_rest_v0_42_6.assert_called_once()
+ mock_get_cosmos_rest_v0_50_1.assert_not_called()
+ mock_get_cosmos_rest_v0_39_2.reset_mock()
+ mock_get_cosmos_rest_v0_42_6.reset_mock()
+ mock_get_cosmos_rest_v0_50_1.reset_mock()
+
+ # Test for v0.50.1
+ self.test_monitor._last_rest_retrieval_version = self.sdk_version_0_50_1
+ self.test_monitor._get_cosmos_rest_data()
+ mock_get_cosmos_rest_v0_39_2.assert_not_called()
+ mock_get_cosmos_rest_v0_42_6.assert_not_called()
+ mock_get_cosmos_rest_v0_50_1.assert_called_once()
@parameterized.expand([
(({}, True, IncorrectJSONRetrievedException('REST', 'err')),),
(({}, True,
CosmosSDKVersionIncompatibleException('node_name_1', 'version')),),
])
+ @mock.patch.object(CosmosNodeMonitor, '_get_cosmos_rest_v0_50_1_data')
@mock.patch.object(CosmosNodeMonitor, '_get_cosmos_rest_v0_42_6_data')
@mock.patch.object(CosmosNodeMonitor, '_get_cosmos_rest_v0_39_2_data')
def test_get_cosmos_rest_data_attempts_other_rets_if_last_incompatible(
self, retrieval_ret, mock_get_cosmos_rest_v0_39_2,
- mock_get_cosmos_rest_v0_42_6) -> None:
+            mock_get_cosmos_rest_v0_42_6,
+            mock_get_cosmos_rest_v0_50_1) -> None:
"""
In this test we will check that other retrievals are performed if the
last retrieval performed raises an incompatibility error
@@ -726,21 +829,46 @@ def test_get_cosmos_rest_data_attempts_other_rets_if_last_incompatible(
mock_get_cosmos_rest_v0_39_2.return_value = retrieval_ret
mock_get_cosmos_rest_v0_42_6.return_value = \
(self.retrieved_cosmos_rest_data_1, False, None)
+ mock_get_cosmos_rest_v0_50_1.return_value = \
+ (self.retrieved_cosmos_rest_data_1, False, None)
self.test_monitor._last_rest_retrieval_version = self.sdk_version_0_39_2
self.test_monitor._get_cosmos_rest_data()
mock_get_cosmos_rest_v0_39_2.assert_called_once()
mock_get_cosmos_rest_v0_42_6.assert_called_once()
+ mock_get_cosmos_rest_v0_50_1.assert_called_once()
mock_get_cosmos_rest_v0_39_2.reset_mock()
mock_get_cosmos_rest_v0_42_6.reset_mock()
+ mock_get_cosmos_rest_v0_50_1.reset_mock()
# Test for v0.42.6
mock_get_cosmos_rest_v0_39_2.return_value = \
(self.retrieved_cosmos_rest_data_1, False, None)
mock_get_cosmos_rest_v0_42_6.return_value = retrieval_ret
+ mock_get_cosmos_rest_v0_50_1.return_value = \
+ (self.retrieved_cosmos_rest_data_1, False, None)
self.test_monitor._last_rest_retrieval_version = self.sdk_version_0_42_6
self.test_monitor._get_cosmos_rest_data()
mock_get_cosmos_rest_v0_39_2.assert_called_once()
mock_get_cosmos_rest_v0_42_6.assert_called_once()
+ mock_get_cosmos_rest_v0_50_1.assert_called_once()
+ mock_get_cosmos_rest_v0_39_2.reset_mock()
+ mock_get_cosmos_rest_v0_42_6.reset_mock()
+ mock_get_cosmos_rest_v0_50_1.reset_mock()
+
+        # Test for v0.50.1
+ mock_get_cosmos_rest_v0_39_2.return_value = \
+ (self.retrieved_cosmos_rest_data_1, False, None)
+ mock_get_cosmos_rest_v0_42_6.return_value = \
+ (self.retrieved_cosmos_rest_data_1, False, None)
+ mock_get_cosmos_rest_v0_50_1.return_value = retrieval_ret
+ self.test_monitor._last_rest_retrieval_version = self.sdk_version_0_50_1
+ self.test_monitor._get_cosmos_rest_data()
+ mock_get_cosmos_rest_v0_39_2.assert_called_once()
+ mock_get_cosmos_rest_v0_42_6.assert_called_once()
+ mock_get_cosmos_rest_v0_50_1.assert_called_once()
+ mock_get_cosmos_rest_v0_39_2.reset_mock()
+ mock_get_cosmos_rest_v0_42_6.reset_mock()
+ mock_get_cosmos_rest_v0_50_1.reset_mock()
@parameterized.expand([
(({}, True,
@@ -755,11 +883,12 @@ def test_get_cosmos_rest_data_attempts_other_rets_if_last_incompatible(
(({}, True, NodeIsDownException('node_name_1')),),
(({'indirect_key': 34}, False, None),),
])
+ @mock.patch.object(CosmosNodeMonitor, '_get_cosmos_rest_v0_50_1_data')
@mock.patch.object(CosmosNodeMonitor, '_get_cosmos_rest_v0_42_6_data')
@mock.patch.object(CosmosNodeMonitor, '_get_cosmos_rest_v0_39_2_data')
def test_get_cosmos_rest_data_ret_retrieval_ret_if_no_incompatibility_err(
self, retrieval_ret, mock_get_cosmos_rest_v0_39_2,
- mock_get_cosmos_rest_v0_42_6) -> None:
+            mock_get_cosmos_rest_v0_42_6,
+            mock_get_cosmos_rest_v0_50_1) -> None:
"""
In this test we will check that if data retrieval occurs without an
incompatibility error being returned, then the function returns whatever
@@ -769,20 +898,20 @@ def test_get_cosmos_rest_data_ret_retrieval_ret_if_no_incompatibility_err(
and then for when the last retrieval used function returns an
incompatibility error and another supported version is successful.
"""
- self.test_monitor._last_rest_retrieval_version = self.sdk_version_0_42_6
+ self.test_monitor._last_rest_retrieval_version = self.sdk_version_0_50_1
# Test for when the last used retrieval function does not return an
# incompatibility error
- mock_get_cosmos_rest_v0_42_6.return_value = retrieval_ret
+ mock_get_cosmos_rest_v0_50_1.return_value = retrieval_ret
actual_ret = self.test_monitor._get_cosmos_rest_data()
self.assertEqual(retrieval_ret, actual_ret)
- mock_get_cosmos_rest_v0_42_6.reset_mock()
+ mock_get_cosmos_rest_v0_50_1.reset_mock()
# Test for when the last used retrieval function returns an
# incompatibility error but other retrieval functions do not
- mock_get_cosmos_rest_v0_42_6.return_value = \
+ mock_get_cosmos_rest_v0_50_1.return_value = \
({}, True,
- CosmosSDKVersionIncompatibleException('node_name_1', 'v0.42.6'))
+ CosmosSDKVersionIncompatibleException('node_name_1', 'v0.50.1'))
mock_get_cosmos_rest_v0_39_2.return_value = retrieval_ret
actual_ret = self.test_monitor._get_cosmos_rest_data()
self.assertEqual(retrieval_ret, actual_ret)
@@ -792,11 +921,12 @@ def test_get_cosmos_rest_data_ret_retrieval_ret_if_no_incompatibility_err(
(({}, True,
CosmosSDKVersionIncompatibleException('node_name_1', 'version')),),
])
+ @mock.patch.object(CosmosNodeMonitor, '_get_cosmos_rest_v0_50_1_data')
@mock.patch.object(CosmosNodeMonitor, '_get_cosmos_rest_v0_42_6_data')
@mock.patch.object(CosmosNodeMonitor, '_get_cosmos_rest_v0_39_2_data')
def test_get_cosmos_rest_data_ret_if_incompatibility_issue_and_unsuccessful(
self, retrieval_ret, mock_get_cosmos_rest_v0_39_2,
- mock_get_cosmos_rest_v0_42_6) -> None:
+            mock_get_cosmos_rest_v0_42_6,
+            mock_get_cosmos_rest_v0_50_1) -> None:
"""
In this test we will check that if incompatibility issues persist for
every supported version, then the function returns ({}, True,
@@ -806,6 +936,7 @@ def test_get_cosmos_rest_data_ret_if_incompatibility_issue_and_unsuccessful(
"""
mock_get_cosmos_rest_v0_39_2.return_value = retrieval_ret
mock_get_cosmos_rest_v0_42_6.return_value = retrieval_ret
+ mock_get_cosmos_rest_v0_50_1.return_value = retrieval_ret
actual_ret = self.test_monitor._get_cosmos_rest_data()
expected_ret = ({}, True, CosmosRestServerDataCouldNotBeObtained(
self.data_sources[2].node_name))
@@ -910,7 +1041,44 @@ def test_determine_last_height_monitored_tendermint(
}
]
}
- }], [
+ }, {
+ "jsonrpc": "2.0",
+ "id": -1,
+ "result": {
+ "block_height": "9313442",
+ "validators": [
+ {
+ "address": "addr_5",
+ "pub_key": {
+ "type": "tendermint/PubKeyEd25519",
+ "value": "jzi1FcwjpprsVIqbXujev/Cfzwg7oFrybXmm/7jNeiI="
+ },
+ "voting_power": "121500000001",
+ "proposer_priority": "79425248532"
+ },
+ {
+ "address": "addr_6",
+ "pub_key": {
+ "type": "tendermint/PubKeyEd25519",
+ "value": "CqXadXTNawX+OiayRorBsMebtQx35TttI3IspuzaN/Q="
+ },
+ "voting_power": "121500000001",
+ "proposer_priority": "-204933381423"
+ },
+ {
+ "address": "addr_7",
+ "pub_key": {
+ "type": "tendermint/PubKeyEd25519",
+ "value": "nT9mxX1Ap7O6BdEnvs6ZUG7xuKD7kY0NWtu9GUZIBCk="
+ },
+ "voting_power": "121500000001",
+ "proposer_priority": "11980115931"
+ },
+ ],
+ "count": "5",
+ "total": "5"
+ }
+    }], [
{
"address": "addr_1",
"voting_power": "43",
@@ -926,7 +1094,34 @@ def test_determine_last_height_monitored_tendermint(
{
"address": "addr_4",
"voting_power": "46",
- }
+ },
+ {
+ "address": "addr_5",
+ "pub_key": {
+ "type": "tendermint/PubKeyEd25519",
+ "value": "jzi1FcwjpprsVIqbXujev/Cfzwg7oFrybXmm/7jNeiI="
+ },
+ "voting_power": "121500000001",
+ "proposer_priority": "79425248532"
+ },
+ {
+ "address": "addr_6",
+ "pub_key": {
+ "type": "tendermint/PubKeyEd25519",
+ "value": "CqXadXTNawX+OiayRorBsMebtQx35TttI3IspuzaN/Q="
+ },
+ "voting_power": "121500000001",
+ "proposer_priority": "-204933381423"
+ },
+ {
+ "address": "addr_7",
+ "pub_key": {
+ "type": "tendermint/PubKeyEd25519",
+ "value": "nT9mxX1Ap7O6BdEnvs6ZUG7xuKD7kY0NWtu9GUZIBCk="
+ },
+ "voting_power": "121500000001",
+ "proposer_priority": "11980115931"
+ },
],),
([{
"jsonrpc": "2.0",
@@ -973,8 +1168,17 @@ def test_parse_validators_list_parses_correctly(
{
"address": "addr_4",
"voting_power": "46",
- }
- ], 'addr_4', True,),
+ },
+ {
+ "address": "addr_5",
+ "pub_key": {
+ "type": "tendermint/PubKeyEd25519",
+ "value": "jzi1FcwjpprsVIqbXujev/Cfzwg7oFrybXmm/7jNeiI="
+ },
+ "voting_power": "121500000001",
+ "proposer_priority": "79425248532"
+ },
+ ], 'addr_5', True,),
([
{
"address": "addr_1",
@@ -991,8 +1195,17 @@ def test_parse_validators_list_parses_correctly(
{
"address": "addr_4",
"voting_power": "46",
- }
- ], 'addr_5', False,),
+ },
+ {
+ "address": "addr_5",
+ "pub_key": {
+ "type": "tendermint/PubKeyEd25519",
+ "value": "jzi1FcwjpprsVIqbXujev/Cfzwg7oFrybXmm/7jNeiI="
+ },
+ "voting_power": "121500000001",
+ "proposer_priority": "79425248532"
+ },
+ ], 'addr_6', False,),
([
{
"address": "addr_1",
@@ -1027,7 +1240,16 @@ def test_parse_validators_list_parses_correctly(
{
"address": "addr_4",
"voting_power": "46",
- }
+ },
+ {
+ "address": "addr_5",
+ "pub_key": {
+ "type": "tendermint/PubKeyEd25519",
+ "value": "jzi1FcwjpprsVIqbXujev/Cfzwg7oFrybXmm/7jNeiI="
+ },
+ "voting_power": "121500000001",
+ "proposer_priority": "79425248532"
+ },
], "", False,),
([], "addr_1", False,),
])
@@ -1042,15 +1264,13 @@ def test_is_validator_active_returns_correctly(
self.assertEqual(expected_return, actual_return)
@parameterized.expand([
- ([
- {"type": "transfer"},
+        ([
+            {"type": "transfer"},
{
"type": "slash",
"attributes": [
{
"key": "YWRkcmVzcw==",
- "value": 'Y29zbW9zdmFsY29uczEwdjdzcmE2NW1sdXl3bmtzdWR2'
- 'Z3p0NzV4bHNmOHp3dXpzcThyMw==',
+ "value": "Y29zbW9zdmFsY29uczEwdjdzcmE2NW1sdXl3bmtzdWR2Z3p0NzV4bHNmOHp3dXpzcThyMw==",
"index": True
},
{
@@ -1074,17 +1294,27 @@ def test_is_validator_active_returns_correctly(
"index": True
}
]
- }
+ },
], '7B3D01F754DFF8474ED0E358812FD437E09389DC', (True, None),),
([
- {"type": "transfer"},
+ {
+ "type": "slash",
+ "attributes": [
+ {
+ "key": "address",
+ "value": "cosmosvalcons10v7sra65mluywnksudvgzt75xlsf8zwuzsq8r3",
+ "index": True
+ }
+ ]
+ }
+ ], '7B3D01F754DFF8474ED0E358812FD437E09389DC', (True, None),),
+        ([
+            {"type": "transfer"},
{
"type": "slash",
"attributes": [
{
"key": "YWRkcmVzcw==",
- "value": 'Y29zbW9zdmFsY29uczEwdjdzcmE2NW1sdXl3b'
- 'mtzdWR2Z3p0NzV4bHNmOHp3dXpzcThyMw==',
+ "value": "Y29zbW9zdmFsY29uczEwdjdzcmE2NW1sdXl3bmtzdWR2Z3p0NzV4bHNmOHp3dXpzcThyMw==",
"index": True
},
{
@@ -1109,8 +1339,7 @@ def test_is_validator_active_returns_correctly(
"attributes": [
{
"key": "YWRkcmVzcw==",
- "value": 'Y29zbW9zdmFsY29uczEwdjdzcmE2NW1sdXl3bmt'
- 'zdWR2Z3p0NzV4bHNmOHp3dXpzcThyMw==',
+ "value": "Y29zbW9zdmFsY29uczEwdjdzcmE2NW1sdXl3bmtzdWR2Z3p0NzV4bHNmOHp3dXpzcThyMw==",
"index": True
},
{
@@ -1129,7 +1358,25 @@ def test_is_validator_active_returns_correctly(
"index": True
}
]
- },
+ }
+ ], '7B3D01F754DFF8474ED0E358812FD437E09389DC', (True, 2000),),
+ ([
+ {"type": "transfer"},
+ {
+ "type": "slash",
+ "attributes": [
+ {
+ "key": "address",
+ "value": "cosmosvalcons10v7sra65mluywnksudvgzt75xlsf8zwuzsq8r3",
+ "index": True
+ },
+ {
+ "key": "burned_coins",
+ "value": "2000",
+ "index": True
+ }
+ ]
+ }
], '7B3D01F754DFF8474ED0E358812FD437E09389DC', (True, 2000),),
([
{"type": "transfer"},
@@ -1168,7 +1415,7 @@ def test_is_validator_active_returns_correctly(
"index": True
}
]
- },
+ }
], 'addr_1', (False, None),),
([
{"type": "transfer"},
@@ -1226,6 +1473,7 @@ def test_validator_was_slashed_returns_correctly(
Given a number of scenarios we will check that the
_validator_was_slashed function will correctly determine if a validator
was slashed or not, and will give the amount if available.
+        Since the event attribute values may or may not be base64 encoded
+        (depending on the Tendermint/CometBFT version of the source), both
+        cases are covered.
"""
self.test_monitor._validator_consensus_address = consensus_address
actual_return = self.test_monitor._validator_was_slashed(
@@ -1343,7 +1591,7 @@ def test_get_tendermint_rpc_archive_data_validator_return(
mock_get_block_results.side_effect = [
{
'result': {
- 'begin_block_events': [
+ 'finalize_block_events': [
{
"type": "slash",
"attributes": [
@@ -1371,7 +1619,7 @@ def test_get_tendermint_rpc_archive_data_validator_return(
},
{
'result': {
- 'begin_block_events': [
+ 'finalize_block_events': [
{
"type": "slash",
"attributes": [
@@ -1399,7 +1647,7 @@ def test_get_tendermint_rpc_archive_data_validator_return(
},
{
'result': {
- 'begin_block_events': [
+ 'finalize_block_events': [
{
"type": "slash",
"attributes": [
diff --git a/alerter/test/monitors/test_cosmos.py b/alerter/test/monitors/test_cosmos.py
index 176f613a..0329e0b2 100644
--- a/alerter/test/monitors/test_cosmos.py
+++ b/alerter/test/monitors/test_cosmos.py
@@ -90,6 +90,7 @@ def setUp(self) -> None:
connection_check_time_interval=self.connection_check_time_interval)
self.sdk_version_0_39_2 = 'v0.39.2'
self.sdk_version_0_42_6 = 'v0.42.6'
+ self.sdk_version_0_50_1 = 'v0.50.1'
# Some dummy retrieval data
self.rest_ret_1 = {
@@ -176,8 +177,11 @@ def test_tendermint_rpc_api_returns_tendermint_rpc_api(self) -> None:
def test_last_rest_retrieval_version_returns_last_rest_retrieval_version(
self) -> None:
- # First we will check that last_rest_retrieval_version is set to v0.42.6
+ # First we will check that last_rest_retrieval_version is set to v0.50.1
# on __init__
+ self.assertEqual(self.sdk_version_0_50_1,
+ self.test_monitor.last_rest_retrieval_version)
+ self.test_monitor._last_rest_retrieval_version = self.sdk_version_0_42_6
self.assertEqual(self.sdk_version_0_42_6,
self.test_monitor.last_rest_retrieval_version)
self.test_monitor._last_rest_retrieval_version = self.sdk_version_0_39_2
@@ -293,7 +297,7 @@ def test_select_cosmos_rest_node_selects_first_reachable_synced_node(
"""
mock_execute_with_checks.return_value = {"syncing": False}
actual = self.test_monitor._select_cosmos_rest_node(
- self.data_sources, self.sdk_version_0_39_2)
+ self.data_sources, self.sdk_version_0_50_1)
self.assertEqual(self.data_sources[0], actual)
@mock.patch.object(CosmosRestServerApiWrapper, 'execute_with_checks')
@@ -310,7 +314,7 @@ def test_select_cosmos_rest_node_does_not_select_syncing_nodes(
{"syncing": False}
]
actual = self.test_monitor._select_cosmos_rest_node(
- self.data_sources, self.sdk_version_0_39_2)
+ self.data_sources, self.sdk_version_0_50_1)
self.assertEqual(self.data_sources[2], actual)
@parameterized.expand([
@@ -322,7 +326,7 @@ def test_select_cosmos_rest_node_does_not_select_syncing_nodes(
(IncompleteRead('test'),),
(ChunkedEncodingError('test'),),
(ProtocolError('test'),),
- (CosmosSDKVersionIncompatibleException('test_node', 'v0.39.2'),),
+ (CosmosSDKVersionIncompatibleException('test_node', 'v0.50.1'),),
(CosmosRestServerApiCallException('test_call', 'err_msg'),),
(KeyError('test'),),
])
@@ -343,7 +347,7 @@ def test_select_cosmos_rest_node_does_not_select_nodes_raising_expected_err(
{"syncing": False}
]
actual = self.test_monitor._select_cosmos_rest_node(
- self.data_sources, self.sdk_version_0_39_2)
+ self.data_sources, self.sdk_version_0_50_1)
self.assertEqual(self.data_sources[2], actual)
@parameterized.expand([
@@ -355,7 +359,7 @@ def test_select_cosmos_rest_node_does_not_select_nodes_raising_expected_err(
(IncompleteRead('test'),),
(ChunkedEncodingError('test'),),
(ProtocolError('test'),),
- (CosmosSDKVersionIncompatibleException('test_node', 'v0.39.2'),),
+ (CosmosSDKVersionIncompatibleException('test_node', 'v0.50.1'),),
(CosmosRestServerApiCallException('test_call', 'err_msg'),),
(KeyError('test'),),
])
@@ -374,7 +378,7 @@ def test_select_cosmos_rest_node_returns_None_if_no_node_selected(
{"syncing": True}
]
actual = self.test_monitor._select_cosmos_rest_node(
- self.data_sources, self.sdk_version_0_39_2)
+ self.data_sources, self.sdk_version_0_50_1)
self.assertIsNone(actual)
@mock.patch.object(TendermintRpcApiWrapper, 'execute_with_checks')
@@ -478,7 +482,7 @@ def test_cosmos_rest_reachable_returns_true_None_if_data_ret_successful(
mock_execute_with_checks.side_effect = {"syncing": False}
actual_reachable, actual_data_retrieval_exception = \
self.test_monitor._cosmos_rest_reachable(self.data_sources[2],
- self.sdk_version_0_39_2)
+ self.sdk_version_0_50_1)
self.assertTrue(actual_reachable)
self.assertIsNone(actual_data_retrieval_exception)
@@ -491,7 +495,7 @@ def test_cosmos_rest_reachable_returns_true_None_if_data_ret_successful(
(IncompleteRead('test'), DataReadingException,),
(ChunkedEncodingError('test'), DataReadingException,),
(ProtocolError('test'), DataReadingException,),
- (CosmosSDKVersionIncompatibleException('test_node', 'v0.39.2'),
+ (CosmosSDKVersionIncompatibleException('test_node', 'v0.50.1'),
CosmosSDKVersionIncompatibleException,),
(CosmosRestServerApiCallException('test_call', 'err_msg'),
CosmosRestServerApiCallException,),
@@ -509,7 +513,7 @@ def test_cosmos_rest_reachable_returns_false_err_if_expected_err_raised(
mock_execute_with_checks.side_effect = raised_exception
actual_reachable, actual_data_retrieval_exception = \
self.test_monitor._cosmos_rest_reachable(self.data_sources[2],
- self.sdk_version_0_39_2)
+ self.sdk_version_0_50_1)
self.assertFalse(actual_reachable)
self.assertIsInstance(actual_data_retrieval_exception,
returned_exception_type)
@@ -523,7 +527,7 @@ def test_fn():
source_url = self.data_sources[0].cosmos_rest_url
actual_ret = \
self.test_monitor._execute_cosmos_rest_retrieval_with_exceptions(
- test_fn, source_name, source_url, self.sdk_version_0_39_2
+ test_fn, source_name, source_url, self.sdk_version_0_50_1
)
self.assertEqual(self.test_data_dict, actual_ret)
@@ -536,7 +540,7 @@ def test_fn():
(IncompleteRead('test'), DataReadingException,),
(ChunkedEncodingError('test'), DataReadingException,),
(ProtocolError('test'), DataReadingException,),
- (CosmosSDKVersionIncompatibleException('test_node', 'v0.39.2'),
+ (CosmosSDKVersionIncompatibleException('test_node', 'v0.50.1'),
CosmosSDKVersionIncompatibleException,),
(CosmosRestServerApiCallException('test_call', 'err_msg'),
CosmosRestServerApiCallException,),
@@ -552,7 +556,7 @@ def test_fn():
self.assertRaises(
expected_raised_exception,
self.test_monitor._execute_cosmos_rest_retrieval_with_exceptions,
- test_fn, source_name, source_url, self.sdk_version_0_39_2
+ test_fn, source_name, source_url, self.sdk_version_0_50_1
)
@parameterized.expand([
@@ -646,7 +650,7 @@ def test_fn():
mock_execute.side_effect = [self.rest_ret_1, self.rest_ret_2]
node_name = self.data_sources[0].node_name
actual_ret = self.test_monitor._get_rest_data_with_pagination_keys(
- test_fn, [], {}, node_name, self.sdk_version_0_42_6)
+ test_fn, [], {}, node_name, self.sdk_version_0_50_1)
self.assertEqual([self.rest_ret_1, self.rest_ret_2], actual_ret)
@mock.patch.object(CosmosRestServerApiWrapper, 'execute_with_checks')
@@ -673,13 +677,13 @@ def test_fn():
}
]
self.test_monitor._get_rest_data_with_pagination_keys(
- test_fn, test_args, test_params, node_name, self.sdk_version_0_42_6)
+ test_fn, test_args, test_params, node_name, self.sdk_version_0_50_1)
calls = mock_execute.call_args_list
self.assertEqual(2, len(calls))
mock_execute.assert_has_calls([
- call(test_fn, test_args_first, node_name, self.sdk_version_0_42_6),
- call(test_fn, test_args_second, node_name, self.sdk_version_0_42_6),
+ call(test_fn, test_args_first, node_name, self.sdk_version_0_50_1),
+ call(test_fn, test_args_second, node_name, self.sdk_version_0_50_1),
])
@mock.patch.object(TendermintRpcApiWrapper, 'execute_with_checks')
diff --git a/api/Dockerfile b/api/Dockerfile
index f7266ddf..edaab4b9 100644
--- a/api/Dockerfile
+++ b/api/Dockerfile
@@ -1,4 +1,6 @@
-FROM node:14
+FROM node:20-alpine
+
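+# bash is not part of Alpine images (BusyBox ash only), and the run scripts
+# (e.g. run_server.sh) declare a bash shebang.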
+RUN apk add --no-cache bash
# Create app directory
WORKDIR /opt/panic
@@ -8,7 +10,7 @@ COPY ./entities ./entities
# Change directory, and copy all installer contents from the host to the
# container.
-WORKDIR ./api
+WORKDIR /opt/panic/api
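+# Use an absolute path: a relative WORKDIR resolves against whatever WORKDIR
+# preceded it, which silently breaks if earlier build steps are reordered.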
COPY ./api ./
# RUN npm install (ci)
diff --git a/api/package-lock.json b/api/package-lock.json
index 24c14465..a6e6c542 100644
--- a/api/package-lock.json
+++ b/api/package-lock.json
@@ -17,8 +17,8 @@
"dotenv": "^10.0.0",
"express": "^4.17.1",
"mockingoose": "^2.15.2",
- "mongodb": "^3.6.9",
- "mongoose": "^6.4.6",
+ "mongodb": "^6.15.0",
+ "mongoose": "^8.13.2",
"nodemailer": "^6.7.5",
"opsgenie-sdk": "^0.5.1",
"redis": "^3.1.2",
@@ -33,7 +33,6 @@
"@types/express": "^4.17.12",
"@types/jest": "^27.0.2",
"@types/mongodb": "^3.6.17",
- "@types/mongoose": "^5.11.97",
"@types/node": "^15.12.2",
"@types/redis": "^2.8.29",
"@types/supertest": "^2.0.11",
@@ -1379,6 +1378,15 @@
"@jridgewell/sourcemap-codec": "^1.4.10"
}
},
+ "node_modules/@mongodb-js/saslprep": {
+ "version": "1.2.2",
+ "resolved": "https://registry.npmjs.org/@mongodb-js/saslprep/-/saslprep-1.2.2.tgz",
+ "integrity": "sha512-EB0O3SCSNRUFk66iRCpI+cXzIjdswfCs7F6nOC3RAGJ7xr5YhaicvsRwJ9eyzYvYRlCSDUO/c7g4yNulxKC1WA==",
+ "license": "MIT",
+ "dependencies": {
+ "sparse-bitfield": "^3.0.3"
+ }
+ },
"node_modules/@pagerduty/pdjs": {
"version": "2.2.4",
"resolved": "https://registry.npmjs.org/@pagerduty/pdjs/-/pdjs-2.2.4.tgz",
@@ -1725,16 +1733,6 @@
"@types/node": "*"
}
},
- "node_modules/@types/mongoose": {
- "version": "5.11.97",
- "resolved": "https://registry.npmjs.org/@types/mongoose/-/mongoose-5.11.97.tgz",
- "integrity": "sha512-cqwOVYT3qXyLiGw7ueU2kX9noE8DPGRY6z8eUxudhXY8NZ7DMKYAxyZkLSevGfhCX3dO/AoX5/SO9lAzfjon0Q==",
- "deprecated": "Mongoose publishes its own types, so you do not need to install this package.",
- "dev": true,
- "dependencies": {
- "mongoose": "*"
- }
- },
"node_modules/@types/node": {
"version": "15.14.9",
"resolved": "https://registry.npmjs.org/@types/node/-/node-15.14.9.tgz",
@@ -1832,16 +1830,17 @@
}
},
"node_modules/@types/webidl-conversions": {
- "version": "6.1.1",
- "resolved": "https://registry.npmjs.org/@types/webidl-conversions/-/webidl-conversions-6.1.1.tgz",
- "integrity": "sha512-XAahCdThVuCFDQLT7R7Pk/vqeObFNL3YqRyFZg+AqAP/W1/w3xHaIxuW7WszQqTbIBOPRcItYJIou3i/mppu3Q=="
+ "version": "7.0.3",
+ "resolved": "https://registry.npmjs.org/@types/webidl-conversions/-/webidl-conversions-7.0.3.tgz",
+ "integrity": "sha512-CiJJvcRtIgzadHCYXw7dqEnMNRjhGZlYK05Mj9OyktqV8uVT8fD2BFOB7S1uwBE3Kj2Z+4UyPmFw/Ixgw/LAlA==",
+ "license": "MIT"
},
"node_modules/@types/whatwg-url": {
- "version": "8.2.2",
- "resolved": "https://registry.npmjs.org/@types/whatwg-url/-/whatwg-url-8.2.2.tgz",
- "integrity": "sha512-FtQu10RWgn3D9U4aazdwIE2yzphmTJREDqNdODHrbrZmmMqI0vMheC/6NE/J1Yveaj8H+ela+YwWTjq5PGmuhA==",
+ "version": "11.0.5",
+ "resolved": "https://registry.npmjs.org/@types/whatwg-url/-/whatwg-url-11.0.5.tgz",
+ "integrity": "sha512-coYR071JRaHa+xoEvvYqvnIHaVqaYrLPbsufM9BF63HkwI5Lgmy2QR8Q5K/lYDYo5AK82wOvSOS0UsLTpTG7uQ==",
+ "license": "MIT",
"dependencies": {
- "@types/node": "*",
"@types/webidl-conversions": "*"
}
},
@@ -2274,15 +2273,6 @@
"node": "*"
}
},
- "node_modules/bl": {
- "version": "2.2.1",
- "resolved": "https://registry.npmjs.org/bl/-/bl-2.2.1.tgz",
- "integrity": "sha512-6Pesp1w0DEX1N550i/uGV/TqucVL4AM/pgThFSN/Qq9si1/DF9aIHs1BxD8V/QU0HoeHO6cQRTAuYnLPKq1e4g==",
- "dependencies": {
- "readable-stream": "^2.3.5",
- "safe-buffer": "^5.1.1"
- }
- },
"node_modules/blakejs": {
"version": "1.2.1",
"resolved": "https://registry.npmjs.org/blakejs/-/blakejs-1.2.1.tgz",
@@ -2509,14 +2499,12 @@
}
},
"node_modules/bson": {
- "version": "4.6.5",
- "resolved": "https://registry.npmjs.org/bson/-/bson-4.6.5.tgz",
- "integrity": "sha512-uqrgcjyOaZsHfz7ea8zLRCLe1u+QGUSzMZmvXqO24CDW7DWoW1qiN9folSwa7hSneTSgM2ykDIzF5kcQQ8cwNw==",
- "dependencies": {
- "buffer": "^5.6.0"
- },
+ "version": "6.10.3",
+ "resolved": "https://registry.npmjs.org/bson/-/bson-6.10.3.tgz",
+ "integrity": "sha512-MTxGsqgYTwfshYWTRdmZRC+M7FnG1b4y7RO7p2k3X24Wq0yv1m77Wsj0BzlPzd/IowgESfsruQCUToa7vbOpPQ==",
+ "license": "Apache-2.0",
"engines": {
- "node": ">=6.9.0"
+ "node": ">=16.20.1"
}
},
"node_modules/buffer": {
@@ -2930,11 +2918,6 @@
"integrity": "sha512-JxbCBUdrfr6AQjOXrxoTvAMJO4HBTUIlBzslcJPAz+/KT8yk53fXun51u+RenNYvad/+Vc2DIz5o9UxlCDymFQ==",
"dev": true
},
- "node_modules/core-util-is": {
- "version": "1.0.3",
- "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.3.tgz",
- "integrity": "sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ=="
- },
"node_modules/cors": {
"version": "2.8.5",
"resolved": "https://registry.npmjs.org/cors/-/cors-2.8.5.tgz",
@@ -4631,11 +4614,6 @@
"node": ">= 0.4"
}
},
- "node_modules/ip": {
- "version": "2.0.0",
- "resolved": "https://registry.npmjs.org/ip/-/ip-2.0.0.tgz",
- "integrity": "sha512-WKa+XuLG1A1R0UWhl2+1XQSi+fZWMsYKffMZTTYsiZaUD8k2yDAj5atimTUD2TZkyCkNEeYE5NhFZmupOGtjYQ=="
- },
"node_modules/ipaddr.js": {
"version": "1.9.1",
"resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.9.1.tgz",
@@ -4939,11 +4917,6 @@
"url": "https://github.com/sponsors/ljharb"
}
},
- "node_modules/isarray": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz",
- "integrity": "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ=="
- },
"node_modules/isexe": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz",
@@ -5899,9 +5872,13 @@
}
},
"node_modules/kareem": {
- "version": "2.4.1",
- "resolved": "https://registry.npmjs.org/kareem/-/kareem-2.4.1.tgz",
- "integrity": "sha512-aJ9opVoXroQUPfovYP5kaj2lM7Jn02Gw13bL0lg9v0V7SaUc0qavPs0Eue7d2DcC3NjqI6QAUElXNsuZSeM+EA=="
+ "version": "2.6.3",
+ "resolved": "https://registry.npmjs.org/kareem/-/kareem-2.6.3.tgz",
+ "integrity": "sha512-C3iHfuGUXK2u8/ipq9LfjFfXFxAZMQJJq7vLS45r3D9Y2xQ/m4S8zaR4zMLFWh9AsNPXmcFfUDhTEO8UIC/V6Q==",
+ "license": "Apache-2.0",
+ "engines": {
+ "node": ">=12.0.0"
+ }
},
"node_modules/keccak": {
"version": "3.0.2",
@@ -6114,8 +6091,7 @@
"node_modules/memory-pager": {
"version": "1.5.0",
"resolved": "https://registry.npmjs.org/memory-pager/-/memory-pager-1.5.0.tgz",
- "integrity": "sha512-ZS4Bp4r/Zoeq6+NLJpP+0Zzm0pR8whtGPf1XExKLJBAczGMnSi3It14OiNCStjQjM6NU1okjQGSxgEZN8eBYKg==",
- "optional": true
+ "integrity": "sha512-ZS4Bp4r/Zoeq6+NLJpP+0Zzm0pR8whtGPf1XExKLJBAczGMnSi3It14OiNCStjQjM6NU1okjQGSxgEZN8eBYKg=="
},
"node_modules/merge-descriptors": {
"version": "1.0.1",
@@ -6300,27 +6276,35 @@
}
},
"node_modules/mongodb": {
- "version": "3.7.3",
- "resolved": "https://registry.npmjs.org/mongodb/-/mongodb-3.7.3.tgz",
- "integrity": "sha512-Psm+g3/wHXhjBEktkxXsFMZvd3nemI0r3IPsE0bU+4//PnvNWKkzhZcEsbPcYiWqe8XqXJJEg4Tgtr7Raw67Yw==",
- "dependencies": {
- "bl": "^2.2.1",
- "bson": "^1.1.4",
- "denque": "^1.4.1",
- "optional-require": "^1.1.8",
- "safe-buffer": "^5.1.2"
+ "version": "6.15.0",
+ "resolved": "https://registry.npmjs.org/mongodb/-/mongodb-6.15.0.tgz",
+ "integrity": "sha512-ifBhQ0rRzHDzqp9jAQP6OwHSH7dbYIQjD3SbJs9YYk9AikKEettW/9s/tbSFDTpXcRbF+u1aLrhHxDFaYtZpFQ==",
+ "license": "Apache-2.0",
+ "dependencies": {
+ "@mongodb-js/saslprep": "^1.1.9",
+ "bson": "^6.10.3",
+ "mongodb-connection-string-url": "^3.0.0"
},
"engines": {
- "node": ">=4"
+ "node": ">=16.20.1"
},
- "optionalDependencies": {
- "saslprep": "^1.0.0"
+ "peerDependencies": {
+ "@aws-sdk/credential-providers": "^3.188.0",
+ "@mongodb-js/zstd": "^1.1.0 || ^2.0.0",
+ "gcp-metadata": "^5.2.0",
+ "kerberos": "^2.0.1",
+ "mongodb-client-encryption": ">=6.0.0 <7",
+ "snappy": "^7.2.2",
+ "socks": "^2.7.1"
},
"peerDependenciesMeta": {
- "aws4": {
+ "@aws-sdk/credential-providers": {
"optional": true
},
- "bson-ext": {
+ "@mongodb-js/zstd": {
+ "optional": true
+ },
+ "gcp-metadata": {
"optional": true
},
"kerberos": {
@@ -6329,108 +6313,80 @@
"mongodb-client-encryption": {
"optional": true
},
- "mongodb-extjson": {
+ "snappy": {
"optional": true
},
- "snappy": {
+ "socks": {
"optional": true
}
}
},
"node_modules/mongodb-connection-string-url": {
- "version": "2.5.3",
- "resolved": "https://registry.npmjs.org/mongodb-connection-string-url/-/mongodb-connection-string-url-2.5.3.tgz",
- "integrity": "sha512-f+/WsED+xF4B74l3k9V/XkTVj5/fxFH2o5ToKXd8Iyi5UhM+sO9u0Ape17Mvl/GkZaFtM0HQnzAG5OTmhKw+tQ==",
+ "version": "3.0.2",
+ "resolved": "https://registry.npmjs.org/mongodb-connection-string-url/-/mongodb-connection-string-url-3.0.2.tgz",
+ "integrity": "sha512-rMO7CGo/9BFwyZABcKAWL8UJwH/Kc2x0g72uhDWzG48URRax5TCIcJ7Rc3RZqffZzO/Gwff/jyKwCU9TN8gehA==",
+ "license": "Apache-2.0",
"dependencies": {
- "@types/whatwg-url": "^8.2.1",
- "whatwg-url": "^11.0.0"
+ "@types/whatwg-url": "^11.0.2",
+ "whatwg-url": "^14.1.0 || ^13.0.0"
}
},
"node_modules/mongodb-connection-string-url/node_modules/tr46": {
- "version": "3.0.0",
- "resolved": "https://registry.npmjs.org/tr46/-/tr46-3.0.0.tgz",
- "integrity": "sha512-l7FvfAHlcmulp8kr+flpQZmVwtu7nfRV7NZujtN0OqES8EL4O4e0qqzL0DC5gAvx/ZC/9lk6rhcUwYvkBnBnYA==",
+ "version": "5.1.1",
+ "resolved": "https://registry.npmjs.org/tr46/-/tr46-5.1.1.tgz",
+ "integrity": "sha512-hdF5ZgjTqgAntKkklYw0R03MG2x/bSzTtkxmIRw/sTNV8YXsCJ1tfLAX23lhxhHJlEf3CRCOCGGWw3vI3GaSPw==",
+ "license": "MIT",
"dependencies": {
- "punycode": "^2.1.1"
+ "punycode": "^2.3.1"
},
"engines": {
- "node": ">=12"
+ "node": ">=18"
}
},
"node_modules/mongodb-connection-string-url/node_modules/webidl-conversions": {
"version": "7.0.0",
"resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-7.0.0.tgz",
"integrity": "sha512-VwddBukDzu71offAQR975unBIGqfKZpM+8ZX6ySk8nYhVoo5CYaZyzt3YBvYtRtO+aoGlqxPg/B87NGVZ/fu6g==",
+ "license": "BSD-2-Clause",
"engines": {
"node": ">=12"
}
},
"node_modules/mongodb-connection-string-url/node_modules/whatwg-url": {
- "version": "11.0.0",
- "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-11.0.0.tgz",
- "integrity": "sha512-RKT8HExMpoYx4igMiVMY83lN6UeITKJlBQ+vR/8ZJ8OCdSiN3RwCq+9gH0+Xzj0+5IrM6i4j/6LuvzbZIQgEcQ==",
+ "version": "14.2.0",
+ "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-14.2.0.tgz",
+ "integrity": "sha512-De72GdQZzNTUBBChsXueQUnPKDkg/5A5zp7pFDuQAj5UFoENpiACU0wlCvzpAGnTkj++ihpKwKyYewn/XNUbKw==",
+ "license": "MIT",
"dependencies": {
- "tr46": "^3.0.0",
+ "tr46": "^5.1.0",
"webidl-conversions": "^7.0.0"
},
"engines": {
- "node": ">=12"
- }
- },
- "node_modules/mongodb/node_modules/bson": {
- "version": "1.1.6",
- "resolved": "https://registry.npmjs.org/bson/-/bson-1.1.6.tgz",
- "integrity": "sha512-EvVNVeGo4tHxwi8L6bPj3y3itEvStdwvvlojVxxbyYfoaxJ6keLgrTuKdyfEAszFK+H3olzBuafE0yoh0D1gdg==",
- "engines": {
- "node": ">=0.6.19"
+ "node": ">=18"
}
},
"node_modules/mongoose": {
- "version": "6.5.2",
- "resolved": "https://registry.npmjs.org/mongoose/-/mongoose-6.5.2.tgz",
- "integrity": "sha512-3CFDrSLtK2qjM1pZeZpLTUyqPRkc11Iuh74ZrwS4IwEJ3K2PqGnmyPLw7ex4Kzu37ujIMp3MAuiBlUjfrcb6hw==",
+ "version": "8.13.2",
+ "resolved": "https://registry.npmjs.org/mongoose/-/mongoose-8.13.2.tgz",
+ "integrity": "sha512-riCBqZmNkYBWjXpM3qWLDQw7QmTKsVZDPhLXFJqC87+OjocEVpvS3dA2BPPUiLAu+m0/QmEj5pSXKhH+/DgerQ==",
+ "license": "MIT",
"dependencies": {
- "bson": "^4.6.5",
- "kareem": "2.4.1",
- "mongodb": "4.8.1",
+ "bson": "^6.10.3",
+ "kareem": "2.6.3",
+ "mongodb": "~6.15.0",
"mpath": "0.9.0",
- "mquery": "4.0.3",
+ "mquery": "5.0.0",
"ms": "2.1.3",
- "sift": "16.0.0"
+ "sift": "17.1.3"
},
"engines": {
- "node": ">=12.0.0"
+ "node": ">=16.20.1"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/mongoose"
}
},
- "node_modules/mongoose/node_modules/denque": {
- "version": "2.1.0",
- "resolved": "https://registry.npmjs.org/denque/-/denque-2.1.0.tgz",
- "integrity": "sha512-HVQE3AAb/pxF8fQAoiqpvg9i3evqug3hoiwakOyZAwJm+6vZehbkYXZ0l4JxS+I3QxM97v5aaRNhj8v5oBhekw==",
- "engines": {
- "node": ">=0.10"
- }
- },
- "node_modules/mongoose/node_modules/mongodb": {
- "version": "4.8.1",
- "resolved": "https://registry.npmjs.org/mongodb/-/mongodb-4.8.1.tgz",
- "integrity": "sha512-/NyiM3Ox9AwP5zrfT9TXjRKDJbXlLaUDQ9Rg//2lbg8D2A8GXV0VidYYnA/gfdK6uwbnL4FnAflH7FbGw3TS7w==",
- "dependencies": {
- "bson": "^4.6.5",
- "denque": "^2.0.1",
- "mongodb-connection-string-url": "^2.5.2",
- "socks": "^2.6.2"
- },
- "engines": {
- "node": ">=12.9.0"
- },
- "optionalDependencies": {
- "saslprep": "^1.0.3"
- }
- },
"node_modules/mongoose/node_modules/ms": {
"version": "2.1.3",
"resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz",
@@ -6445,22 +6401,24 @@
}
},
"node_modules/mquery": {
- "version": "4.0.3",
- "resolved": "https://registry.npmjs.org/mquery/-/mquery-4.0.3.tgz",
- "integrity": "sha512-J5heI+P08I6VJ2Ky3+33IpCdAvlYGTSUjwTPxkAr8i8EoduPMBX2OY/wa3IKZIQl7MU4SbFk8ndgSKyB/cl1zA==",
+ "version": "5.0.0",
+ "resolved": "https://registry.npmjs.org/mquery/-/mquery-5.0.0.tgz",
+ "integrity": "sha512-iQMncpmEK8R8ncT8HJGsGc9Dsp8xcgYMVSbs5jgnm1lFHTZqMJTUWTDx1LBO8+mK3tPNZWFLBghQEIOULSTHZg==",
+ "license": "MIT",
"dependencies": {
"debug": "4.x"
},
"engines": {
- "node": ">=12.0.0"
+ "node": ">=14.0.0"
}
},
"node_modules/mquery/node_modules/debug": {
- "version": "4.3.4",
- "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz",
- "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==",
+ "version": "4.4.0",
+ "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.0.tgz",
+ "integrity": "sha512-6WTZ/IxCY/T6BALoZHaE4ctp9xm+Z5kY/pzYaCHRFeyVhojxlrm+46y68HA6hr0TcwEssoxNiDEUJQjfPZ/RYA==",
+ "license": "MIT",
"dependencies": {
- "ms": "2.1.2"
+ "ms": "^2.1.3"
},
"engines": {
"node": ">=6.0"
@@ -6472,9 +6430,10 @@
}
},
"node_modules/mquery/node_modules/ms": {
- "version": "2.1.2",
- "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz",
- "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w=="
+ "version": "2.1.3",
+ "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz",
+ "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==",
+ "license": "MIT"
},
"node_modules/ms": {
"version": "2.0.0",
@@ -6775,17 +6734,6 @@
"node": ">= v0.6.0"
}
},
- "node_modules/optional-require": {
- "version": "1.1.8",
- "resolved": "https://registry.npmjs.org/optional-require/-/optional-require-1.1.8.tgz",
- "integrity": "sha512-jq83qaUb0wNg9Krv1c5OQ+58EK+vHde6aBPzLvPPqJm89UQWsvSuFy9X/OSNJnFeSOKo7btE0n8Nl2+nE+z5nA==",
- "dependencies": {
- "require-at": "^1.0.6"
- },
- "engines": {
- "node": ">=4"
- }
- },
"node_modules/optionator": {
"version": "0.8.3",
"resolved": "https://registry.npmjs.org/optionator/-/optionator-0.8.3.tgz",
@@ -7100,11 +7048,6 @@
"node": ">= 0.6.0"
}
},
- "node_modules/process-nextick-args": {
- "version": "2.0.1",
- "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz",
- "integrity": "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag=="
- },
"node_modules/prompts": {
"version": "2.4.2",
"resolved": "https://registry.npmjs.org/prompts/-/prompts-2.4.2.tgz",
@@ -7158,9 +7101,10 @@
}
},
"node_modules/punycode": {
- "version": "2.1.1",
- "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz",
- "integrity": "sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==",
+ "version": "2.3.1",
+ "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz",
+ "integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==",
+ "license": "MIT",
"engines": {
"node": ">=6"
}
@@ -7263,25 +7207,6 @@
"integrity": "sha512-w2GsyukL62IJnlaff/nRegPQR94C/XXamvMWmSHRJ4y7Ts/4ocGRmTHvOs8PSE6pB3dWOrD/nueuU5sduBsQ4w==",
"dev": true
},
- "node_modules/readable-stream": {
- "version": "2.3.7",
- "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz",
- "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==",
- "dependencies": {
- "core-util-is": "~1.0.0",
- "inherits": "~2.0.3",
- "isarray": "~1.0.0",
- "process-nextick-args": "~2.0.0",
- "safe-buffer": "~5.1.1",
- "string_decoder": "~1.1.1",
- "util-deprecate": "~1.0.1"
- }
- },
- "node_modules/readable-stream/node_modules/safe-buffer": {
- "version": "5.1.2",
- "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz",
- "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g=="
- },
"node_modules/redis": {
"version": "3.1.2",
"resolved": "https://registry.npmjs.org/redis/-/redis-3.1.2.tgz",
@@ -7416,14 +7341,6 @@
"request": "2.*.*"
}
},
- "node_modules/require-at": {
- "version": "1.0.6",
- "resolved": "https://registry.npmjs.org/require-at/-/require-at-1.0.6.tgz",
- "integrity": "sha512-7i1auJbMUrXEAZCOQ0VNJgmcT2VOKPRl2YGJwgpHpC9CE91Mv4/4UYIUm4chGJaI381ZDq1JUicFii64Hapd8g==",
- "engines": {
- "node": ">=4"
- }
- },
"node_modules/require-directory": {
"version": "2.1.1",
"resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz",
@@ -7586,18 +7503,6 @@
"resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz",
"integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg=="
},
- "node_modules/saslprep": {
- "version": "1.0.3",
- "resolved": "https://registry.npmjs.org/saslprep/-/saslprep-1.0.3.tgz",
- "integrity": "sha512-/MY/PEMbk2SuY5sScONwhUDsV2p77Znkb/q3nSVstq/yQzYJOH/Azh29p9oJLsl3LnQwSvZDKagDGBsBwSooag==",
- "optional": true,
- "dependencies": {
- "sparse-bitfield": "^3.0.3"
- },
- "engines": {
- "node": ">=6"
- }
- },
"node_modules/saxes": {
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/saxes/-/saxes-5.0.1.tgz",
@@ -7757,9 +7662,10 @@
}
},
"node_modules/sift": {
- "version": "16.0.0",
- "resolved": "https://registry.npmjs.org/sift/-/sift-16.0.0.tgz",
- "integrity": "sha512-ILTjdP2Mv9V1kIxWMXeMTIRbOBrqKc4JAXmFMnFq3fKeyQ2Qwa3Dw1ubcye3vR+Y6ofA0b9gNDr/y2t6eUeIzQ=="
+ "version": "17.1.3",
+ "resolved": "https://registry.npmjs.org/sift/-/sift-17.1.3.tgz",
+ "integrity": "sha512-Rtlj66/b0ICeFzYTuNvX/EF1igRbbnGSvEyT79McoZa/DeGhMyC5pWKOEsZKnpkqtSeovd5FL/bjHWC3CIIvCQ==",
+ "license": "MIT"
},
"node_modules/signal-exit": {
"version": "3.0.7",
@@ -7822,28 +7728,6 @@
"node": ">=8"
}
},
- "node_modules/smart-buffer": {
- "version": "4.2.0",
- "resolved": "https://registry.npmjs.org/smart-buffer/-/smart-buffer-4.2.0.tgz",
- "integrity": "sha512-94hK0Hh8rPqQl2xXc3HsaBoOXKV20MToPkcXvwbISWLEs+64sBq5kFgn2kJDHb1Pry9yrP0dxrCI9RRci7RXKg==",
- "engines": {
- "node": ">= 6.0.0",
- "npm": ">= 3.0.0"
- }
- },
- "node_modules/socks": {
- "version": "2.7.0",
- "resolved": "https://registry.npmjs.org/socks/-/socks-2.7.0.tgz",
- "integrity": "sha512-scnOe9y4VuiNUULJN72GrM26BNOjVsfPXI+j+98PkyEfsIXroa5ofyjT+FzGvn/xHs73U2JtoBYAVx9Hl4quSA==",
- "dependencies": {
- "ip": "^2.0.0",
- "smart-buffer": "^4.2.0"
- },
- "engines": {
- "node": ">= 10.13.0",
- "npm": ">= 3.0.0"
- }
- },
"node_modules/source-map": {
"version": "0.6.1",
"resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz",
@@ -7867,7 +7751,6 @@
"version": "3.0.3",
"resolved": "https://registry.npmjs.org/sparse-bitfield/-/sparse-bitfield-3.0.3.tgz",
"integrity": "sha512-kvzhi7vqKTfkh0PZU+2D2PIllw2ymqJKujUcyPMd9Y75Nv4nPbGJZXNhxsgdQab2BmlDct1YnfQCguEvHr7VsQ==",
- "optional": true,
"dependencies": {
"memory-pager": "^1.0.2"
}
@@ -10551,6 +10434,14 @@
"@jridgewell/sourcemap-codec": "^1.4.10"
}
},
+ "@mongodb-js/saslprep": {
+ "version": "1.2.2",
+ "resolved": "https://registry.npmjs.org/@mongodb-js/saslprep/-/saslprep-1.2.2.tgz",
+ "integrity": "sha512-EB0O3SCSNRUFk66iRCpI+cXzIjdswfCs7F6nOC3RAGJ7xr5YhaicvsRwJ9eyzYvYRlCSDUO/c7g4yNulxKC1WA==",
+ "requires": {
+ "sparse-bitfield": "^3.0.3"
+ }
+ },
"@pagerduty/pdjs": {
"version": "2.2.4",
"resolved": "https://registry.npmjs.org/@pagerduty/pdjs/-/pdjs-2.2.4.tgz",
@@ -10869,15 +10760,6 @@
"@types/node": "*"
}
},
- "@types/mongoose": {
- "version": "5.11.97",
- "resolved": "https://registry.npmjs.org/@types/mongoose/-/mongoose-5.11.97.tgz",
- "integrity": "sha512-cqwOVYT3qXyLiGw7ueU2kX9noE8DPGRY6z8eUxudhXY8NZ7DMKYAxyZkLSevGfhCX3dO/AoX5/SO9lAzfjon0Q==",
- "dev": true,
- "requires": {
- "mongoose": "*"
- }
- },
"@types/node": {
"version": "15.14.9",
"resolved": "https://registry.npmjs.org/@types/node/-/node-15.14.9.tgz",
@@ -10975,16 +10857,15 @@
}
},
"@types/webidl-conversions": {
- "version": "6.1.1",
- "resolved": "https://registry.npmjs.org/@types/webidl-conversions/-/webidl-conversions-6.1.1.tgz",
- "integrity": "sha512-XAahCdThVuCFDQLT7R7Pk/vqeObFNL3YqRyFZg+AqAP/W1/w3xHaIxuW7WszQqTbIBOPRcItYJIou3i/mppu3Q=="
+ "version": "7.0.3",
+ "resolved": "https://registry.npmjs.org/@types/webidl-conversions/-/webidl-conversions-7.0.3.tgz",
+ "integrity": "sha512-CiJJvcRtIgzadHCYXw7dqEnMNRjhGZlYK05Mj9OyktqV8uVT8fD2BFOB7S1uwBE3Kj2Z+4UyPmFw/Ixgw/LAlA=="
},
"@types/whatwg-url": {
- "version": "8.2.2",
- "resolved": "https://registry.npmjs.org/@types/whatwg-url/-/whatwg-url-8.2.2.tgz",
- "integrity": "sha512-FtQu10RWgn3D9U4aazdwIE2yzphmTJREDqNdODHrbrZmmMqI0vMheC/6NE/J1Yveaj8H+ela+YwWTjq5PGmuhA==",
+ "version": "11.0.5",
+ "resolved": "https://registry.npmjs.org/@types/whatwg-url/-/whatwg-url-11.0.5.tgz",
+ "integrity": "sha512-coYR071JRaHa+xoEvvYqvnIHaVqaYrLPbsufM9BF63HkwI5Lgmy2QR8Q5K/lYDYo5AK82wOvSOS0UsLTpTG7uQ==",
"requires": {
- "@types/node": "*",
"@types/webidl-conversions": "*"
}
},
@@ -11319,15 +11200,6 @@
"resolved": "https://registry.npmjs.org/bignumber.js/-/bignumber.js-9.1.0.tgz",
"integrity": "sha512-4LwHK4nfDOraBCtst+wOWIHbu1vhvAPJK8g8nROd4iuc3PSEjWif/qwbkh8jwCJz6yDBvtU4KPynETgrfh7y3A=="
},
- "bl": {
- "version": "2.2.1",
- "resolved": "https://registry.npmjs.org/bl/-/bl-2.2.1.tgz",
- "integrity": "sha512-6Pesp1w0DEX1N550i/uGV/TqucVL4AM/pgThFSN/Qq9si1/DF9aIHs1BxD8V/QU0HoeHO6cQRTAuYnLPKq1e4g==",
- "requires": {
- "readable-stream": "^2.3.5",
- "safe-buffer": "^5.1.1"
- }
- },
"blakejs": {
"version": "1.2.1",
"resolved": "https://registry.npmjs.org/blakejs/-/blakejs-1.2.1.tgz",
@@ -11529,12 +11401,9 @@
}
},
"bson": {
- "version": "4.6.5",
- "resolved": "https://registry.npmjs.org/bson/-/bson-4.6.5.tgz",
- "integrity": "sha512-uqrgcjyOaZsHfz7ea8zLRCLe1u+QGUSzMZmvXqO24CDW7DWoW1qiN9folSwa7hSneTSgM2ykDIzF5kcQQ8cwNw==",
- "requires": {
- "buffer": "^5.6.0"
- }
+ "version": "6.10.3",
+ "resolved": "https://registry.npmjs.org/bson/-/bson-6.10.3.tgz",
+ "integrity": "sha512-MTxGsqgYTwfshYWTRdmZRC+M7FnG1b4y7RO7p2k3X24Wq0yv1m77Wsj0BzlPzd/IowgESfsruQCUToa7vbOpPQ=="
},
"buffer": {
"version": "5.7.1",
@@ -11855,11 +11724,6 @@
"integrity": "sha512-JxbCBUdrfr6AQjOXrxoTvAMJO4HBTUIlBzslcJPAz+/KT8yk53fXun51u+RenNYvad/+Vc2DIz5o9UxlCDymFQ==",
"dev": true
},
- "core-util-is": {
- "version": "1.0.3",
- "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.3.tgz",
- "integrity": "sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ=="
- },
"cors": {
"version": "2.8.5",
"resolved": "https://registry.npmjs.org/cors/-/cors-2.8.5.tgz",
@@ -13193,11 +13057,6 @@
"side-channel": "^1.0.4"
}
},
- "ip": {
- "version": "2.0.0",
- "resolved": "https://registry.npmjs.org/ip/-/ip-2.0.0.tgz",
- "integrity": "sha512-WKa+XuLG1A1R0UWhl2+1XQSi+fZWMsYKffMZTTYsiZaUD8k2yDAj5atimTUD2TZkyCkNEeYE5NhFZmupOGtjYQ=="
- },
"ipaddr.js": {
"version": "1.9.1",
"resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.9.1.tgz",
@@ -13395,11 +13254,6 @@
"call-bind": "^1.0.2"
}
},
- "isarray": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz",
- "integrity": "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ=="
- },
"isexe": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz",
@@ -14158,9 +14012,9 @@
}
},
"kareem": {
- "version": "2.4.1",
- "resolved": "https://registry.npmjs.org/kareem/-/kareem-2.4.1.tgz",
- "integrity": "sha512-aJ9opVoXroQUPfovYP5kaj2lM7Jn02Gw13bL0lg9v0V7SaUc0qavPs0Eue7d2DcC3NjqI6QAUElXNsuZSeM+EA=="
+ "version": "2.6.3",
+ "resolved": "https://registry.npmjs.org/kareem/-/kareem-2.6.3.tgz",
+ "integrity": "sha512-C3iHfuGUXK2u8/ipq9LfjFfXFxAZMQJJq7vLS45r3D9Y2xQ/m4S8zaR4zMLFWh9AsNPXmcFfUDhTEO8UIC/V6Q=="
},
"keccak": {
"version": "3.0.2",
@@ -14340,8 +14194,7 @@
"memory-pager": {
"version": "1.5.0",
"resolved": "https://registry.npmjs.org/memory-pager/-/memory-pager-1.5.0.tgz",
- "integrity": "sha512-ZS4Bp4r/Zoeq6+NLJpP+0Zzm0pR8whtGPf1XExKLJBAczGMnSi3It14OiNCStjQjM6NU1okjQGSxgEZN8eBYKg==",
- "optional": true
+ "integrity": "sha512-ZS4Bp4r/Zoeq6+NLJpP+0Zzm0pR8whtGPf1XExKLJBAczGMnSi3It14OiNCStjQjM6NU1okjQGSxgEZN8eBYKg=="
},
"merge-descriptors": {
"version": "1.0.1",
@@ -14481,40 +14334,30 @@
"requires": {}
},
"mongodb": {
- "version": "3.7.3",
- "resolved": "https://registry.npmjs.org/mongodb/-/mongodb-3.7.3.tgz",
- "integrity": "sha512-Psm+g3/wHXhjBEktkxXsFMZvd3nemI0r3IPsE0bU+4//PnvNWKkzhZcEsbPcYiWqe8XqXJJEg4Tgtr7Raw67Yw==",
- "requires": {
- "bl": "^2.2.1",
- "bson": "^1.1.4",
- "denque": "^1.4.1",
- "optional-require": "^1.1.8",
- "safe-buffer": "^5.1.2",
- "saslprep": "^1.0.0"
- },
- "dependencies": {
- "bson": {
- "version": "1.1.6",
- "resolved": "https://registry.npmjs.org/bson/-/bson-1.1.6.tgz",
- "integrity": "sha512-EvVNVeGo4tHxwi8L6bPj3y3itEvStdwvvlojVxxbyYfoaxJ6keLgrTuKdyfEAszFK+H3olzBuafE0yoh0D1gdg=="
- }
+ "version": "6.15.0",
+ "resolved": "https://registry.npmjs.org/mongodb/-/mongodb-6.15.0.tgz",
+ "integrity": "sha512-ifBhQ0rRzHDzqp9jAQP6OwHSH7dbYIQjD3SbJs9YYk9AikKEettW/9s/tbSFDTpXcRbF+u1aLrhHxDFaYtZpFQ==",
+ "requires": {
+ "@mongodb-js/saslprep": "^1.1.9",
+ "bson": "^6.10.3",
+ "mongodb-connection-string-url": "^3.0.0"
}
},
"mongodb-connection-string-url": {
- "version": "2.5.3",
- "resolved": "https://registry.npmjs.org/mongodb-connection-string-url/-/mongodb-connection-string-url-2.5.3.tgz",
- "integrity": "sha512-f+/WsED+xF4B74l3k9V/XkTVj5/fxFH2o5ToKXd8Iyi5UhM+sO9u0Ape17Mvl/GkZaFtM0HQnzAG5OTmhKw+tQ==",
+ "version": "3.0.2",
+ "resolved": "https://registry.npmjs.org/mongodb-connection-string-url/-/mongodb-connection-string-url-3.0.2.tgz",
+ "integrity": "sha512-rMO7CGo/9BFwyZABcKAWL8UJwH/Kc2x0g72uhDWzG48URRax5TCIcJ7Rc3RZqffZzO/Gwff/jyKwCU9TN8gehA==",
"requires": {
- "@types/whatwg-url": "^8.2.1",
- "whatwg-url": "^11.0.0"
+ "@types/whatwg-url": "^11.0.2",
+ "whatwg-url": "^14.1.0 || ^13.0.0"
},
"dependencies": {
"tr46": {
- "version": "3.0.0",
- "resolved": "https://registry.npmjs.org/tr46/-/tr46-3.0.0.tgz",
- "integrity": "sha512-l7FvfAHlcmulp8kr+flpQZmVwtu7nfRV7NZujtN0OqES8EL4O4e0qqzL0DC5gAvx/ZC/9lk6rhcUwYvkBnBnYA==",
+ "version": "5.1.1",
+ "resolved": "https://registry.npmjs.org/tr46/-/tr46-5.1.1.tgz",
+ "integrity": "sha512-hdF5ZgjTqgAntKkklYw0R03MG2x/bSzTtkxmIRw/sTNV8YXsCJ1tfLAX23lhxhHJlEf3CRCOCGGWw3vI3GaSPw==",
"requires": {
- "punycode": "^2.1.1"
+ "punycode": "^2.3.1"
}
},
"webidl-conversions": {
@@ -14523,47 +14366,30 @@
"integrity": "sha512-VwddBukDzu71offAQR975unBIGqfKZpM+8ZX6ySk8nYhVoo5CYaZyzt3YBvYtRtO+aoGlqxPg/B87NGVZ/fu6g=="
},
"whatwg-url": {
- "version": "11.0.0",
- "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-11.0.0.tgz",
- "integrity": "sha512-RKT8HExMpoYx4igMiVMY83lN6UeITKJlBQ+vR/8ZJ8OCdSiN3RwCq+9gH0+Xzj0+5IrM6i4j/6LuvzbZIQgEcQ==",
+ "version": "14.2.0",
+ "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-14.2.0.tgz",
+ "integrity": "sha512-De72GdQZzNTUBBChsXueQUnPKDkg/5A5zp7pFDuQAj5UFoENpiACU0wlCvzpAGnTkj++ihpKwKyYewn/XNUbKw==",
"requires": {
- "tr46": "^3.0.0",
+ "tr46": "^5.1.0",
"webidl-conversions": "^7.0.0"
}
}
}
},
"mongoose": {
- "version": "6.5.2",
- "resolved": "https://registry.npmjs.org/mongoose/-/mongoose-6.5.2.tgz",
- "integrity": "sha512-3CFDrSLtK2qjM1pZeZpLTUyqPRkc11Iuh74ZrwS4IwEJ3K2PqGnmyPLw7ex4Kzu37ujIMp3MAuiBlUjfrcb6hw==",
+ "version": "8.13.2",
+ "resolved": "https://registry.npmjs.org/mongoose/-/mongoose-8.13.2.tgz",
+ "integrity": "sha512-riCBqZmNkYBWjXpM3qWLDQw7QmTKsVZDPhLXFJqC87+OjocEVpvS3dA2BPPUiLAu+m0/QmEj5pSXKhH+/DgerQ==",
"requires": {
- "bson": "^4.6.5",
- "kareem": "2.4.1",
- "mongodb": "4.8.1",
+ "bson": "^6.10.3",
+ "kareem": "2.6.3",
+ "mongodb": "~6.15.0",
"mpath": "0.9.0",
- "mquery": "4.0.3",
+ "mquery": "5.0.0",
"ms": "2.1.3",
- "sift": "16.0.0"
+ "sift": "17.1.3"
},
"dependencies": {
- "denque": {
- "version": "2.1.0",
- "resolved": "https://registry.npmjs.org/denque/-/denque-2.1.0.tgz",
- "integrity": "sha512-HVQE3AAb/pxF8fQAoiqpvg9i3evqug3hoiwakOyZAwJm+6vZehbkYXZ0l4JxS+I3QxM97v5aaRNhj8v5oBhekw=="
- },
- "mongodb": {
- "version": "4.8.1",
- "resolved": "https://registry.npmjs.org/mongodb/-/mongodb-4.8.1.tgz",
- "integrity": "sha512-/NyiM3Ox9AwP5zrfT9TXjRKDJbXlLaUDQ9Rg//2lbg8D2A8GXV0VidYYnA/gfdK6uwbnL4FnAflH7FbGw3TS7w==",
- "requires": {
- "bson": "^4.6.5",
- "denque": "^2.0.1",
- "mongodb-connection-string-url": "^2.5.2",
- "saslprep": "^1.0.3",
- "socks": "^2.6.2"
- }
- },
"ms": {
"version": "2.1.3",
"resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz",
@@ -14577,25 +14403,25 @@
"integrity": "sha512-ikJRQTk8hw5DEoFVxHG1Gn9T/xcjtdnOKIU1JTmGjZZlg9LST2mBLmcX3/ICIbgJydT2GOc15RnNy5mHmzfSew=="
},
"mquery": {
- "version": "4.0.3",
- "resolved": "https://registry.npmjs.org/mquery/-/mquery-4.0.3.tgz",
- "integrity": "sha512-J5heI+P08I6VJ2Ky3+33IpCdAvlYGTSUjwTPxkAr8i8EoduPMBX2OY/wa3IKZIQl7MU4SbFk8ndgSKyB/cl1zA==",
+ "version": "5.0.0",
+ "resolved": "https://registry.npmjs.org/mquery/-/mquery-5.0.0.tgz",
+ "integrity": "sha512-iQMncpmEK8R8ncT8HJGsGc9Dsp8xcgYMVSbs5jgnm1lFHTZqMJTUWTDx1LBO8+mK3tPNZWFLBghQEIOULSTHZg==",
"requires": {
"debug": "4.x"
},
"dependencies": {
"debug": {
- "version": "4.3.4",
- "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz",
- "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==",
+ "version": "4.4.0",
+ "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.0.tgz",
+ "integrity": "sha512-6WTZ/IxCY/T6BALoZHaE4ctp9xm+Z5kY/pzYaCHRFeyVhojxlrm+46y68HA6hr0TcwEssoxNiDEUJQjfPZ/RYA==",
"requires": {
- "ms": "2.1.2"
+ "ms": "^2.1.3"
}
},
"ms": {
- "version": "2.1.2",
- "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz",
- "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w=="
+ "version": "2.1.3",
+ "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz",
+ "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA=="
}
}
},
@@ -14833,14 +14659,6 @@
"requestretry": "^7.0.0"
}
},
- "optional-require": {
- "version": "1.1.8",
- "resolved": "https://registry.npmjs.org/optional-require/-/optional-require-1.1.8.tgz",
- "integrity": "sha512-jq83qaUb0wNg9Krv1c5OQ+58EK+vHde6aBPzLvPPqJm89UQWsvSuFy9X/OSNJnFeSOKo7btE0n8Nl2+nE+z5nA==",
- "requires": {
- "require-at": "^1.0.6"
- }
- },
"optionator": {
"version": "0.8.3",
"resolved": "https://registry.npmjs.org/optionator/-/optionator-0.8.3.tgz",
@@ -15075,11 +14893,6 @@
"resolved": "https://registry.npmjs.org/process/-/process-0.11.10.tgz",
"integrity": "sha512-cdGef/drWFoydD1JsMzuFf8100nZl+GT+yacc2bEced5f9Rjk4z+WtFUTBu9PhOi9j/jfmBPu0mMEY4wIdAF8A=="
},
- "process-nextick-args": {
- "version": "2.0.1",
- "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz",
- "integrity": "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag=="
- },
"prompts": {
"version": "2.4.2",
"resolved": "https://registry.npmjs.org/prompts/-/prompts-2.4.2.tgz",
@@ -15127,9 +14940,9 @@
}
},
"punycode": {
- "version": "2.1.1",
- "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz",
- "integrity": "sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A=="
+ "version": "2.3.1",
+ "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz",
+ "integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg=="
},
"q": {
"version": "2.0.3",
@@ -15208,27 +15021,6 @@
"integrity": "sha512-w2GsyukL62IJnlaff/nRegPQR94C/XXamvMWmSHRJ4y7Ts/4ocGRmTHvOs8PSE6pB3dWOrD/nueuU5sduBsQ4w==",
"dev": true
},
- "readable-stream": {
- "version": "2.3.7",
- "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz",
- "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==",
- "requires": {
- "core-util-is": "~1.0.0",
- "inherits": "~2.0.3",
- "isarray": "~1.0.0",
- "process-nextick-args": "~2.0.0",
- "safe-buffer": "~5.1.1",
- "string_decoder": "~1.1.1",
- "util-deprecate": "~1.0.1"
- },
- "dependencies": {
- "safe-buffer": {
- "version": "5.1.2",
- "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz",
- "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g=="
- }
- }
- },
"redis": {
"version": "3.1.2",
"resolved": "https://registry.npmjs.org/redis/-/redis-3.1.2.tgz",
@@ -15330,11 +15122,6 @@
"lodash": "^4.17.15"
}
},
- "require-at": {
- "version": "1.0.6",
- "resolved": "https://registry.npmjs.org/require-at/-/require-at-1.0.6.tgz",
- "integrity": "sha512-7i1auJbMUrXEAZCOQ0VNJgmcT2VOKPRl2YGJwgpHpC9CE91Mv4/4UYIUm4chGJaI381ZDq1JUicFii64Hapd8g=="
- },
"require-directory": {
"version": "2.1.1",
"resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz",
@@ -15451,15 +15238,6 @@
"resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz",
"integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg=="
},
- "saslprep": {
- "version": "1.0.3",
- "resolved": "https://registry.npmjs.org/saslprep/-/saslprep-1.0.3.tgz",
- "integrity": "sha512-/MY/PEMbk2SuY5sScONwhUDsV2p77Znkb/q3nSVstq/yQzYJOH/Azh29p9oJLsl3LnQwSvZDKagDGBsBwSooag==",
- "optional": true,
- "requires": {
- "sparse-bitfield": "^3.0.3"
- }
- },
"saxes": {
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/saxes/-/saxes-5.0.1.tgz",
@@ -15590,9 +15368,9 @@
}
},
"sift": {
- "version": "16.0.0",
- "resolved": "https://registry.npmjs.org/sift/-/sift-16.0.0.tgz",
- "integrity": "sha512-ILTjdP2Mv9V1kIxWMXeMTIRbOBrqKc4JAXmFMnFq3fKeyQ2Qwa3Dw1ubcye3vR+Y6ofA0b9gNDr/y2t6eUeIzQ=="
+ "version": "17.1.3",
+ "resolved": "https://registry.npmjs.org/sift/-/sift-17.1.3.tgz",
+ "integrity": "sha512-Rtlj66/b0ICeFzYTuNvX/EF1igRbbnGSvEyT79McoZa/DeGhMyC5pWKOEsZKnpkqtSeovd5FL/bjHWC3CIIvCQ=="
},
"signal-exit": {
"version": "3.0.7",
@@ -15637,20 +15415,6 @@
"integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==",
"dev": true
},
- "smart-buffer": {
- "version": "4.2.0",
- "resolved": "https://registry.npmjs.org/smart-buffer/-/smart-buffer-4.2.0.tgz",
- "integrity": "sha512-94hK0Hh8rPqQl2xXc3HsaBoOXKV20MToPkcXvwbISWLEs+64sBq5kFgn2kJDHb1Pry9yrP0dxrCI9RRci7RXKg=="
- },
- "socks": {
- "version": "2.7.0",
- "resolved": "https://registry.npmjs.org/socks/-/socks-2.7.0.tgz",
- "integrity": "sha512-scnOe9y4VuiNUULJN72GrM26BNOjVsfPXI+j+98PkyEfsIXroa5ofyjT+FzGvn/xHs73U2JtoBYAVx9Hl4quSA==",
- "requires": {
- "ip": "^2.0.0",
- "smart-buffer": "^4.2.0"
- }
- },
"source-map": {
"version": "0.6.1",
"resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz",
@@ -15671,7 +15435,6 @@
"version": "3.0.3",
"resolved": "https://registry.npmjs.org/sparse-bitfield/-/sparse-bitfield-3.0.3.tgz",
"integrity": "sha512-kvzhi7vqKTfkh0PZU+2D2PIllw2ymqJKujUcyPMd9Y75Nv4nPbGJZXNhxsgdQab2BmlDct1YnfQCguEvHr7VsQ==",
- "optional": true,
"requires": {
"memory-pager": "^1.0.2"
}
diff --git a/api/package.json b/api/package.json
index cc0b5eb6..c390bcd0 100644
--- a/api/package.json
+++ b/api/package.json
@@ -15,7 +15,6 @@
"@types/express": "^4.17.12",
"@types/jest": "^27.0.2",
"@types/mongodb": "^3.6.17",
- "@types/mongoose": "^5.11.97",
"@types/node": "^15.12.2",
"@types/redis": "^2.8.29",
"@types/supertest": "^2.0.11",
@@ -35,8 +34,8 @@
"dotenv": "^10.0.0",
"express": "^4.17.1",
"mockingoose": "^2.15.2",
- "mongodb": "^3.6.9",
- "mongoose": "^6.4.6",
+ "mongodb": "^6.15.0",
+ "mongoose": "^8.13.2",
"nodemailer": "^6.7.5",
"opsgenie-sdk": "^0.5.1",
"redis": "^3.1.2",
diff --git a/api/run_server.sh b/api/run_server.sh
index ad73dd72..68e8b9cb 100755
--- a/api/run_server.sh
+++ b/api/run_server.sh
@@ -1,2 +1,2 @@
#!/bin/bash
-node src/server.js
\ No newline at end of file
+node --trace-deprecation src/server.js
\ No newline at end of file
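The --trace-deprecation flag added above makes Node print a stack trace for
every DeprecationWarning, which is what pinpoints the call sites the
mongodb/mongoose major upgrade deprecates. A rough programmatic equivalent, as
a sketch rather than code from this repository:

    // Print the originating stack of every deprecation warning the process
    // emits; Node raises these as "warning" events.
    process.on("warning", (warning: Error) => {
      if (warning.name === "DeprecationWarning") {
        console.error(warning.stack);
      }
    });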
diff --git a/api/src/server.ts b/api/src/server.ts
index 498f7ae8..2ecda227 100644
--- a/api/src/server.ts
+++ b/api/src/server.ts
@@ -1,180 +1,197 @@
import * as dotenv from "dotenv";
-import {readFile} from "./server/files";
+import { readFile } from "./server/files";
import path from "path";
import https from "https";
import {
- AlertKeysDockerHubRepo,
- AlertKeysGitHubRepo,
- AlertKeysNode,
- AlertKeysSystem,
- AlertsOverviewAlertData,
- AlertsOverviewInput,
- AlertsOverviewResult,
- GitHubKeys,
- HttpsOptions,
- isAlertsOverviewInputValid,
- isRedisMetricsInputValid,
- MetricsResult,
- MonitorablesInfoResult,
- RedisHashes,
- RedisKeys,
- RedisMetricsInput,
- SystemKeys
+ AlertKeysDockerHubRepo,
+ AlertKeysGitHubRepo,
+ AlertKeysNode,
+ AlertKeysSystem,
+ AlertsOverviewAlertData,
+ AlertsOverviewInput,
+ AlertsOverviewResult,
+ GitHubKeys,
+ HttpsOptions,
+ isAlertsOverviewInputValid,
+ isRedisMetricsInputValid,
+ MetricsResult,
+ MonitorablesInfoResult,
+ RedisHashes,
+ RedisKeys,
+ RedisMetricsInput,
+ SystemKeys,
} from "./server/types";
import {
- CouldNotRetrieveDataFromMongo,
- CouldNotRetrieveDataFromRedis,
- EnvVariablesNotAvailable,
- InvalidBaseChains,
- InvalidEndpoint,
- InvalidJsonSchema,
- InvalidParameterValue,
- InvalidValueRetrievedFromRedis,
- MissingKeysInBody,
- MongoClientNotInitialised,
- RedisClientNotInitialised
-} from './constant/errors'
+ CouldNotRetrieveDataFromMongo,
+ CouldNotRetrieveDataFromRedis,
+ EnvVariablesNotAvailable,
+ InvalidBaseChains,
+ InvalidEndpoint,
+ InvalidJsonSchema,
+ InvalidParameterValue,
+ InvalidValueRetrievedFromRedis,
+ MissingKeysInBody,
+ MongoClientNotInitialised,
+ RedisClientNotInitialised,
+} from "./constant/errors";
import {
- allElementsInList,
- allElementsInListHaveTypeString,
- errorJson,
- fulfillWithTimeLimit,
- getElementsNotInList,
- missingValues,
- resultJson,
- toBool,
- verifyNodeExporterPing,
- verifyPrometheusPing,
+ allElementsInList,
+ allElementsInListHaveTypeString,
+ errorJson,
+ fulfillWithTimeLimit,
+ getElementsNotInList,
+ missingValues,
+ resultJson,
+ toBool,
+ verifyNodeExporterPing,
+ verifyPrometheusPing,
} from "./server/utils";
import express from "express";
import cors from "cors";
import cookieParser from "cookie-parser";
import {
- addPostfixToKeys,
- addPrefixToKeys,
- alertKeysChainSourced,
- alertKeysChainSourcedWithUniqueIdentifier,
- alertKeysClContractPrefix,
- alertKeysClNodePrefix,
- alertKeysCosmosNodePrefix,
- alertKeysDockerHubPrefix,
- alertKeysEvmNodePrefix,
- alertKeysGitHubPrefix,
- alertKeysSubstrateNodePrefix,
- alertKeysSystemPrefix,
- getAlertKeysDockerHubRepo,
- getAlertKeysGitHubRepo,
- getAlertKeysNode,
- getAlertKeysSystem,
- getGitHubKeys,
- getRedisHashes,
- getSystemKeys,
- RedisInterface
-} from "./server/redis"
-import {MongoInterface, MonitorablesCollection} from "./server/mongo";
-import {MongoClientOptions} from "mongodb";
-import {GenericRoute} from "./v1/route/GenericRoute";
-import {baseChains, PingStatus, Severities, Status, testAlertMessage, Timeout} from "./constant/server";
-import {TimeoutError} from "./constant/server.feedback";
-import {MongoConnect} from "./v1/service/MongoConnect";
-import {event} from '@pagerduty/pdjs';
-import {ConfigRoute} from "./v1/route/ConfigRoute";
-import {InstallationRoute} from "./v1/route/InstallationRoute";
-import {BaseChainRoute} from "./v1/route/BaseChainRoute";
-import {ChannelRoute} from "./v1/route/ChannelRoute";
-import {GenericModel} from "./v1/entity/model/GenericModel";
-import {SeverityAlertSubconfigModel} from "./v1/entity/model/SeverityAlertSubconfigSchema";
-import {BaseChainModel} from "./v1/entity/model/BaseChainModel";
-import {Model} from "mongoose";
-import {MongooseUtil} from "./util/MongooseUtil";
-import {ThresholdAlertSubconfigModel} from "./v1/entity/model/ThresholdAlertSubconfigSchema";
-import {TimeWindowAlertSubconfigModel} from "./v1/entity/model/TimeWindowAlertSubconfigSchema";
-
-const axios = require('axios');
-const opsgenie = require('opsgenie-sdk');
-const twilio = require('twilio');
-const {WebClient} = require('@slack/web-api');
-const Web3 = require('web3');
-const nodemailer = require('nodemailer');
-const swaggerUi = require('swagger-ui-express');
-const swaggerDocument = require('./swagger.json');
+ addPostfixToKeys,
+ addPrefixToKeys,
+ alertKeysChainSourced,
+ alertKeysChainSourcedWithUniqueIdentifier,
+ alertKeysClContractPrefix,
+ alertKeysClNodePrefix,
+ alertKeysCosmosNodePrefix,
+ alertKeysDockerHubPrefix,
+ alertKeysEvmNodePrefix,
+ alertKeysGitHubPrefix,
+ alertKeysSubstrateNodePrefix,
+ alertKeysSystemPrefix,
+ getAlertKeysDockerHubRepo,
+ getAlertKeysGitHubRepo,
+ getAlertKeysNode,
+ getAlertKeysSystem,
+ getGitHubKeys,
+ getRedisHashes,
+ getSystemKeys,
+ RedisInterface,
+} from "./server/redis";
+import { MongoInterface, MonitorablesCollection } from "./server/mongo";
+import { MongoClientOptions } from "mongodb";
+import { GenericRoute } from "./v1/route/GenericRoute";
+import {
+ baseChains,
+ PingStatus,
+ Severities,
+ Status,
+ testAlertMessage,
+ Timeout,
+} from "./constant/server";
+import { TimeoutError } from "./constant/server.feedback";
+import { MongoConnect } from "./v1/service/MongoConnect";
+import { event } from "@pagerduty/pdjs";
+import { ConfigRoute } from "./v1/route/ConfigRoute";
+import { InstallationRoute } from "./v1/route/InstallationRoute";
+import { BaseChainRoute } from "./v1/route/BaseChainRoute";
+import { ChannelRoute } from "./v1/route/ChannelRoute";
+import { GenericModel } from "./v1/entity/model/GenericModel";
+import { SeverityAlertSubconfigModel } from "./v1/entity/model/SeverityAlertSubconfigSchema";
+import { BaseChainModel } from "./v1/entity/model/BaseChainModel";
+import { Model } from "mongoose";
+import { MongooseUtil } from "./util/MongooseUtil";
+import { ThresholdAlertSubconfigModel } from "./v1/entity/model/ThresholdAlertSubconfigSchema";
+import { TimeWindowAlertSubconfigModel } from "./v1/entity/model/TimeWindowAlertSubconfigSchema";
+
+const axios = require("axios");
+const opsgenie = require("opsgenie-sdk");
+const twilio = require("twilio");
+const { WebClient } = require("@slack/web-api");
+const Web3 = require("web3");
+const nodemailer = require("nodemailer");
+const swaggerUi = require("swagger-ui-express");
+const swaggerDocument = require("./swagger.json");
// Use the environmental variables from the .env file
dotenv.config();
// Import certificate files
-const httpsKey: Buffer = readFile(path.join(__dirname, '../../', 'certificates',
- 'key.pem'));
-const httpsCert: Buffer = readFile(path.join(__dirname, '../../',
- 'certificates', 'cert.pem'));
+const httpsKey: Buffer = readFile(
+ path.join(__dirname, "../../", "certificates", "key.pem")
+);
+const httpsCert: Buffer = readFile(
+ path.join(__dirname, "../../", "certificates", "cert.pem")
+);
const httpsOptions: HttpsOptions = {
- key: httpsKey,
- cert: httpsCert,
+ key: httpsKey,
+ cert: httpsCert,
};
// Server configuration
const app = express();
-app.disable('x-powered-by');
+app.disable("x-powered-by");
app.use(express.json());
-app.use(express.static(path.join(__dirname, '../', 'build')));
+app.use(express.static(path.join(__dirname, "../", "build")));
app.use(cookieParser());
-app.use((err: any, req: express.Request, res: express.Response,
- next: express.NextFunction) => {
+app.use(
+ (
+ err: any,
+ req: express.Request,
+ res: express.Response,
+ next: express.NextFunction
+ ) => {
// This check makes sure this is a JSON parsing issue, but it might be
// coming from any middleware, not just body-parser.
- if (err instanceof SyntaxError && 'body' in err) {
- console.error(err);
- return res.sendStatus(Status.ERROR); // Bad request
+ if (err instanceof SyntaxError && "body" in err) {
+ console.error(err);
+ return res.sendStatus(Status.ERROR); // Bad request
}
next();
-});
+ }
+);
//timeout
-app.use((req: express.Request, res: express.Response,
- next: express.NextFunction): void => {
-
+app.use(
+ (
+ req: express.Request,
+ res: express.Response,
+ next: express.NextFunction
+ ): void => {
let timeout = Timeout.MAX;
if (req.query && req.query.timeout) {
- timeout = parseInt(req.query.timeout as string) * 1000;
- if (timeout < Timeout.MIN) {
- timeout = Timeout.MIN;
- }
+      timeout = parseInt(req.query.timeout as string, 10) * 1000;
+ if (timeout < Timeout.MIN) {
+ timeout = Timeout.MIN;
+ }
}
res.setTimeout(timeout, () => {
- const error = new TimeoutError();
- next(error);
+ const error = new TimeoutError();
+ next(error);
});
next();
-});
+ }
+);
-app.use('/api-docs', swaggerUi.serve, swaggerUi.setup(swaggerDocument));
+app.use("/api-docs", swaggerUi.serve, swaggerUi.setup(swaggerDocument));
const allowedOrigins: string[] = [];
const is_dev_mode = process.env.DEV_MODE && toBool(process.env.DEV_MODE);
if (!process.env.UI_ACCESS_IP && !is_dev_mode) {
- console.error('UI_ACCESS_IP environmental variable not specified,' +
- ' stopping API.');
- process.exit(1);
+  console.error(
+    "UI_ACCESS_IP environment variable not specified, stopping API."
+  );
+ process.exit(1);
}
-const UI_PORT: string = process.env.UI_DASHBOARD_PORT ?
- process.env.UI_DASHBOARD_PORT : '3333';
+const UI_PORT: string = process.env.UI_DASHBOARD_PORT
+ ? process.env.UI_DASHBOARD_PORT
+ : "3333";
-allowedOrigins.push(
- `https://${process.env.UI_ACCESS_IP}:${UI_PORT}`);
+allowedOrigins.push(`https://${process.env.UI_ACCESS_IP}:${UI_PORT}`);
if (is_dev_mode) {
- console.log('NOTE - Accepting connections from UI Dev Server.')
- allowedOrigins.push(
- `http://localhost:${UI_PORT}`);
- allowedOrigins.push(
- `https://localhost:${UI_PORT}`);
-
+ console.log("NOTE - Accepting connections from UI Dev Server.");
+ allowedOrigins.push(`http://localhost:${UI_PORT}`);
+ allowedOrigins.push(`https://localhost:${UI_PORT}`);
}
-app.use(cors({origin: allowedOrigins}));
+app.use(cors({ origin: allowedOrigins }));
// Connect with Redis
const redisHost = process.env.REDIS_IP || "localhost";
@@ -187,7 +204,7 @@ redisInterface.connect();
// Check the redis connection every 3 seconds. If the connection was dropped,
// re-connect.
const redisInterval = setInterval(() => {
- redisInterface.connect();
+ redisInterface.connect();
}, 3000);
// Connect with Mongo
@@ -195,18 +212,18 @@ const mongoHost = process.env.DB_IP || "localhost";
const mongoPort = parseInt(process.env.DB_PORT || "27017");
const mongoDB = process.env.DB_NAME || "panicdb";
const mongoOptions: MongoClientOptions = {
- useNewUrlParser: true,
- useUnifiedTopology: true,
- socketTimeoutMS: 10000,
- connectTimeoutMS: 10000,
- serverSelectionTimeoutMS: 5000,
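+  // The legacy useNewUrlParser/useUnifiedTopology flags no longer exist in
+  // the v6 driver. replicaSet names the Mongo replica set (default "rs1"),
+  // and primaryPreferred lets reads fall back to a secondary when the
+  // primary is unavailable.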
+ socketTimeoutMS: 10000,
+ connectTimeoutMS: 10000,
+ serverSelectionTimeoutMS: 5000,
+ replicaSet: process.env.REPLICA_SET_NAME || "rs1",
+ readPreference: "primaryPreferred",
};
const mongoInterface = new MongoInterface(mongoOptions, mongoHost, mongoPort);
// Check the mongo connection every 3 seconds. If the connection was dropped,
// re-connect.
const mongoInterval = setInterval(async () => {
- await mongoInterface.connect();
+ await mongoInterface.connect();
}, 3000);
MongoConnect.start().then();
@@ -215,15 +232,15 @@ MongoConnect.start().then();
// Please note that `SeverityAlertSubconfigModel` shares the `generics` collection with `GenericModel`.
// For this reason in the logic below, `severity_alerts` is dependent on `generics`.
const modelsAndFiles: Array<[Model, string]> = [
- [GenericModel, 'generics'],
- [SeverityAlertSubconfigModel, 'severity_alerts'],
- [ThresholdAlertSubconfigModel, 'threshold_alert'],
- [TimeWindowAlertSubconfigModel, 'time_window_alert'],
- [BaseChainModel, 'base_chains']
+ [GenericModel, "generics"],
+ [SeverityAlertSubconfigModel, "severity_alerts"],
+ [ThresholdAlertSubconfigModel, "threshold_alert"],
+ [TimeWindowAlertSubconfigModel, "time_window_alert"],
+ [BaseChainModel, "base_chains"],
];
modelsAndFiles.forEach(async ([model, file]) => {
- await MongooseUtil.populateModel(model, file);
+ await MongooseUtil.populateModel(model, file);
});
// Routes
@@ -237,1474 +254,1576 @@ new ChannelRoute(app);
// This endpoint expects a list of base chains (cosmos, substrate, chainlink or
// general) inside the body structure.
-app.post('/server/mongo/monitorablesInfo',
- async (req: express.Request, res: express.Response) => {
- console.log('Received POST request for %s %s', req.url, req.body);
- const baseChainsInput = req.body['baseChains'];
-
- // Check if some required keys are missing in the body object, if yes
- // notify the client.
- const missingKeysList: string[] = missingValues({
- baseChains: baseChainsInput
- });
- if (missingKeysList.length !== 0) {
- const err = new MissingKeysInBody(...missingKeysList);
- res.status(err.code).send(errorJson(err.message));
- return;
- }
- // Check if the passed base chains are valid
- if (Array.isArray(baseChainsInput)) {
- if (!allElementsInList(baseChainsInput, baseChains)) {
- const invalidBaseChains: string[] = getElementsNotInList(
- baseChainsInput, baseChains);
- const err = new InvalidBaseChains(...invalidBaseChains);
- res.status(err.code).send(errorJson(err.message));
- return;
- }
- } else {
- const invalidBaseChains: any[] = getElementsNotInList(
- [baseChainsInput], baseChains);
- const err = new InvalidBaseChains(...invalidBaseChains);
- res.status(err.code).send(errorJson(err.message));
- return;
- }
- let result: MonitorablesInfoResult = resultJson({});
+app.post(
+ "/server/mongo/monitorablesInfo",
+ async (req: express.Request, res: express.Response) => {
+ console.log("Received POST request for %s %s", req.url, req.body);
+ const baseChainsInput = req.body["baseChains"];
- for (const [, baseChain] of Object.entries(baseChainsInput)) {
- result.result[baseChain] = {};
- }
+ // Check if some required keys are missing in the body object, if yes
+ // notify the client.
+ const missingKeysList: string[] = missingValues({
+ baseChains: baseChainsInput,
+ });
+ if (missingKeysList.length !== 0) {
+ const err = new MissingKeysInBody(...missingKeysList);
+ res.status(err.code).send(errorJson(err.message));
+ return;
+ }
+ // Check if the passed base chains are valid
+ if (Array.isArray(baseChainsInput)) {
+ if (!allElementsInList(baseChainsInput, baseChains)) {
+ const invalidBaseChains: string[] = getElementsNotInList(
+ baseChainsInput,
+ baseChains
+ );
+ const err = new InvalidBaseChains(...invalidBaseChains);
+ res.status(err.code).send(errorJson(err.message));
+ return;
+ }
+ } else {
+ const invalidBaseChains: any[] = getElementsNotInList(
+ [baseChainsInput],
+ baseChains
+ );
+ const err = new InvalidBaseChains(...invalidBaseChains);
+ res.status(err.code).send(errorJson(err.message));
+ return;
+ }
+ let result: MonitorablesInfoResult = resultJson({});
+
+ for (const [, baseChain] of Object.entries(baseChainsInput)) {
+ result.result[baseChain] = {};
+ }
- if (mongoInterface.client) {
- try {
- const db = mongoInterface.client.db(mongoDB);
- if (baseChainsInput.length > 0) {
- const collection = db.collection(MonitorablesCollection);
- const query = {_id: {$in: baseChainsInput}};
- const docs = await collection.find(query).toArray();
- for (const doc of docs) {
- const baseChainData: any = result.result[doc._id];
- delete doc._id;
- for (const parentID in doc) {
- const chain = doc[parentID];
- const chainName = chain.chain_name;
- baseChainData[chainName] = {
- parent_id: parentID,
- monitored: {}
- };
- delete chain.chain_name;
- for (const sourceType in chain) {
- const monitored = baseChainData[chainName].monitored;
- const chainSource = chain[sourceType];
- monitored[sourceType] = [];
- for (const sourceID in chain[sourceType]) {
- monitored[sourceType].push({
- [sourceID]: chainSource[sourceID].name
- });
- }
- }
- }
- }
+ if (mongoInterface.client) {
+ try {
+ const db = mongoInterface.client.db(mongoDB);
+ if (baseChainsInput.length > 0) {
+ const collection = db.collection(MonitorablesCollection);
+ const query = { _id: { $in: baseChainsInput } };
+ const docs = await collection.find(query).toArray();
+ for (const doc of docs) {
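+            // Under driver v6, _id is an ObjectId rather than a string, so
+            // it must be stringified before indexing into the result object.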
+ const baseChainData: any = result.result[doc._id.toString()];
+ delete doc._id;
+ for (const parentID in doc) {
+ const chain = doc[parentID];
+ const chainName = chain.chain_name;
+ baseChainData[chainName] = {
+ parent_id: parentID,
+ monitored: {},
+ };
+ delete chain.chain_name;
+ for (const sourceType in chain) {
+ const monitored = baseChainData[chainName].monitored;
+ const chainSource = chain[sourceType];
+ monitored[sourceType] = [];
+ for (const sourceID in chain[sourceType]) {
+ monitored[sourceType].push({
+ [sourceID]: chainSource[sourceID].name,
+ });
}
- res.status(Status.SUCCESS).send(result);
- return;
- } catch (err) {
- console.error(err);
- const retrievalErr = new CouldNotRetrieveDataFromMongo();
- res.status(retrievalErr.code).send(errorJson(
- retrievalErr.message));
- return;
+ }
}
- } else {
- // This is done just for the sake of completion, as it is very
- // unlikely to occur.
- const err = new MongoClientNotInitialised();
- res.status(err.code).send(errorJson(err.message));
- return;
+ }
}
+ res.status(Status.SUCCESS).send(result);
+ return;
+ } catch (err) {
+ console.error(err);
+ const retrievalErr = new CouldNotRetrieveDataFromMongo();
+ res.status(retrievalErr.code).send(errorJson(retrievalErr.message));
+ return;
+ }
+ } else {
+ // This is done just for the sake of completion, as it is very
+ // unlikely to occur.
+ const err = new MongoClientNotInitialised();
+ res.status(err.code).send(errorJson(err.message));
+ return;
+ }
+ }
+);
+
+app.post(
+ "/server/mongo/alerts",
+ async (req: express.Request, res: express.Response) => {
+ console.log("Received POST request for %s", req.url);
+ const {
+ chains,
+ severities,
+ sources,
+ minTimestamp,
+ maxTimestamp,
+ noOfAlerts,
+ } = req.body;
+
+ // Check that all parameters have been sent
+ const missingKeysList: string[] = missingValues({
+ chains,
+ severities,
+ sources,
+ minTimestamp,
+ maxTimestamp,
+ noOfAlerts,
});
+ if (missingKeysList.length !== 0) {
+ const err = new MissingKeysInBody(...missingKeysList);
+ res.status(err.code).send(errorJson(err.message));
+ return;
+ }
-app.post('/server/mongo/alerts',
- async (req: express.Request, res: express.Response) => {
- console.log('Received POST request for %s', req.url);
- const {
- chains,
- severities,
- sources,
- minTimestamp,
- maxTimestamp,
- noOfAlerts
- } = req.body;
-
- // Check that all parameters have been sent
- const missingKeysList: string[] = missingValues({
- chains,
- severities,
- sources,
- minTimestamp,
- maxTimestamp,
- noOfAlerts
- });
- if (missingKeysList.length !== 0) {
- const err = new MissingKeysInBody(...missingKeysList);
- res.status(err.code).send(errorJson(err.message));
- return;
- }
+ // --------------------- Input Validation -------------------
- // --------------------- Input Validation -------------------
+ const arrayBasedStringParams = { chains, severities, sources };
+ for (const [param, value] of Object.entries(arrayBasedStringParams)) {
+ if (!Array.isArray(value) || !allElementsInListHaveTypeString(value)) {
+ const err = new InvalidParameterValue(`req.body.${param}`);
+ res.status(err.code).send(errorJson(err.message));
+ return;
+ }
+ }
- const arrayBasedStringParams = {chains, severities, sources};
- for (const [param, value] of Object.entries(arrayBasedStringParams)) {
- if (!Array.isArray(value) ||
- !allElementsInListHaveTypeString(value)) {
- const err = new InvalidParameterValue(
- `req.body.${param}`);
- res.status(err.code).send(errorJson(err.message));
- return;
- }
- }
+ for (const severity of severities) {
+ if (!(severity in Severities)) {
+ const err = new InvalidParameterValue("req.body.severities");
+ res.status(err.code).send(errorJson(err.message));
+ return;
+ }
+ }
- for (const severity of severities) {
- if (!(severity in Severities)) {
- const err = new InvalidParameterValue(
- 'req.body.severities');
- res.status(err.code).send(errorJson(err.message));
- return;
- }
- }
+ const positiveFloats = { minTimestamp, maxTimestamp };
+ for (const [param, value] of Object.entries(positiveFloats)) {
+ const parsedFloat = parseFloat(value);
+ if (isNaN(parsedFloat) || parsedFloat < 0) {
+ const err = new InvalidParameterValue(`req.body.${param}`);
+ res.status(err.code).send(errorJson(err.message));
+ return;
+ }
+ }
+ const parsedMinTimestamp = parseFloat(minTimestamp);
+ const parsedMaxTimestamp = parseFloat(maxTimestamp);
+
+ const parsedNoOfAlerts = parseInt(noOfAlerts, 10);
+ if (isNaN(parsedNoOfAlerts) || parsedNoOfAlerts <= 0) {
+ const err = new InvalidParameterValue("req.body.noOfAlerts");
+ res.status(err.code).send(errorJson(err.message));
+ return;
+ }
- const positiveFloats = {minTimestamp, maxTimestamp};
- for (const [param, value] of Object.entries(positiveFloats)) {
- const parsedFloat = parseFloat(value);
- if (isNaN(parsedFloat) || parsedFloat < 0) {
- const err = new InvalidParameterValue(
- `req.body.${param}`);
- res.status(err.code).send(errorJson(err.message));
- return;
- }
- }
- const parsedMinTimestamp = parseFloat(minTimestamp);
- const parsedMaxTimestamp = parseFloat(maxTimestamp);
-
- const parsedNoOfAlerts = parseInt(noOfAlerts);
- if (isNaN(parsedNoOfAlerts) || parsedNoOfAlerts <= 0) {
- const err = new InvalidParameterValue(
- 'req.body.noOfAlerts');
- res.status(err.code).send(errorJson(err.message));
- return;
+ let result = resultJson({ alerts: [] });
+ try {
+ if (mongoInterface.client) {
+ const db = mongoInterface.client.db(mongoDB);
+ if (chains.length > 0) {
+ const queryList: any[] = [];
+ for (let i = 1; i < chains.length; i++) {
+ queryList.push({ $unionWith: chains[i] });
+ }
+ queryList.push(
+ { $match: { doc_type: "alert" } },
+ { $unwind: "$alerts" },
+ {
+ $match: {
+ "alerts.severity": { $in: severities },
+ "alerts.origin": { $in: sources },
+ "alerts.timestamp": {
+ $gte: parsedMinTimestamp,
+ $lte: parsedMaxTimestamp,
+ },
+ },
+ },
+ { $sort: { "alerts.timestamp": -1, _id: 1 } },
+ { $limit: parsedNoOfAlerts },
+ { $group: { _id: null, alerts: { $push: "$alerts" } } },
+ { $project: { _id: 0, alerts: "$alerts" } }
+ );
+ const collection = db.collection(chains[0]);
+ const docs = await collection.aggregate(queryList).toArray();
+ for (const doc of docs) {
+ result.result.alerts = result.result.alerts.concat(doc.alerts);
+ }
}
+ res.status(Status.SUCCESS).send(result);
+ return;
+ } else {
+ // This is done just for the sake of completion, as it is very
+ // unlikely to occur.
+ const err = new MongoClientNotInitialised();
+ res.status(err.code).send(errorJson(err.message));
+ return;
+ }
+ } catch (err) {
+ console.error(err);
+ const retrievalErr = new CouldNotRetrieveDataFromMongo();
+ res.status(retrievalErr.code).send(errorJson(retrievalErr.message));
+ return;
+ }
+ }
+);
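+
+// A minimal example request body for this endpoint (all values are
+// illustrative; chain collection names and source IDs depend on the
+// installation):
+//
+// {
+//   "chains": ["chain_collection_1"],
+//   "severities": ["INFO", "WARNING", "CRITICAL", "ERROR"],
+//   "sources": ["system_id_1"],
+//   "minTimestamp": 0,
+//   "maxTimestamp": 2000000000,
+//   "noOfAlerts": 100
+// }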
- let result = resultJson({alerts: []});
- try {
- if (mongoInterface.client) {
- const db = mongoInterface.client.db(mongoDB);
- if (chains.length > 0) {
- let queryList: any = [];
- for (let i = 1; i < chains.length; i++) {
- queryList.push({$unionWith: chains[i]})
- }
- queryList.push(
- {$match: {doc_type: "alert"}},
- {$unwind: "$alerts"},
- {
- $match: {
- "alerts.severity": {$in: severities},
- "alerts.origin": {$in: sources},
- "alerts.timestamp": {
- "$gte": parsedMinTimestamp,
- "$lte": parsedMaxTimestamp
- }
- }
- },
- {$sort: {"alerts.timestamp": -1, _id: 1}},
- {$limit: parsedNoOfAlerts},
- {$group: {_id: null, alerts: {$push: "$alerts"}}},
- {$project: {_id: 0, alerts: "$alerts"}},
- );
- const collection = db.collection(chains[0]);
- const docs = await collection.aggregate(queryList)
- .toArray();
- for (const doc of docs) {
- result.result.alerts = result.result.alerts.concat(
- doc.alerts)
- }
- }
- res.status(Status.SUCCESS).send(result);
- return;
- } else {
- // This is done just for the sake of completion, as it is very
- // unlikely to occur.
- const err = new MongoClientNotInitialised();
- res.status(err.code).send(errorJson(err.message));
- return;
- }
- } catch (err) {
- console.error(err);
- const retrievalErr = new CouldNotRetrieveDataFromMongo();
- res.status(retrievalErr.code).send(errorJson(
- retrievalErr.message));
- return;
- }
+app.post(
+ "/server/mongo/metrics",
+ async (req: express.Request, res: express.Response) => {
+ console.log("Received POST request for %s", req.url);
+ const {
+ chains,
+ systems,
+ minTimestamp,
+ maxTimestamp,
+ noOfMetricsPerSource,
+ } = req.body;
+
+ // Check that all parameters have been sent
+ const missingKeysList: string[] = missingValues({
+ chains,
+ systems,
+ minTimestamp,
+ maxTimestamp,
+ noOfMetricsPerSource,
});
+ if (missingKeysList.length !== 0) {
+ const err = new MissingKeysInBody(...missingKeysList);
+ res.status(err.code).send(errorJson(err.message));
+ return;
+ }
-app.post('/server/mongo/metrics',
- async (req: express.Request, res: express.Response) => {
- console.log('Received POST request for %s', req.url);
- const {
- chains,
- systems,
- minTimestamp,
- maxTimestamp,
- noOfMetricsPerSource
- } = req.body;
-
- // Check that all parameters have been sent
- const missingKeysList: string[] = missingValues({
- chains,
- systems,
- minTimestamp,
- maxTimestamp,
- noOfMetricsPerSource
- });
- if (missingKeysList.length !== 0) {
- const err = new MissingKeysInBody(...missingKeysList);
- res.status(err.code).send(errorJson(err.message));
- return;
- }
+ // --------------------- Input Validation -------------------
- // --------------------- Input Validation -------------------
+ const arrayBasedStringParams = { chains, systems };
+ for (const [param, value] of Object.entries(arrayBasedStringParams)) {
+ if (!Array.isArray(value) || !allElementsInListHaveTypeString(value)) {
+ const err = new InvalidParameterValue(`req.body.${param}`);
+ res.status(err.code).send(errorJson(err.message));
+ return;
+ }
+ }
- const arrayBasedStringParams = {chains, systems};
- for (const [param, value] of Object.entries(arrayBasedStringParams)) {
- if (!Array.isArray(value) ||
- !allElementsInListHaveTypeString(value)) {
- const err = new InvalidParameterValue(
- `req.body.${param}`);
- res.status(err.code).send(errorJson(err.message));
- return;
- }
- }
+ const positiveFloats = { minTimestamp, maxTimestamp };
+ for (const [param, value] of Object.entries(positiveFloats)) {
+ const parsedFloat = parseFloat(value);
+ if (isNaN(parsedFloat) || parsedFloat < 0) {
+ const err = new InvalidParameterValue(`req.body.${param}`);
+ res.status(err.code).send(errorJson(err.message));
+ return;
+ }
+ }
+ const parsedMinTimestamp = parseFloat(minTimestamp);
+ const parsedMaxTimestamp = parseFloat(maxTimestamp);
+
+ const parsedNoOfMetricsPerSource = parseInt(noOfMetricsPerSource, 10);
+ if (isNaN(parsedNoOfMetricsPerSource) || parsedNoOfMetricsPerSource <= 0) {
+ const err = new InvalidParameterValue("req.body.noOfMetricsPerSource");
+ res.status(err.code).send(errorJson(err.message));
+ return;
+ }
- const positiveFloats = {minTimestamp, maxTimestamp};
- for (const [param, value] of Object.entries(positiveFloats)) {
- const parsedFloat = parseFloat(value);
- if (isNaN(parsedFloat) || parsedFloat < 0) {
- const err = new InvalidParameterValue(
- `req.body.${param}`);
- res.status(err.code).send(errorJson(err.message));
- return;
- }
- }
- const parsedMinTimestamp = parseFloat(minTimestamp);
- const parsedMaxTimestamp = parseFloat(maxTimestamp);
-
- const parsedNoOfMetricsPerSource = parseInt(noOfMetricsPerSource);
- if (isNaN(parsedNoOfMetricsPerSource) || parsedNoOfMetricsPerSource <=
- 0) {
- const err = new InvalidParameterValue(
- 'req.body.noOfMetricsPerSource');
- res.status(err.code).send(errorJson(err.message));
- return;
- }
+ // In the future, we will need to retrieve node metrics, hence a nodes
+ // field will be added. Note that a node ID can be present in both the
+ // systems and the nodes fields.
+
+ let result = resultJson({ metrics: {} });
+ try {
+ if (mongoInterface.client) {
+ const db = mongoInterface.client.db(mongoDB);
+ if (chains.length > 0) {
+ const queryPromise = new Promise<void>((resolve, _) => {
+ systems.forEach(
+ async (source: string, i: number): Promise<void> => {
+ const queryList: any[] = [];
+ for (let j = 1; j < chains.length; j++) {
+ queryList.push({ $unionWith: chains[j] });
+ }
- // In the future, we need to retrieve node metrics, hence adding a
- // nodes field. Also, a node ID can be present in both the systems and
- // the nodes fields.
-
- let result = resultJson({metrics: {}});
- try {
- if (mongoInterface.client) {
- const db = mongoInterface.client.db(mongoDB);
- if (chains.length > 0) {
- var queryPromise = new Promise((resolve, _) => {
- systems.forEach(async (source: string,
- i: number): Promise => {
- let queryList: any = [];
- for (let i = 1; i < chains.length; i++) {
- queryList.push({$unionWith: chains[i]})
- }
-
- if (!(source in result.result.metrics)) {
- result.result.metrics[source] = []
- }
-
- const originSource = "$".concat(source);
- const timestampSource = source.concat(".timestamp");
- queryList.push(
- {$match: {doc_type: "system"}},
- {$unwind: originSource},
- {
- $match: {
- [timestampSource]: {
- "$gte": parsedMinTimestamp,
- "$lte": parsedMaxTimestamp
- }
- }
- },
- {$sort: {"timestamp": -1, _id: 1}},
- {$limit: parsedNoOfMetricsPerSource},
- );
- const collection = db.collection(chains[0]);
- const docs = await collection.aggregate(queryList)
- .toArray();
- for (const doc of docs) {
- result.result.metrics[source] =
- result.result.metrics[source].concat(
- doc[source])
- }
- if (i === systems.length - 1) resolve();
- });
- });
-
- queryPromise.then(() => {
- res.status(Status.SUCCESS).send(result);
- return;
- });
- } else {
- res.status(Status.SUCCESS).send(result);
- return;
+ if (!(source in result.result.metrics)) {
+ result.result.metrics[source] = [];
}
- } else {
- // This is done just for the sake of completion, as it is very
- // unlikely to occur.
- const err = new MongoClientNotInitialised();
- res.status(err.code).send(errorJson(err.message));
- return;
- }
- } catch (err) {
- console.error(err);
- const retrievalErr = new CouldNotRetrieveDataFromMongo();
- res.status(retrievalErr.code).send(errorJson(
- retrievalErr.message));
+
+ const originSource = "$".concat(source);
+ const timestampSource = source.concat(".timestamp");
+ queryList.push(
+ { $match: { doc_type: "system" } },
+ { $unwind: originSource },
+ {
+ $match: {
+ [timestampSource]: {
+ $gte: parsedMinTimestamp,
+ $lte: parsedMaxTimestamp,
+ },
+ },
+ },
+ { $sort: { timestamp: -1, _id: 1 } },
+ { $limit: parsedNoOfMetricsPerSource }
+ );
+ const collection = db.collection(chains[0]);
+ const docs = await collection.aggregate(queryList).toArray();
+ for (const doc of docs) {
+ result.result.metrics[source] = result.result.metrics[
+ source
+ ].concat(doc[source]);
+ }
+ if (i === systems.length - 1) resolve();
+ }
+ );
+ });
+
+ queryPromise.then(() => {
+ res.status(Status.SUCCESS).send(result);
return;
+ });
+ } else {
+ res.status(Status.SUCCESS).send(result);
+ return;
}
- });
+ } else {
+ // This is done just for the sake of completion, as it is very
+ // unlikely to occur.
+ const err = new MongoClientNotInitialised();
+ res.status(err.code).send(errorJson(err.message));
+ return;
+ }
+ } catch (err) {
+ console.error(err);
+ const retrievalErr = new CouldNotRetrieveDataFromMongo();
+ res.status(retrievalErr.code).send(errorJson(retrievalErr.message));
+ return;
+ }
+ }
+);
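+
+// A minimal example request body for this endpoint (values are
+// illustrative):
+//
+// {
+//   "chains": ["chain_collection_1"],
+//   "systems": ["system_id_1"],
+//   "minTimestamp": 0,
+//   "maxTimestamp": 2000000000,
+//   "noOfMetricsPerSource": 50
+// }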
// ---------------------------------------- Redis Endpoints
// This endpoint expects a list of parent ids inside the body structure.
-app.post('/server/redis/alertsOverview',
- async (req: express.Request, res: express.Response) => {
- console.log('Received POST request for %s', req.url);
- const parentIds: AlertsOverviewInput = req.body.parentIds;
-
- // Check if some required keys are missing in the body object, if yes
- // notify the client.
- const missingKeysList: string[] = missingValues({parentIds});
- if (missingKeysList.length !== 0) {
- const err = new MissingKeysInBody(...missingKeysList);
- res.status(err.code).send(errorJson(err.message));
- return;
- }
+app.post(
+ "/server/redis/alertsOverview",
+ async (req: express.Request, res: express.Response) => {
+ console.log("Received POST request for %s", req.url);
+ const parentIds: AlertsOverviewInput = req.body.parentIds;
+
+ // Check whether any required keys are missing from the body object;
+ // if so, notify the client.
+ const missingKeysList: string[] = missingValues({ parentIds });
+ if (missingKeysList.length !== 0) {
+ const err = new MissingKeysInBody(...missingKeysList);
+ res.status(err.code).send(errorJson(err.message));
+ return;
+ }
+
+ // Check if the passed dict is valid
+ if (!isAlertsOverviewInputValid(parentIds)) {
+ const err = new InvalidJsonSchema("req.body.parentIds");
+ res.status(err.code).send(errorJson(err.message));
+ return;
+ }
- // Check if the passed dict is valid
- if (!isAlertsOverviewInputValid(parentIds)) {
- const err = new InvalidJsonSchema("req.body.parentIds");
- res.status(err.code).send(errorJson(err.message));
+ // Construct the Redis hashes and keys. The keys are only used to check
+ // whether they are present within Redis, since we retrieve all keys.
+ const redisHashes: RedisHashes = getRedisHashes();
+ const redisHashesNamespace: RedisKeys = addPrefixToKeys(
+ redisHashes,
+ `${uniqueAlerterIdentifier}:`
+ );
+ const redisHashesPostfix: RedisKeys = addPostfixToKeys(
+ redisHashesNamespace,
+ "_"
+ );
+ const alertKeysSystem: AlertKeysSystem = getAlertKeysSystem();
+ const alertKeysSystemPostfix: RedisKeys = addPostfixToKeys(
+ alertKeysSystem,
+ "_"
+ );
+ const alertKeysNode: AlertKeysNode = getAlertKeysNode();
+ const alertKeysNodePostfix: RedisKeys = addPostfixToKeys(
+ alertKeysNode,
+ "_"
+ );
+ const alertKeysGitHubRepo: AlertKeysGitHubRepo = getAlertKeysGitHubRepo();
+ const alertKeysGitHubRepoPostfix: RedisKeys = addPostfixToKeys(
+ alertKeysGitHubRepo,
+ "_"
+ );
+ const alertKeysDockerHubRepo: AlertKeysDockerHubRepo =
+ getAlertKeysDockerHubRepo();
+ const alertKeysDockerHubRepoPostfix: RedisKeys = addPostfixToKeys(
+ alertKeysDockerHubRepo,
+ "_"
+ );
+
+ let alertsData: AlertsOverviewAlertData[] = [];
+ let result: AlertsOverviewResult = resultJson({});
+
+ if (redisInterface.client) {
+ let encounteredError = false;
+
+ // Using multi() queues the commands so that they are only executed
+ // when exec() is called, and this is done atomically.
+ const redisMulti = redisInterface.client.multi();
+
+ for (const [parentId, sourcesObject] of Object.entries(parentIds)) {
+ const parentHash: string = `${redisHashesPostfix.parent}${parentId}`;
+ result.result[parentId] = {
+ info: 0,
+ critical: 0,
+ warning: 0,
+ error: 0,
+ problems: {},
+ releases: {},
+ tags: {},
+ };
+
+ redisMulti.hgetall(parentHash, (err: Error | null, values: any) => {
+ if (err) {
+ console.error(err);
+ // Skip resolving if an error was already encountered,
+ // since the call has already been resolved.
+ if (!encounteredError) {
+ encounteredError = true;
+ const retrievalErr = new CouldNotRetrieveDataFromRedis(err);
+ res
+ .status(retrievalErr.code)
+ .send(errorJson(retrievalErr.message));
+ }
return;
- }
+ }
+
+ if (values === null) {
+ values = {};
+ }
+
+ // Update the info count for keys not present among the retrieved keys.
+ sourcesObject.systems.forEach((systemId) => {
+ const constructedKeys: RedisKeys = addPostfixToKeys(
+ alertKeysSystemPostfix,
+ systemId
+ );
+ Object.values(constructedKeys).forEach((key) => {
+ if (!(key in values)) {
+ result.result[parentId].info++;
+ }
+ });
+ });
+ sourcesObject.nodes.forEach((nodeId) => {
+ const constructedKeys: RedisKeys = addPostfixToKeys(
+ alertKeysNodePostfix,
+ nodeId
+ );
+ Object.values(constructedKeys).forEach((key) => {
+ if (!(key in values)) {
+ result.result[parentId].info++;
+ }
+ });
+ });
+ sourcesObject.github_repos.forEach((repoId) => {
+ const constructedKeys: RedisKeys = addPostfixToKeys(
+ alertKeysGitHubRepoPostfix,
+ repoId
+ );
+ Object.values(constructedKeys).forEach((key) => {
+ if (!(key in values)) {
+ result.result[parentId].info++;
+ }
+ });
+ });
+ sourcesObject.dockerhub_repos.forEach((repoId) => {
+ const constructedKeys: RedisKeys = addPostfixToKeys(
+ alertKeysDockerHubRepoPostfix,
+ repoId
+ );
+ Object.values(constructedKeys).forEach((key) => {
+ if (!(key in values)) {
+ result.result[parentId].info++;
+ }
+ });
+ });
+
+ for (const [key, value] of Object.entries(values)) {
+ // Skip the checks if an error was encountered,
+ // since the call has already been resolved.
+ if (encounteredError) {
+ break;
+ }
+ let found = false;
+
+ sourcesObject.systems.forEach((systemId) => {
+ if (
+ !found &&
+ key.includes(systemId) &&
+ key.includes(alertKeysSystemPrefix)
+ ) {
+ found = true;
+ alertsData.push({
+ parentId: parentId,
+ monitorableId: systemId,
+ key: key,
+ value: value,
+ });
+ }
+ });
+ if (!found) {
+ sourcesObject.nodes.forEach((nodeId) => {
+ if (
+ !found &&
+ key.includes(nodeId) &&
+ (key.includes(alertKeysClNodePrefix) ||
+ key.includes(alertKeysEvmNodePrefix) ||
+ key.includes(alertKeysCosmosNodePrefix) ||
+ key.includes(alertKeysSubstrateNodePrefix) ||
+ key.includes(alertKeysClContractPrefix))
+ ) {
+ found = true;
+ alertsData.push({
+ parentId: parentId,
+ monitorableId: nodeId,
+ key: key,
+ value: value,
+ });
+ }
+ });
+ }
+ if (!found) {
+ sourcesObject.github_repos.forEach((repoId) => {
+ if (
+ !found &&
+ key.includes(repoId) &&
+ key.includes(alertKeysGitHubPrefix)
+ ) {
+ found = true;
+ alertsData.push({
+ parentId: parentId,
+ monitorableId: repoId,
+ key: key,
+ value: value,
+ });
+ }
+ });
+ }
+ if (!found) {
+ sourcesObject.dockerhub_repos.forEach((repoId) => {
+ if (
+ !found &&
+ key.includes(repoId) &&
+ key.includes(alertKeysDockerHubPrefix)
+ ) {
+ found = true;
+ alertsData.push({
+ parentId: parentId,
+ monitorableId: repoId,
+ key: key,
+ value: value,
+ });
+ }
+ });
+ }
- // Construct the redis hashes and keys. The keys are only used to check
- // whether they are available within redis since we get all keys.
- const redisHashes: RedisHashes = getRedisHashes();
- const redisHashesNamespace: RedisKeys = addPrefixToKeys(
- redisHashes, `${uniqueAlerterIdentifier}:`);
- const redisHashesPostfix: RedisKeys = addPostfixToKeys(
- redisHashesNamespace, '_');
- const alertKeysSystem: AlertKeysSystem = getAlertKeysSystem();
- const alertKeysSystemPostfix: RedisKeys = addPostfixToKeys(
- alertKeysSystem, '_');
- const alertKeysNode: AlertKeysNode = getAlertKeysNode();
- const alertKeysNodePostfix: RedisKeys = addPostfixToKeys(
- alertKeysNode, '_');
- const alertKeysGitHubRepo: AlertKeysGitHubRepo =
- getAlertKeysGitHubRepo();
- const alertKeysGitHubRepoPostfix: RedisKeys =
- addPostfixToKeys(alertKeysGitHubRepo, '_');
- const alertKeysDockerHubRepo: AlertKeysDockerHubRepo =
- getAlertKeysDockerHubRepo();
- const alertKeysDockerHubRepoPostfix: RedisKeys =
- addPostfixToKeys(alertKeysDockerHubRepo, '_');
-
- let alertsData: AlertsOverviewAlertData[] = []
- let result: AlertsOverviewResult = resultJson({});
-
- if (redisInterface.client) {
- let encounteredError = false;
-
- // Using multi() means that all commands are only performed once
- // exec() is called, and this is done atomically.
- const redisMulti = redisInterface.client.multi();
-
- for (const [parentId, sourcesObject] of Object.entries(parentIds)) {
- const parentHash: string =
- `${redisHashesPostfix.parent}${parentId}`
- result.result[parentId] = {
- "info": 0,
- "critical": 0,
- "warning": 0,
- "error": 0,
- "problems": {},
- "releases": {},
- "tags": {},
- };
-
- redisMulti.hgetall(parentHash, (err: Error | null, values: any) => {
- if (err) {
- console.error(err);
- // Skip resolve if an error was already encountered
- // since call is already resolved.
- if (!encounteredError) {
- encounteredError = true;
- const retrievalErr =
- new CouldNotRetrieveDataFromRedis(err);
- res.status(retrievalErr.code).send(errorJson(
- retrievalErr.message));
- }
- return;
- }
-
- if (values === null) {
- values = {};
- }
-
- // Update info count with keys not found in retrieved keys.
- sourcesObject.systems.forEach((systemId) => {
- const constructedKeys: RedisKeys = addPostfixToKeys(
- alertKeysSystemPostfix, systemId);
- Object.values(constructedKeys).forEach((key) => {
- if (!(key in values)) {
- result.result[parentId].info++;
- }
- });
- });
- sourcesObject.nodes.forEach((nodeId) => {
- const constructedKeys: RedisKeys = addPostfixToKeys(
- alertKeysNodePostfix, nodeId);
- Object.values(constructedKeys).forEach((key) => {
- if (!(key in values)) {
- result.result[parentId].info++;
- }
- });
- });
- sourcesObject.github_repos.forEach((repoId) => {
- const constructedKeys: RedisKeys = addPostfixToKeys(
- alertKeysGitHubRepoPostfix, repoId);
- Object.values(constructedKeys).forEach((key) => {
- if (!(key in values)) {
- result.result[parentId].info++;
- }
- });
- });
- sourcesObject.dockerhub_repos.forEach((repoId) => {
- const constructedKeys: RedisKeys = addPostfixToKeys(
- alertKeysDockerHubRepoPostfix, repoId);
- Object.values(constructedKeys).forEach((key) => {
- if (!(key in values)) {
- result.result[parentId].info++;
- }
- });
- });
-
- for (const [key, value] of Object.entries(values)) {
- // Skip checks if encountered error since
- // call is already resolved.
- if (encounteredError) {
- break;
- }
- let found = false;
-
- sourcesObject.systems.forEach((systemId) => {
- if (!found && key.includes(systemId) &&
- key.includes(alertKeysSystemPrefix)) {
- found = true;
- alertsData.push({
- parentId: parentId,
- monitorableId: systemId,
- key: key,
- value: value
- })
- }
- });
- if (!found) {
- sourcesObject.nodes.forEach((nodeId) => {
- if (!found && key.includes(nodeId) &&
- (key.includes(alertKeysClNodePrefix)
- || key.includes(alertKeysEvmNodePrefix)
- || (key.includes(
- alertKeysCosmosNodePrefix))
- || (key.includes(
- alertKeysSubstrateNodePrefix))
- || (key.includes(
- alertKeysClContractPrefix)))
- ) {
- found = true;
- alertsData.push({
- parentId: parentId,
- monitorableId: nodeId,
- key: key,
- value: value
- })
- }
- });
- }
- if (!found) {
- sourcesObject.github_repos.forEach((repoId) => {
- if (!found && key.includes(repoId) &&
- key.includes(alertKeysGitHubPrefix)) {
- found = true;
- alertsData.push({
- parentId: parentId,
- monitorableId: repoId,
- key: key,
- value: value
- })
- }
- });
- }
- if (!found) {
- sourcesObject.dockerhub_repos.forEach((repoId) => {
- if (!found && key.includes(repoId) &&
- key.includes(alertKeysDockerHubPrefix)) {
- found = true;
- alertsData.push({
- parentId: parentId,
- monitorableId: repoId,
- key: key,
- value: value
- })
- }
- });
- }
-
- if (!found && sourcesObject.include_chain_sourced_alerts) {
- if (alertKeysChainSourced.includes(key) ||
- alertKeysChainSourcedWithUniqueIdentifier.some(
- alertKey => key.includes(alertKey))) {
- alertsData.push({
- parentId: parentId,
- monitorableId: parentId,
- key: key,
- value: value
- })
- }
- }
- }
+ if (!found && sourcesObject.include_chain_sourced_alerts) {
+ if (
+ alertKeysChainSourced.includes(key) ||
+ alertKeysChainSourcedWithUniqueIdentifier.some((alertKey) =>
+ key.includes(alertKey)
+ )
+ ) {
+ alertsData.push({
+ parentId: parentId,
+ monitorableId: parentId,
+ key: key,
+ value: value,
});
+ }
}
+ }
+ });
+ }
- redisMulti.exec((err: Error | null, _: any) => {
- if (err) {
- console.error(err);
- // Skip resolve if an error was already encountered
- // since call is already resolved.
- if (!encounteredError) {
- encounteredError = true;
- const retrievalErr =
- new CouldNotRetrieveDataFromRedis(err);
- res.status(retrievalErr.code).send(errorJson(
- retrievalErr.message));
- }
- return
+ redisMulti.exec((err: Error | null, _: any) => {
+ if (err) {
+ console.error(err);
+ // Skip resolving if an error was already encountered,
+ // since the call has already been resolved.
+ if (!encounteredError) {
+ encounteredError = true;
+ const retrievalErr = new CouldNotRetrieveDataFromRedis(err);
+ res.status(retrievalErr.code).send(errorJson(retrievalErr.message));
+ }
+ return;
+ }
+ // Skip resolving if an error was encountered, since the call
+ // has already been resolved.
+ if (!encounteredError) {
+ const currentTimestamp = Math.floor(Date.now() / 1000);
+ alertsData.forEach(
+ (data: {
+ parentId: string;
+ monitorableId: string;
+ key: string;
+ value: string;
+ }) => {
+ // Skip the checks if an error was encountered,
+ // since the call has already been resolved.
+ if (encounteredError) {
+ return;
+ }
+ let value: any = null;
+ try {
+ value = JSON.parse(data.value);
+ } catch (err) {
+ // This is done just for the sake of
+ // completion, as it is very unlikely
+ // to occur.
+ const invalidValueErr = new InvalidValueRetrievedFromRedis(
+ data.value
+ );
+ res
+ .status(invalidValueErr.code)
+ .send(errorJson(invalidValueErr.message));
+ encounteredError = true;
+ return;
+ }
+ if (
+ value &&
+ value.constructor === Object &&
+ "message" in value &&
+ "severity" in value &&
+ "expiry" in value
+ ) {
+ // Initialise the array of problems if it does
+ // not exist yet and there are indeed
+ // problems to record.
+ if (
+ value.severity !== Severities.INFO &&
+ !result.result[data.parentId].problems[data.monitorableId]
+ ) {
+ result.result[data.parentId].problems[data.monitorableId] =
+ [];
}
- // Skip resolve if encountered error since call is already
- // resolved.
- if (!encounteredError) {
- const currentTimestamp = Math.floor(Date.now() / 1000);
- alertsData.forEach((data: {
- parentId: string,
- monitorableId: string,
- key: string,
- value: string
- }) => {
- // Skip checks if encountered error since
- // call is already resolved.
- if (encounteredError) {
- return;
- }
- let value: any = null;
- try {
- value = JSON.parse(data.value);
- } catch (err) {
- // This is done just for the sake of
- // completion, as it is very unlikely
- // to occur.
- const invalidValueErr =
- new InvalidValueRetrievedFromRedis(data.value);
- res.status(invalidValueErr.code).send(errorJson(
- invalidValueErr.message));
- encounteredError = true;
- return;
- }
- if (value && value.constructor === Object &&
- "message" in value && "severity" in
- value && "expiry" in value) {
- // Add array of problems if not
- // initialised yet and there is indeed
- // problems.
- if (value.severity !== Severities.INFO &&
- !result.result[data.parentId].problems[
- data.monitorableId]) {
- result.result[data.parentId].problems[
- data.monitorableId] = []
- }
- // If the alerter has detected a new
- // release add it to the list of
- // releases
- const newReleaseKey: string =
- addPostfixToKeys(alertKeysGitHubRepoPostfix,
- data.monitorableId).github_release;
- if (data.key === newReleaseKey) {
- result.result[data.parentId].releases[
- data.monitorableId] = value
- }
- // If the alerter has detected a tag
- // change, add it to the list of tags
- const dockerHubTagsKeys =
- addPostfixToKeys(alertKeysDockerHubRepoPostfix,
- data.monitorableId);
- const changedTagsKeys = [
- dockerHubTagsKeys.dockerhub_new_tag,
- dockerHubTagsKeys.dockerhub_updated_tag,
- dockerHubTagsKeys.dockerhub_deleted_tag
- ];
- if (changedTagsKeys.includes(data.key)) {
- if (!(data.monitorableId in
- result.result[data.parentId].tags)) {
- result.result[data.parentId].tags[
- data.monitorableId] = {
- new: {},
- updated: {},
- deleted: {}
- }
- }
- switch (data.key) {
- case changedTagsKeys[0]:
- result.result[data.parentId].tags[
- data.monitorableId]
- ['new'] = value;
- break;
- case changedTagsKeys[1]:
- result.result[data.parentId].tags[
- data.monitorableId]
- ['updated'] = value;
- break;
- case changedTagsKeys[2]:
- result.result[data.parentId].tags[
- data.monitorableId]
- ['deleted'] = value;
- break;
- }
- }
- if (value.expiry && currentTimestamp >=
- value.expiry) {
- result.result[data.parentId].info++;
- } else {
- // Increase the counter and save the
- // problems.
- if (value.severity === Severities.INFO) {
- result.result[data.parentId].info++;
- } else if (value.severity ===
- Severities.CRITICAL) {
- result.result[data.parentId].critical++;
- result.result[data.parentId].problems[
- data.monitorableId].push(value)
- } else if (value.severity ===
- Severities.WARNING) {
- result.result[data.parentId].warning++;
- result.result[data.parentId].problems[
- data.monitorableId].push(value)
- } else if (
- value.severity === Severities.ERROR) {
- result.result[data.parentId].error++;
- result.result[data.parentId].problems[
- data.monitorableId].push(value)
- }
- }
- } else {
- // This is done just for the sake of
- // completion, as it is very unlikely
- // to occur.
- const err =
- new InvalidValueRetrievedFromRedis(
- value);
- res.status(err.code)
- .send(errorJson(err.message));
- encounteredError = true;
- return;
- }
- })
-
- // Skip resolve if encountered error since call is already
- // resolved.
- if (!encounteredError) {
- res.status(Status.SUCCESS).send(result);
- }
+ // If the alerter has detected a new
+ // release, add it to the list of
+ // releases.
+ const newReleaseKey: string = addPostfixToKeys(
+ alertKeysGitHubRepoPostfix,
+ data.monitorableId
+ ).github_release;
+ if (data.key === newReleaseKey) {
+ result.result[data.parentId].releases[data.monitorableId] =
+ value;
+ }
+ // If the alerter has detected a tag
+ // change, add it to the list of tags
+ const dockerHubTagsKeys = addPostfixToKeys(
+ alertKeysDockerHubRepoPostfix,
+ data.monitorableId
+ );
+ const changedTagsKeys = [
+ dockerHubTagsKeys.dockerhub_new_tag,
+ dockerHubTagsKeys.dockerhub_updated_tag,
+ dockerHubTagsKeys.dockerhub_deleted_tag,
+ ];
+ if (changedTagsKeys.includes(data.key)) {
+ if (
+ !(data.monitorableId in result.result[data.parentId].tags)
+ ) {
+ result.result[data.parentId].tags[data.monitorableId] = {
+ new: {},
+ updated: {},
+ deleted: {},
+ };
+ }
+ switch (data.key) {
+ case changedTagsKeys[0]:
+ result.result[data.parentId].tags[data.monitorableId][
+ "new"
+ ] = value;
+ break;
+ case changedTagsKeys[1]:
+ result.result[data.parentId].tags[data.monitorableId][
+ "updated"
+ ] = value;
+ break;
+ case changedTagsKeys[2]:
+ result.result[data.parentId].tags[data.monitorableId][
+ "deleted"
+ ] = value;
+ break;
+ }
+ }
+ if (value.expiry && currentTimestamp >= value.expiry) {
+ result.result[data.parentId].info++;
+ } else {
+ // Increase the counter and save the
+ // problems.
+ if (value.severity === Severities.INFO) {
+ result.result[data.parentId].info++;
+ } else if (value.severity === Severities.CRITICAL) {
+ result.result[data.parentId].critical++;
+ result.result[data.parentId].problems[
+ data.monitorableId
+ ].push(value);
+ } else if (value.severity === Severities.WARNING) {
+ result.result[data.parentId].warning++;
+ result.result[data.parentId].problems[
+ data.monitorableId
+ ].push(value);
+ } else if (value.severity === Severities.ERROR) {
+ result.result[data.parentId].error++;
+ result.result[data.parentId].problems[
+ data.monitorableId
+ ].push(value);
+ }
}
+ } else {
+ // This is done just for the sake of
+ // completion, as it is very unlikely
+ // to occur.
+ const err = new InvalidValueRetrievedFromRedis(value);
+ res.status(err.code).send(errorJson(err.message));
+ encounteredError = true;
return;
- });
- } else {
- // This is done just for the sake of completion, as it is very
- // unlikely to occur.
- const err = new RedisClientNotInitialised();
- res.status(err.code).send(errorJson(err.message));
- return;
+ }
+ }
+ );
+
+ // Skip resolving if an error was encountered, since the call
+ // has already been resolved.
+ if (!encounteredError) {
+ res.status(Status.SUCCESS).send(result);
+ }
}
- });
+ return;
+ });
+ } else {
+ // This is done just for the sake of completion, as it is very
+ // unlikely to occur.
+ const err = new RedisClientNotInitialised();
+ res.status(err.code).send(errorJson(err.message));
+ return;
+ }
+ }
+);
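+
+// A minimal example of the expected req.body.parentIds structure, based
+// on the fields read above (IDs are illustrative):
+//
+// {
+//   "parent_id_1": {
+//     "systems": ["system_id_1"],
+//     "nodes": ["node_id_1"],
+//     "github_repos": ["github_repo_id_1"],
+//     "dockerhub_repos": ["dockerhub_repo_id_1"],
+//     "include_chain_sourced_alerts": true
+//   }
+// }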
// This endpoint returns metrics and their values, for the requested sources
// and their chains
-app.post('/server/redis/metrics',
- async (req: express.Request, res: express.Response) => {
- console.log('Received POST request for %s', req.url);
- const parentIds: RedisMetricsInput = req.body.parentIds;
-
- // Check if some required keys are missing in the body object, if yes
- // notify the client.
- const missingKeysList: string[] = missingValues({parentIds});
- if (missingKeysList.length !== 0) {
- const err = new MissingKeysInBody(...missingKeysList);
- res.status(err.code).send(errorJson(err.message));
- return;
- }
+app.post(
+ "/server/redis/metrics",
+ async (req: express.Request, res: express.Response) => {
+ console.log("Received POST request for %s", req.url);
+ const parentIds: RedisMetricsInput = req.body.parentIds;
- // Check if the passed dict is valid
- if (!isRedisMetricsInputValid(parentIds)) {
- const err = new InvalidJsonSchema("req.body.parentIds");
- res.status(err.code).send(errorJson(err.message));
- return;
- }
+ // Check whether any required keys are missing from the body object;
+ // if so, notify the client.
+ const missingKeysList: string[] = missingValues({ parentIds });
+ if (missingKeysList.length !== 0) {
+ const err = new MissingKeysInBody(...missingKeysList);
+ res.status(err.code).send(errorJson(err.message));
+ return;
+ }
- // Construct the redis keys inside a JSON object indexed by parent hash
- // and the system/repo id. We also need a way to map the hash to the
- // parent id.
- const parentHashKeys: {
- [key: string]: { [key: string]: string[] }
- } = {};
- const parentHashId: { [key: string]: string } = {};
-
- const redisHashes: RedisHashes = getRedisHashes();
- const redisHashesNamespace: RedisKeys = addPrefixToKeys(
- redisHashes, `${uniqueAlerterIdentifier}:`);
- const redisHashesPostfix: RedisKeys = addPostfixToKeys(
- redisHashesNamespace, '_');
-
- const metricKeysSystem: SystemKeys = getSystemKeys();
- const metricKeysSystemPostfix: RedisKeys = addPostfixToKeys(
- metricKeysSystem, '_');
-
- const metricKeysGitHub: GitHubKeys = getGitHubKeys();
- const metricKeysGitHubPostfix: RedisKeys = addPostfixToKeys(
- metricKeysGitHub, '_');
-
- for (const [parentId, sourcesObject] of Object.entries(parentIds)) {
- const parentHash: string = redisHashesPostfix.parent + parentId;
- parentHashKeys[parentHash] = {};
- parentHashId[parentHash] = parentId;
- sourcesObject.systems.forEach((systemId) => {
- const constructedKeys: RedisKeys = addPostfixToKeys(
- metricKeysSystemPostfix, systemId);
- parentHashKeys[parentHash][systemId] = Object.values(
- constructedKeys)
- });
- sourcesObject.repos.forEach((repoId) => {
- const constructedKeys: RedisKeys = addPostfixToKeys(
- metricKeysGitHubPostfix, repoId);
- parentHashKeys[parentHash][repoId] = Object.values(
- constructedKeys)
- });
- }
+ // Check if the passed dict is valid
+ if (!isRedisMetricsInputValid(parentIds)) {
+ const err = new InvalidJsonSchema("req.body.parentIds");
+ res.status(err.code).send(errorJson(err.message));
+ return;
+ }
- let result: MetricsResult = resultJson({});
- if (redisInterface.client) {
- // Using multi() means that all commands are only performed once
- // exec() is called, and this is done atomically.
- const redisMulti = redisInterface.client.multi();
- let encounteredError = false;
- for (const [parentHash, monitorableKeysObject] of Object.entries(
- parentHashKeys)) {
- const parentId: string = parentHashId[parentHash];
- result.result[parentId] = {
- "system": {},
- "github": {},
- };
- for (const [monitorableId, keysList] of
- Object.entries(monitorableKeysObject)) {
- // Skip checks if encountered error since call
- // is already resolved.
- if (encounteredError) {
- break;
- }
- redisMulti.hmget(parentHash, keysList,
- (err: Error | null, values: any) => {
- if (err) {
- console.error(err);
- // Skip resolve if an error was already
- // encountered since call is already resolved.
- if (!encounteredError) {
- encounteredError = true;
- const retrievalErr =
- new CouldNotRetrieveDataFromRedis(err);
- res.status(retrievalErr.code).send(
- errorJson(retrievalErr.message));
- }
- return;
- }
- keysList.forEach(
- (key: string, i: number): void => {
- // Skip checks if encountered error since
- // call is already resolved.
- if (encounteredError) {
- return;
- }
- // Must be stringified JSON.parse does not
- // parse `None`
- let value: any = null;
- try {
- value = JSON.parse(JSON.stringify(
- values[i]));
- } catch (err) {
- // This is done just for the sake of
- // completion, as it is very unlikely
- // to occur.
- const invalidValueErr =
- new InvalidValueRetrievedFromRedis(
- values[i]);
- res.status(invalidValueErr.code)
- .send(errorJson(
- invalidValueErr.message));
- encounteredError = true;
- return;
- }
- if (parentIds[parentId].systems.includes(
- monitorableId)) {
- if (!(monitorableId in
- result.result[parentId].system)) {
- result.result[parentId]
- .system[monitorableId] =
- {}
- }
- result.result[parentId]
- .system[monitorableId][key.replace(
- '_' + monitorableId,
- '')] = value;
- } else if (parentIds[parentId].repos
- .includes(monitorableId)) {
- if (!(monitorableId in
- result.result[parentId].github)) {
- result.result[parentId]
- .github[monitorableId] =
- {}
- }
- result.result[parentId]
- .github[monitorableId][key.replace(
- '_' + monitorableId,
- '')] = value;
- }
-
- // In the future, we need to retrieve node
- // metrics, where a node ID can be present
- // in both the systems and the nodes
- // fields.
- });
- })
- }
- }
- redisMulti.exec((err: Error | null, _: any) => {
- if (err) {
- console.error(err);
- // Skip resolve if an error was already encountered
- // since call is already resolved.
- if (!encounteredError) {
- encounteredError = true;
- const retrievalErr =
- new CouldNotRetrieveDataFromRedis(err);
- res.status(retrievalErr.code).send(errorJson(
- retrievalErr.message));
- }
- return
- }
- // Skip resolve if encountered error since call is already
- // resolved.
+ // Construct the Redis keys inside a JSON object indexed by parent hash
+ // and the system/repo ID. We also need a way to map the hash to the
+ // parent ID.
+ const parentHashKeys: {
+ [key: string]: { [key: string]: string[] };
+ } = {};
+ const parentHashId: { [key: string]: string } = {};
+
+ const redisHashes: RedisHashes = getRedisHashes();
+ const redisHashesNamespace: RedisKeys = addPrefixToKeys(
+ redisHashes,
+ `${uniqueAlerterIdentifier}:`
+ );
+ const redisHashesPostfix: RedisKeys = addPostfixToKeys(
+ redisHashesNamespace,
+ "_"
+ );
+
+ const metricKeysSystem: SystemKeys = getSystemKeys();
+ const metricKeysSystemPostfix: RedisKeys = addPostfixToKeys(
+ metricKeysSystem,
+ "_"
+ );
+
+ const metricKeysGitHub: GitHubKeys = getGitHubKeys();
+ const metricKeysGitHubPostfix: RedisKeys = addPostfixToKeys(
+ metricKeysGitHub,
+ "_"
+ );
+
+ for (const [parentId, sourcesObject] of Object.entries(parentIds)) {
+ const parentHash: string = redisHashesPostfix.parent + parentId;
+ parentHashKeys[parentHash] = {};
+ parentHashId[parentHash] = parentId;
+ sourcesObject.systems.forEach((systemId) => {
+ const constructedKeys: RedisKeys = addPostfixToKeys(
+ metricKeysSystemPostfix,
+ systemId
+ );
+ parentHashKeys[parentHash][systemId] = Object.values(constructedKeys);
+ });
+ sourcesObject.repos.forEach((repoId) => {
+ const constructedKeys: RedisKeys = addPostfixToKeys(
+ metricKeysGitHubPostfix,
+ repoId
+ );
+ parentHashKeys[parentHash][repoId] = Object.values(constructedKeys);
+ });
+ }
+
+ let result: MetricsResult = resultJson({});
+ if (redisInterface.client) {
+ // Using multi() queues the commands so that they are only executed
+ // when exec() is called, and this is done atomically.
+ const redisMulti = redisInterface.client.multi();
+ let encounteredError = false;
+ for (const [parentHash, monitorableKeysObject] of Object.entries(
+ parentHashKeys
+ )) {
+ const parentId: string = parentHashId[parentHash];
+ result.result[parentId] = {
+ system: {},
+ github: {},
+ };
+ for (const [monitorableId, keysList] of Object.entries(
+ monitorableKeysObject
+ )) {
+ // Skip the checks if an error was encountered, since the call
+ // has already been resolved.
+ if (encounteredError) {
+ break;
+ }
+ redisMulti.hmget(
+ parentHash,
+ keysList,
+ (err: Error | null, values: any) => {
+ if (err) {
+ console.error(err);
+ // Skip resolving if an error was already
+ // encountered, since the call has already been resolved.
if (!encounteredError) {
- res.status(Status.SUCCESS).send(result);
+ encounteredError = true;
+ const retrievalErr = new CouldNotRetrieveDataFromRedis(err);
+ res
+ .status(retrievalErr.code)
+ .send(errorJson(retrievalErr.message));
}
return;
- });
- } else {
- // This is done just for the sake of completion, as it is very
- // unlikely to occur.
- const err = new RedisClientNotInitialised();
- res.status(err.code).send(errorJson(err.message));
- return;
+ }
+ keysList.forEach((key: string, i: number): void => {
+ // Skip the checks if an error was encountered,
+ // since the call has already been resolved.
+ if (encounteredError) {
+ return;
+ }
+ // The value must be stringified first, since
+ // JSON.parse does not parse `None`.
+ let value: any = null;
+ try {
+ value = JSON.parse(JSON.stringify(values[i]));
+ } catch (err) {
+ // This is done just for the sake of
+ // completion, as it is very unlikely
+ // to occur.
+ const invalidValueErr = new InvalidValueRetrievedFromRedis(
+ values[i]
+ );
+ res
+ .status(invalidValueErr.code)
+ .send(errorJson(invalidValueErr.message));
+ encounteredError = true;
+ return;
+ }
+ if (parentIds[parentId].systems.includes(monitorableId)) {
+ if (!(monitorableId in result.result[parentId].system)) {
+ result.result[parentId].system[monitorableId] =
+ {} as SystemKeys;
+ }
+ result.result[parentId].system[monitorableId][
+ key.replace("_" + monitorableId, "")
+ ] = value;
+ } else if (parentIds[parentId].repos.includes(monitorableId)) {
+ if (!(monitorableId in result.result[parentId].github)) {
+ result.result[parentId].github[monitorableId] =
+ {} as GitHubKeys;
+ }
+ result.result[parentId].github[monitorableId][
+ key.replace("_" + monitorableId, "")
+ ] = value;
+ }
+
+ // In the future, we will need to retrieve
+ // node metrics, where a node ID can be
+ // present in both the systems and the
+ // nodes fields.
+ });
+ }
+ );
}
- });
+ }
+ redisMulti.exec((err: Error | null, _: any) => {
+ if (err) {
+ console.error(err);
+ // Skip resolving if an error was already encountered,
+ // since the call has already been resolved.
+ if (!encounteredError) {
+ encounteredError = true;
+ const retrievalErr = new CouldNotRetrieveDataFromRedis(err);
+ res.status(retrievalErr.code).send(errorJson(retrievalErr.message));
+ }
+ return;
+ }
+ // Skip resolving if an error was encountered, since the call
+ // has already been resolved.
+ if (!encounteredError) {
+ res.status(Status.SUCCESS).send(result);
+ }
+ return;
+ });
+ } else {
+ // This is done just for the sake of completion, as it is very
+ // unlikely to occur.
+ const err = new RedisClientNotInitialised();
+ res.status(err.code).send(errorJson(err.message));
+ return;
+ }
+ }
+);
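+
+// A minimal example of the expected req.body.parentIds structure for
+// this endpoint (IDs are illustrative):
+//
+// {
+//   "parent_id_1": {
+//     "systems": ["system_id_1"],
+//     "repos": ["repo_id_1"]
+//   }
+// }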
// ---------------------------------------- Ping Endpoints
// ----------------- Common
-app.post('/server/common/node-exporter',
- async (req: express.Request, res: express.Response) => {
- console.log('Received POST request for %s %s', req.url, req.body);
- const nodeExporterUrl = req.body['url'];
-
- // Check if some required keys are missing in the body object, if yes
- // notify the client.
- const missingKeysList: string[] = missingValues({
- url: nodeExporterUrl
- });
- if (missingKeysList.length !== 0) {
- const err = new MissingKeysInBody(...missingKeysList);
- res.status(err.code).send(errorJson(err.message));
- return;
- }
-
- const url = `${nodeExporterUrl}`;
+app.post(
+ "/server/common/node-exporter",
+ async (req: express.Request, res: express.Response) => {
+ console.log("Received POST request for %s %s", req.url, req.body);
+ const nodeExporterUrl = req.body["url"];
- axios.get(url, {timeout: 3000}).then((response) => {
- if (verifyNodeExporterPing(response.data)) {
- res.status(Status.SUCCESS).send(resultJson(PingStatus.SUCCESS));
- } else {
- res.status(Status.ERROR).send(resultJson(PingStatus.ERROR));
- }
- }).catch((err) => {
- if (err.code === 'ECONNABORTED') {
- res.status(Status.TIMEOUT).send(resultJson(PingStatus.TIMEOUT));
- } else {
- console.error(`Axios error: ${err.message}`);
- res.status(Status.ERROR).send(resultJson(PingStatus.ERROR));
- }
- });
+ // Check whether any required keys are missing from the body object;
+ // if so, notify the client.
+ const missingKeysList: string[] = missingValues({
+ url: nodeExporterUrl,
});
+ if (missingKeysList.length !== 0) {
+ const err = new MissingKeysInBody(...missingKeysList);
+ res.status(err.code).send(errorJson(err.message));
+ return;
+ }
-app.post('/server/common/prometheus',
- async (req: express.Request, res: express.Response) => {
- console.log('Received POST request for %s %s', req.url, req.body);
- const url = req.body['url'];
- const baseChain = req.body['baseChain'];
-
- // Check if some required keys are missing in the body object, if yes
- // notify the client.
- const missingKeysList: string[] = missingValues({url, baseChain});
- if (missingKeysList.length !== 0) {
- const err = new MissingKeysInBody(...missingKeysList);
- res.status(err.code).send(errorJson(err.message));
- return;
- }
+ const url = `${nodeExporterUrl}`;
- // At request level
- const agent = new https.Agent({
- rejectUnauthorized: false
- });
+ axios
+ .get(url, { timeout: 3000 })
+ .then((response) => {
+ if (verifyNodeExporterPing(response.data)) {
+ res.status(Status.SUCCESS).send(resultJson(PingStatus.SUCCESS));
+ } else {
+ res.status(Status.ERROR).send(resultJson(PingStatus.ERROR));
+ }
+ })
+ .catch((err) => {
+ if (err.code === "ECONNABORTED") {
+ res.status(Status.TIMEOUT).send(resultJson(PingStatus.TIMEOUT));
+ } else {
+ console.error(`Axios error: ${err.message}`);
+ res.status(Status.ERROR).send(resultJson(PingStatus.ERROR));
+ }
+ });
+ }
+);
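+
+// Example request body (the URL is illustrative; Node Exporter serves
+// its metrics on port 9100 by default):
+//
+// { "url": "http://node-host:9100/metrics" }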
- axios.get(url, {timeout: 3000, httpsAgent: agent}).then((response) => {
- if (verifyPrometheusPing(response.data, baseChain)) {
- res.status(Status.SUCCESS).send(resultJson(PingStatus.SUCCESS));
- } else {
- res.status(Status.ERROR).send(resultJson(PingStatus.ERROR));
- }
- }).catch((err) => {
- if (err.code === 'ECONNABORTED') {
- res.status(Status.TIMEOUT).send(resultJson(PingStatus.TIMEOUT));
- } else {
- console.error(`Axios error: ${err.message}`);
- res.status(Status.ERROR).send(resultJson(PingStatus.ERROR));
- }
- });
- });
+app.post(
+ "/server/common/prometheus",
+ async (req: express.Request, res: express.Response) => {
+ console.log("Received POST request for %s %s", req.url, req.body);
+ const url = req.body["url"];
+ const baseChain = req.body["baseChain"];
-// ----------------- Cosmos
+ // Check whether any required keys are missing from the body object;
+ // if so, notify the client.
+ const missingKeysList: string[] = missingValues({ url, baseChain });
+ if (missingKeysList.length !== 0) {
+ const err = new MissingKeysInBody(...missingKeysList);
+ res.status(err.code).send(errorJson(err.message));
+ return;
+ }
-app.post('/server/cosmos/rest',
- async (req: express.Request, res: express.Response) => {
- console.log('Received POST request for %s %s', req.url, req.body);
- const cosmosRestUrl = req.body['url'];
+ // HTTPS agent configured at request level (certificate verification disabled).
+ const agent = new https.Agent({
+ rejectUnauthorized: false,
+ });
- // Check if some required keys are missing in the body object, if yes
- // notify the client.
- const missingKeysList: string[] = missingValues({
- url: cosmosRestUrl
- });
- if (missingKeysList.length !== 0) {
- const err = new MissingKeysInBody(...missingKeysList);
- res.status(err.code).send(errorJson(err.message));
- return;
+ axios
+ .get(url, { timeout: 3000, httpsAgent: agent })
+ .then((response) => {
+ if (verifyPrometheusPing(response.data, baseChain)) {
+ res.status(Status.SUCCESS).send(resultJson(PingStatus.SUCCESS));
+ } else {
+ res.status(Status.ERROR).send(resultJson(PingStatus.ERROR));
+ }
+ })
+ .catch((err) => {
+ if (err.code === "ECONNABORTED") {
+ res.status(Status.TIMEOUT).send(resultJson(PingStatus.TIMEOUT));
+ } else {
+ console.error(`Axios error: ${err.message}`);
+ res.status(Status.ERROR).send(resultJson(PingStatus.ERROR));
}
+ });
+ }
+);
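+
+// Example request body (values are illustrative; the URL should point
+// to the node's Prometheus metrics endpoint, and the accepted baseChain
+// values depend on verifyPrometheusPing):
+//
+// { "url": "http://node-host:26660/metrics", "baseChain": "cosmos" }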
- const url = `${cosmosRestUrl}/node_info`;
+// ----------------- Cosmos
- axios.get(url, {timeout: 3000}).then((response) => {
- if ('node_info' in response.data) {
- res.status(Status.SUCCESS).send(resultJson(PingStatus.SUCCESS));
- } else {
- res.status(Status.ERROR).send(resultJson(PingStatus.ERROR));
- }
- }).catch((err) => {
- if (err.code === 'ECONNABORTED') {
- res.status(Status.TIMEOUT).send(resultJson(PingStatus.TIMEOUT));
- } else {
- console.error(`Axios error: ${err.message}`);
- res.status(Status.ERROR).send(resultJson(PingStatus.ERROR));
- }
- });
+app.post(
+ "/server/cosmos/rest",
+ async (req: express.Request, res: express.Response) => {
+ console.log("Received POST request for %s %s", req.url, req.body);
+ const cosmosRestUrl = req.body["url"];
+
+ // Check whether any required keys are missing from the body object;
+ // if so, notify the client.
+ const missingKeysList: string[] = missingValues({
+ url: cosmosRestUrl,
});
+ if (missingKeysList.length !== 0) {
+ const err = new MissingKeysInBody(...missingKeysList);
+ res.status(err.code).send(errorJson(err.message));
+ return;
+ }
-app.post('/server/cosmos/tendermint-rpc',
- async (req: express.Request, res: express.Response) => {
- console.log('Received POST request for %s %s', req.url, req.body);
- const tendermintRpcUrl = req.body['url'];
+ const url = `${cosmosRestUrl}/cosmos/base/tendermint/v1beta1/node_info`;
- // Check if some required keys are missing in the body object, if yes
- // notify the client.
- const missingKeysList: string[] = missingValues({
- url: tendermintRpcUrl
- });
- if (missingKeysList.length !== 0) {
- const err = new MissingKeysInBody(...missingKeysList);
- res.status(err.code).send(errorJson(err.message));
- return;
+ axios
+ .get(url, { timeout: 3000 })
+ .then((response) => {
+ if ("default_node_info" in response.data) {
+ res.status(Status.SUCCESS).send(resultJson(PingStatus.SUCCESS));
+ } else {
+ res.status(Status.ERROR).send(resultJson(PingStatus.ERROR));
+ }
+ })
+ .catch((err) => {
+ if (err.code === "ECONNABORTED") {
+ res.status(Status.TIMEOUT).send(resultJson(PingStatus.TIMEOUT));
+ } else {
+ console.error(`Axios error: ${err.message}`);
+ res.status(Status.ERROR).send(resultJson(PingStatus.ERROR));
}
+ });
+ }
+);
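+
+// Example request body (URL illustrative; 1317 is the conventional
+// Cosmos SDK REST port). The handler appends
+// /cosmos/base/tendermint/v1beta1/node_info and checks for a
+// default_node_info key in the response.
+//
+// { "url": "http://cosmos-host:1317" }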
- const url = `${tendermintRpcUrl}/abci_info?`;
+app.post(
+ "/server/cosmos/tendermint-rpc",
+ async (req: express.Request, res: express.Response) => {
+ console.log("Received POST request for %s %s", req.url, req.body);
+ const tendermintRpcUrl = req.body["url"];
- axios.get(url, {timeout: 3000}).then((response) => {
- if ('jsonrpc' in response.data) {
- res.status(Status.SUCCESS).send(resultJson(PingStatus.SUCCESS));
- } else {
- res.status(Status.ERROR).send(resultJson(PingStatus.ERROR));
- }
- }).catch((err) => {
- if (err.code === 'ECONNABORTED') {
- res.status(Status.TIMEOUT).send(resultJson(PingStatus.TIMEOUT));
- } else {
- console.error(`Axios error: ${err.message}`);
- res.status(Status.ERROR).send(resultJson(PingStatus.ERROR));
- }
- });
+ // Check whether any required keys are missing from the body object;
+ // if so, notify the client.
+ const missingKeysList: string[] = missingValues({
+ url: tendermintRpcUrl,
});
+ if (missingKeysList.length !== 0) {
+ const err = new MissingKeysInBody(...missingKeysList);
+ res.status(err.code).send(errorJson(err.message));
+ return;
+ }
-// ----------------- Substrate
+ const url = `${tendermintRpcUrl}/abci_info?`;
-app.post('/server/substrate/websocket',
- async (req: express.Request, res: express.Response) => {
- console.log('Received POST request for %s %s', req.url, req.body);
- const substrateWsUrl = req.body['url'];
- // Check if some required keys are missing in the body object, if yes
- // notify the client.
- const missingKeysList: string[] = missingValues({
- url: substrateWsUrl
- });
- if (missingKeysList.length !== 0) {
- const err = new MissingKeysInBody(...missingKeysList);
- res.status(err.code).send(errorJson(err.message));
- return;
+ axios
+ .get(url, { timeout: 3000 })
+ .then((response) => {
+ if ("jsonrpc" in response.data) {
+ res.status(Status.SUCCESS).send(resultJson(PingStatus.SUCCESS));
+ } else {
+ res.status(Status.ERROR).send(resultJson(PingStatus.ERROR));
}
-
- const substrateIp = process.env.SUBSTRATE_API_IP;
- const substrateApi = process.env.SUBSTRATE_API_PORT;
-
- if (!substrateIp || !substrateApi) {
- const err = new EnvVariablesNotAvailable('Substrate IP or Substrate API');
- console.error(err.message);
- res.status(err.code).send(errorJson(err.message));
- return;
+ })
+ .catch((err) => {
+ if (err.code === "ECONNABORTED") {
+ res.status(Status.TIMEOUT).send(resultJson(PingStatus.TIMEOUT));
+ } else {
+ console.error(`Axios error: ${err.message}`);
+ res.status(Status.ERROR).send(resultJson(PingStatus.ERROR));
}
+ });
+ }
+);
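+
+// Example request body (URL illustrative; 26657 is the conventional
+// Tendermint RPC port). The handler pings /abci_info and checks for a
+// jsonrpc key in the response.
+//
+// { "url": "http://cosmos-host:26657" }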
- // At request level
- const agent = new https.Agent({
- rejectUnauthorized: false
- });
-
- const url = `https://${substrateIp}:${substrateApi}/api/rpc/system/syncState?websocket=${substrateWsUrl}`;
+// ----------------- Substrate
- axios.get(url, {timeout: 5000, httpsAgent: agent}).then((_) => {
- res.status(Status.SUCCESS).send(resultJson(PingStatus.SUCCESS));
- }).catch((err) => {
- if (err.code === 'ECONNABORTED') {
- res.status(Status.TIMEOUT).send(resultJson(PingStatus.TIMEOUT));
- } else {
- console.error(`Axios error: ${err.message}`);
- res.status(Status.ERROR).send(resultJson(PingStatus.ERROR));
- }
- });
+app.post(
+ "/server/substrate/websocket",
+ async (req: express.Request, res: express.Response) => {
+ console.log("Received POST request for %s %s", req.url, req.body);
+ const substrateWsUrl = req.body["url"];
+ // Check whether any required keys are missing from the body object;
+ // if so, notify the client.
+ const missingKeysList: string[] = missingValues({
+ url: substrateWsUrl,
});
+ if (missingKeysList.length !== 0) {
+ const err = new MissingKeysInBody(...missingKeysList);
+ res.status(err.code).send(errorJson(err.message));
+ return;
+ }
-// ----------------- Ethereum
+ const substrateIp = process.env.SUBSTRATE_API_IP;
+ const substrateApi = process.env.SUBSTRATE_API_PORT;
-app.post('/server/ethereum/rpc',
- async (req: express.Request, res: express.Response) => {
- console.log('Received POST request for %s %s', req.url, req.body);
- const url = req.body['url'];
-
- // Check if some required keys are missing in the body object, if yes
- // notify the client.
- const missingKeysList: string[] = missingValues({url});
- if (missingKeysList.length !== 0) {
- const err = new MissingKeysInBody(...missingKeysList);
- res.status(err.code).send(errorJson(err.message));
- return;
- }
-
- try {
- const web3 = new Web3(new Web3.providers.HttpProvider(url));
- const data = await fulfillWithTimeLimit(web3.eth.getBlockNumber(), 3000, -1);
+ if (!substrateIp || !substrateApi) {
+ const err = new EnvVariablesNotAvailable("Substrate API IP or Substrate API Port");
+ console.error(err.message);
+ res.status(err.code).send(errorJson(err.message));
+ return;
+ }
- if (data === -1) {
- res.status(Status.TIMEOUT).send(resultJson(PingStatus.TIMEOUT));
- } else {
- res.status(Status.SUCCESS).send(resultJson(PingStatus.SUCCESS));
- }
- } catch (err) {
- console.error(`Web3 error: ${err.message}`);
- res.status(Status.ERROR).send(resultJson(PingStatus.ERROR));
- }
+ // HTTPS agent configured at request level (certificate verification disabled).
+ const agent = new https.Agent({
+ rejectUnauthorized: false,
});
-// ---------------------------------------- Send Test Alert Endpoints
+ const url = `https://${substrateIp}:${substrateApi}/api/rpc/system/syncState?websocket=${substrateWsUrl}`;
-// ----------------- Channels
-
-app.post('/server/channels/opsgenie',
- async (req: express.Request, res: express.Response) => {
- console.log('Received POST request for %s %s', req.url, req.body);
- const apiKey = req.body['apiKey'];
- const eu = req.body['eu'];
-
- // Check if some required keys are missing in the body object, if yes
- // notify the client.
- const missingKeysList: string[] = missingValues({apiKey, eu});
- if (missingKeysList.length !== 0) {
- const err = new MissingKeysInBody(...missingKeysList);
- res.status(err.code).send(errorJson(err.message));
- return;
+ axios
+ .get(url, { timeout: 5000, httpsAgent: agent })
+ .then((_) => {
+ res.status(Status.SUCCESS).send(resultJson(PingStatus.SUCCESS));
+ })
+ .catch((err) => {
+ if (err.code === "ECONNABORTED") {
+ res.status(Status.TIMEOUT).send(resultJson(PingStatus.TIMEOUT));
+ } else {
+ console.error(`Axios error: ${err.message}`);
+ res.status(Status.ERROR).send(resultJson(PingStatus.ERROR));
}
+ });
+ }
+);
- // If the eu=true set the host to the opsgenie EU url otherwise the sdk will
- // run into an authentication error.
- const host = toBool(String(eu)) ? 'https://api.eu.opsgenie.com' : 'https://api.opsgenie.com';
+// ----------------- Ethereum
- // Create OpsGenie client and test alert message
- opsgenie.configure({api_key: apiKey, host});
+app.post(
+ "/server/ethereum/rpc",
+ async (req: express.Request, res: express.Response) => {
+ console.log("Received POST request for %s %s", req.url, req.body);
+ const url = req.body["url"];
- // Test alert object
- const alertObject = {
- message: testAlertMessage,
- description: testAlertMessage,
- priority: 'P5',
- };
+    // Check whether any required keys are missing from the body object; if so,
+    // notify the client.
+ const missingKeysList: string[] = missingValues({ url });
+ if (missingKeysList.length !== 0) {
+ const err = new MissingKeysInBody(...missingKeysList);
+ res.status(err.code).send(errorJson(err.message));
+ return;
+ }
- // Send test alert
- opsgenie.alertV2.create(alertObject, (err, _) => {
- if (err) {
- console.error(err);
- res.status(Status.ERROR).send(resultJson(PingStatus.ERROR));
- } else {
- res.status(Status.SUCCESS).send(resultJson(PingStatus.SUCCESS));
- }
- });
- });
+ try {
+ const web3 = new Web3(new Web3.providers.HttpProvider(url));
+ const data = await fulfillWithTimeLimit(
+ web3.eth.getBlockNumber(),
+ 3000,
+ -1
+ );
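+    // fulfillWithTimeLimit races getBlockNumber() against a 3-second timer and
+    // resolves with the sentinel value -1 if the timer wins, as checked below.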
+
+ if (data === -1) {
+ res.status(Status.TIMEOUT).send(resultJson(PingStatus.TIMEOUT));
+ } else {
+ res.status(Status.SUCCESS).send(resultJson(PingStatus.SUCCESS));
+ }
+ } catch (err) {
+ console.error(`Web3 error: ${err.message}`);
+ res.status(Status.ERROR).send(resultJson(PingStatus.ERROR));
+ }
+ }
+);
-app.post('/server/channels/slack',
- async (req: express.Request, res: express.Response) => {
- console.log('Received POST request for %s %s', req.url, req.body);
- const botToken = req.body['botToken'];
- const botChannelId = req.body['botChannelId'];
-
- // Check if some required keys are missing in the body object, if yes
- // notify the client.
- const missingKeysList: string[] = missingValues({botToken, botChannelId});
- if (missingKeysList.length !== 0) {
- const err = new MissingKeysInBody(...missingKeysList);
- res.status(err.code).send(errorJson(err.message));
- return;
- }
+// ---------------------------------------- Send Test Alert Endpoints
- // Create Slack web client and test alert message
- const client = new WebClient(botToken);
+// ----------------- Channels
- // Test alert object
- const alertObject = {
- text: testAlertMessage,
- channel: botChannelId
- };
+app.post(
+ "/server/channels/opsgenie",
+ async (req: express.Request, res: express.Response) => {
+ console.log("Received POST request for %s %s", req.url, req.body);
+ const apiKey = req.body["apiKey"];
+ const eu = req.body["eu"];
- // Send test alert
- try {
- await client.chat.postMessage(alertObject);
- } catch (err) {
- console.error(err);
- res.status(Status.ERROR).send(resultJson(PingStatus.ERROR));
- return;
- }
+    // Check whether any required keys are missing from the body object; if so,
+    // notify the client.
+ const missingKeysList: string[] = missingValues({ apiKey, eu });
+ if (missingKeysList.length !== 0) {
+ const err = new MissingKeysInBody(...missingKeysList);
+ res.status(err.code).send(errorJson(err.message));
+ return;
+ }
+    // If eu=true, point the client at the Opsgenie EU host; otherwise the SDK
+    // runs into an authentication error.
+ const host = toBool(String(eu))
+ ? "https://api.eu.opsgenie.com"
+ : "https://api.opsgenie.com";
+
+ // Create OpsGenie client and test alert message
+ opsgenie.configure({ api_key: apiKey, host });
+
+ // Test alert object
+ const alertObject = {
+ message: testAlertMessage,
+ description: testAlertMessage,
+ priority: "P5",
+ };
+
+ // Send test alert
+ opsgenie.alertV2.create(alertObject, (err, _) => {
+ if (err) {
+ console.error(err);
+ res.status(Status.ERROR).send(resultJson(PingStatus.ERROR));
+ } else {
res.status(Status.SUCCESS).send(resultJson(PingStatus.SUCCESS));
+ }
});
+ }
+);
-app.post('/server/channels/telegram',
- async (req: express.Request, res: express.Response) => {
- console.log('Received POST request for %s %s', req.url, req.body);
- const botToken = req.body['botToken'];
- const botChatId = req.body['botChatId'];
-
- // Check if some required keys are missing in the body object, if yes
- // notify the client.
- const missingKeysList: string[] = missingValues({botToken, botChatId});
- if (missingKeysList.length !== 0) {
- const err = new MissingKeysInBody(...missingKeysList);
- res.status(err.code).send(errorJson(err.message));
- return;
- }
+app.post(
+ "/server/channels/slack",
+ async (req: express.Request, res: express.Response) => {
+ console.log("Received POST request for %s %s", req.url, req.body);
+ const botToken = req.body["botToken"];
+ const botChannelId = req.body["botChannelId"];
- const url = `https://api.telegram.org/bot${botToken}/sendMessage`;
- const params = {
- chat_id: botChatId,
- text: testAlertMessage,
- parse_mode: 'Markdown'
- }
+    // Check whether any required keys are missing from the body object; if so,
+    // notify the client.
+ const missingKeysList: string[] = missingValues({ botToken, botChannelId });
+ if (missingKeysList.length !== 0) {
+ const err = new MissingKeysInBody(...missingKeysList);
+ res.status(err.code).send(errorJson(err.message));
+ return;
+ }
- axios.get(url, {timeout: 3000, params}).then(() => {
- res.status(Status.SUCCESS).send(resultJson(PingStatus.SUCCESS));
- }).catch((err) => {
- console.error(err);
- if (err.code === 'ECONNABORTED') {
- res.status(Status.TIMEOUT).send(resultJson(PingStatus.TIMEOUT));
- } else {
- res.status(Status.ERROR).send(resultJson(PingStatus.ERROR));
- }
- });
- });
+ // Create Slack web client and test alert message
+ const client = new WebClient(botToken);
+
+ // Test alert object
+ const alertObject = {
+ text: testAlertMessage,
+ channel: botChannelId,
+ };
+
+ // Send test alert
+ try {
+ await client.chat.postMessage(alertObject);
+ } catch (err) {
+ console.error(err);
+ res.status(Status.ERROR).send(resultJson(PingStatus.ERROR));
+ return;
+ }
-app.post('/server/channels/pagerduty',
- async (req, res) => {
- console.log('Received POST request for %s', req.url);
- const integrationKey = req.body['integrationKey'];
+ res.status(Status.SUCCESS).send(resultJson(PingStatus.SUCCESS));
+ }
+);
+
+app.post(
+ "/server/channels/telegram",
+ async (req: express.Request, res: express.Response) => {
+ console.log("Received POST request for %s %s", req.url, req.body);
+ const botToken = req.body["botToken"];
+ const botChatId = req.body["botChatId"];
// Check if some required keys are missing in the body object, if yes
// notify the client.
- const missingKeysList: string[] = missingValues({integrationKey});
+ const missingKeysList: string[] = missingValues({ botToken, botChatId });
if (missingKeysList.length !== 0) {
- const err = new MissingKeysInBody(...missingKeysList);
- res.status(err.code).send(errorJson(err.message));
- return;
+ const err = new MissingKeysInBody(...missingKeysList);
+ res.status(err.code).send(errorJson(err.message));
+ return;
}
- // Send test alert event
- event({
- data: {
- routing_key: integrationKey,
- event_action: 'trigger',
- payload: {
- summary: testAlertMessage,
- source: 'PANIC',
- severity: 'info',
- },
- }
- }).then(() => {
+ const url = `https://api.telegram.org/bot${botToken}/sendMessage`;
+ const params = {
+ chat_id: botChatId,
+ text: testAlertMessage,
+ parse_mode: "Markdown",
+ };
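+    // Note: with parse_mode "Markdown", Telegram rejects messages containing
+    // unescaped Markdown characters, so the test message should stay plain text.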
+
+ axios
+ .get(url, { timeout: 3000, params })
+ .then(() => {
res.status(Status.SUCCESS).send(resultJson(PingStatus.SUCCESS));
- }).catch((err) => {
+ })
+ .catch((err) => {
console.error(err);
- res.status(Status.ERROR).send(resultJson(PingStatus.ERROR));
+ if (err.code === "ECONNABORTED") {
+ res.status(Status.TIMEOUT).send(resultJson(PingStatus.TIMEOUT));
+ } else {
+ res.status(Status.ERROR).send(resultJson(PingStatus.ERROR));
+ }
+ });
+ }
+);
+
+app.post("/server/channels/pagerduty", async (req, res) => {
+ console.log("Received POST request for %s", req.url);
+ const integrationKey = req.body["integrationKey"];
+
+  // Check whether any required keys are missing from the body object; if so,
+  // notify the client.
+ const missingKeysList: string[] = missingValues({ integrationKey });
+ if (missingKeysList.length !== 0) {
+ const err = new MissingKeysInBody(...missingKeysList);
+ res.status(err.code).send(errorJson(err.message));
+ return;
+ }
+
+ // Send test alert event
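+  // (event() is presumably the Events API v2 helper from the PagerDuty JS
+  // SDK; routing_key identifies the integration receiving the trigger event.)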
+ event({
+ data: {
+ routing_key: integrationKey,
+ event_action: "trigger",
+ payload: {
+ summary: testAlertMessage,
+ source: "PANIC",
+ severity: "info",
+ },
+ },
+ })
+ .then(() => {
+ res.status(Status.SUCCESS).send(resultJson(PingStatus.SUCCESS));
+ })
+ .catch((err) => {
+ console.error(err);
+ res.status(Status.ERROR).send(resultJson(PingStatus.ERROR));
});
});
-app.post('/server/channels/twilio',
- async (req: express.Request, res: express.Response) => {
- console.log('Received POST request for %s %s', req.url, req.body);
- const accountSid = req.body['accountSid'];
- const authToken = req.body['authToken'];
- const twilioPhoneNumber = req.body['twilioPhoneNumber'];
- const phoneNumberToDial = req.body['phoneNumberToDial'];
-
- // Check if some required keys are missing in the body object, if yes
- // notify the client.
- const missingKeysList: string[] = missingValues({
- accountSid, authToken, twilioPhoneNumber, phoneNumberToDial
- });
- if (missingKeysList.length !== 0) {
- const err = new MissingKeysInBody(...missingKeysList);
- res.status(err.code).send(errorJson(err.message));
- return;
- }
+app.post(
+ "/server/channels/twilio",
+ async (req: express.Request, res: express.Response) => {
+ console.log("Received POST request for %s %s", req.url, req.body);
+ const accountSid = req.body["accountSid"];
+ const authToken = req.body["authToken"];
+ const twilioPhoneNumber = req.body["twilioPhoneNumber"];
+ const phoneNumberToDial = req.body["phoneNumberToDial"];
- let client;
+    // Check whether any required keys are missing from the body object; if so,
+    // notify the client.
+ const missingKeysList: string[] = missingValues({
+ accountSid,
+ authToken,
+ twilioPhoneNumber,
+ phoneNumberToDial,
+ });
+ if (missingKeysList.length !== 0) {
+ const err = new MissingKeysInBody(...missingKeysList);
+ res.status(err.code).send(errorJson(err.message));
+ return;
+ }
- try {
- client = twilio(accountSid, authToken);
- } catch (err) {
- console.error(err);
- res.status(Status.ERROR).send(resultJson(PingStatus.ERROR));
- return;
- }
+ let client;
- client.calls.create({
- twiml: '',
- to: phoneNumberToDial,
- from: twilioPhoneNumber
- }).then(() => {
- res.status(Status.SUCCESS).send(resultJson(PingStatus.SUCCESS));
- }).catch((err) => {
- console.error(err);
- res.status(Status.ERROR).send(resultJson(PingStatus.ERROR));
- });
- });
+ try {
+ client = twilio(accountSid, authToken);
+ } catch (err) {
+ console.error(err);
+ res.status(Status.ERROR).send(resultJson(PingStatus.ERROR));
+ return;
+ }
-app.post('/server/channels/email',
- async (req: express.Request, res: express.Response) => {
- console.log('Received POST request for %s %s', req.url, req.body);
- const smtp = req.body['smtp'];
- const port = req.body['port'];
- const from = req.body['from'];
- const to = req.body['to'];
- const username = req.body['username'];
- const password = req.body['password'];
-
- // Check if some required keys are missing in the body object, if yes
- // notify the client.
- const missingKeysList: string[] = missingValues({
- smtp, port, from, to, username, password
- });
- if (missingKeysList.length !== 0) {
- const err = new MissingKeysInBody(...missingKeysList);
- res.status(err.code).send(errorJson(err.message));
- return;
- }
+ client.calls
+ .create({
+ twiml: "",
+ to: phoneNumberToDial,
+ from: twilioPhoneNumber,
+ })
+ .then(() => {
+ res.status(Status.SUCCESS).send(resultJson(PingStatus.SUCCESS));
+ })
+ .catch((err) => {
+ console.error(err);
+ res.status(Status.ERROR).send(resultJson(PingStatus.ERROR));
+ });
+ }
+);
- // Create mail transport (essentially an email client)
- const transport = nodemailer.createTransport({
- host: smtp,
- port,
- auth: username && password ?
- {
- user: username,
- pass: password
- } : undefined,
- });
+app.post(
+ "/server/channels/email",
+ async (req: express.Request, res: express.Response) => {
+ console.log("Received POST request for %s %s", req.url, req.body);
+ const smtp = req.body["smtp"];
+ const port = req.body["port"];
+ const from = req.body["from"];
+ const to = req.body["to"];
+ const username = req.body["username"];
+ const password = req.body["password"];
- // If transporter valid, create and send test email
- transport.verify((verifyTransportErr, _) => {
- if (verifyTransportErr) {
- console.error(verifyTransportErr);
- res.status(Status.ERROR).send(resultJson(PingStatus.ERROR));
- return;
+    // Check whether any required keys are missing from the body object; if so,
+    // notify the client.
+ const missingKeysList: string[] = missingValues({
+ smtp,
+ port,
+ from,
+ to,
+ username,
+ password,
+ });
+ if (missingKeysList.length !== 0) {
+ const err = new MissingKeysInBody(...missingKeysList);
+ res.status(err.code).send(errorJson(err.message));
+ return;
+ }
+
+ // Create mail transport (essentially an email client)
+ const transport = nodemailer.createTransport({
+ host: smtp,
+ port,
+ auth:
+ username && password
+ ? {
+ user: username,
+ pass: password,
}
+ : undefined,
+ });
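+    // Assumption: when no username/password is supplied, the SMTP server is
+    // expected to accept unauthenticated mail (e.g. an internal relay).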
- const message = {
- from,
- to,
- subject: testAlertMessage,
- text: testAlertMessage,
- };
-
- // Send test email
- transport.sendMail(message, (sendMailErr, _) => {
- if (sendMailErr) {
- console.error(sendMailErr);
- res.status(Status.ERROR).send(resultJson(PingStatus.ERROR));
- return;
- }
- res.status(Status.SUCCESS).send(resultJson(PingStatus.SUCCESS));
- });
- });
+    // If the transporter is valid, create and send a test email
+ transport.verify((verifyTransportErr, _) => {
+ if (verifyTransportErr) {
+ console.error(verifyTransportErr);
+ res.status(Status.ERROR).send(resultJson(PingStatus.ERROR));
+ return;
+ }
+
+ const message = {
+ from,
+ to,
+ subject: testAlertMessage,
+ text: testAlertMessage,
+ };
+
+ // Send test email
+ transport.sendMail(message, (sendMailErr, _) => {
+ if (sendMailErr) {
+ console.error(sendMailErr);
+ res.status(Status.ERROR).send(resultJson(PingStatus.ERROR));
+ return;
+ }
+ res.status(Status.SUCCESS).send(resultJson(PingStatus.SUCCESS));
+ });
});
+ }
+);
// ----------------- Repositories
-app.post('/server/repositories/github',
+app.post(
+ "/server/repositories/github",
async (req: express.Request, res: express.Response) => {
- console.log('Received POST request for %s %s', req.url, req.body);
- const repoName = req.body['name'];
+ console.log("Received POST request for %s %s", req.url, req.body);
+ const repoName = req.body["name"];
- // Check if some required keys are missing in the body object, if yes
- // notify the client.
- const missingKeysList: string[] = missingValues({
- repoName
- });
- if (missingKeysList.length !== 0) {
- const err = new MissingKeysInBody(...missingKeysList);
- res.status(err.code).send(errorJson(err.message));
- return;
- }
+    // Check whether any required keys are missing from the body object; if so,
+    // notify the client.
+ const missingKeysList: string[] = missingValues({
+ repoName,
+ });
+ if (missingKeysList.length !== 0) {
+ const err = new MissingKeysInBody(...missingKeysList);
+ res.status(err.code).send(errorJson(err.message));
+ return;
+ }
- const url = `https://api.github.com/repos/${repoName}`;
+ const url = `https://api.github.com/repos/${repoName}`;
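+    // Unauthenticated GitHub API requests are rate-limited (60 per hour per
+    // IP), which is ample for an occasional connectivity test.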
- axios.get(url, {timeout: 3000}).then((response) => {
- if(response.status === 200){
- res.status(Status.SUCCESS).send(resultJson(PingStatus.SUCCESS));
- } else {
- res.status(Status.ERROR).send(resultJson(PingStatus.ERROR));
- }
- }).catch((err) => {
- if (err.code === 'ECONNABORTED') {
- res.status(Status.TIMEOUT).send(resultJson(PingStatus.TIMEOUT));
- } else {
- console.error(`Axios error: ${err.message}`);
- res.status(Status.ERROR).send(resultJson(PingStatus.ERROR));
- }
+ axios
+ .get(url, { timeout: 3000 })
+ .then((response) => {
+ if (response.status === 200) {
+ res.status(Status.SUCCESS).send(resultJson(PingStatus.SUCCESS));
+ } else {
+ res.status(Status.ERROR).send(resultJson(PingStatus.ERROR));
+ }
+ })
+ .catch((err) => {
+ if (err.code === "ECONNABORTED") {
+ res.status(Status.TIMEOUT).send(resultJson(PingStatus.TIMEOUT));
+ } else {
+ console.error(`Axios error: ${err.message}`);
+ res.status(Status.ERROR).send(resultJson(PingStatus.ERROR));
+ }
});
- });
+ }
+);
-app.post('/server/repositories/dockerhub',
+app.post(
+ "/server/repositories/dockerhub",
async (req: express.Request, res: express.Response) => {
- console.log('Received POST request for %s %s', req.url, req.body);
- const repoName = req.body['name'];
+ console.log("Received POST request for %s %s", req.url, req.body);
+ const repoName = req.body["name"];
- // Check if some required keys are missing in the body object, if yes
- // notify the client.
- const missingKeysList: string[] = missingValues({
- repoName
- });
- if (missingKeysList.length !== 0) {
- const err = new MissingKeysInBody(...missingKeysList);
- res.status(err.code).send(errorJson(err.message));
- return;
- }
+    // Check whether any required keys are missing from the body object; if so,
+    // notify the client.
+ const missingKeysList: string[] = missingValues({
+ repoName,
+ });
+ if (missingKeysList.length !== 0) {
+ const err = new MissingKeysInBody(...missingKeysList);
+ res.status(err.code).send(errorJson(err.message));
+ return;
+ }
- const url = `https://registry.hub.docker.com/v2/repositories/${repoName}`;
+ const url = `https://registry.hub.docker.com/v2/repositories/${repoName}`;
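+    // A valid repository response includes "name" and "user" fields; their
+    // presence is used below as the success signal.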
- axios.get(url, {timeout: 3000}).then((response) => {
- if('name' in response.data && 'user' in response.data){
- res.status(Status.SUCCESS).send(resultJson(PingStatus.SUCCESS));
- } else {
- res.status(Status.ERROR).send(resultJson(PingStatus.ERROR));
- }
- }).catch((err) => {
- if (err.code === 'ECONNABORTED') {
- res.status(Status.TIMEOUT).send(resultJson(PingStatus.TIMEOUT));
- } else {
- console.error(`Axios error: ${err.message}`);
- res.status(Status.ERROR).send(resultJson(PingStatus.ERROR));
- }
+ axios
+ .get(url, { timeout: 3000 })
+ .then((response) => {
+ if ("name" in response.data && "user" in response.data) {
+ res.status(Status.SUCCESS).send(resultJson(PingStatus.SUCCESS));
+ } else {
+ res.status(Status.ERROR).send(resultJson(PingStatus.ERROR));
+ }
+ })
+ .catch((err) => {
+ if (err.code === "ECONNABORTED") {
+ res.status(Status.TIMEOUT).send(resultJson(PingStatus.TIMEOUT));
+ } else {
+ console.error(`Axios error: ${err.message}`);
+ res.status(Status.ERROR).send(resultJson(PingStatus.ERROR));
+ }
});
- });
-
-
-
+ }
+);
// ---------------------------------------- Server defaults
-app.get('/server/*', async (req: express.Request,
- res: express.Response) => {
- console.log('Received GET request for %s', req.url);
- const err: InvalidEndpoint = new InvalidEndpoint(req.url);
- res.status(err.code).send(errorJson(err.message));
+app.get("/server/*", async (req: express.Request, res: express.Response) => {
+ console.log("Received GET request for %s", req.url);
+ const err: InvalidEndpoint = new InvalidEndpoint(req.url);
+ res.status(err.code).send(errorJson(err.message));
});
-app.post('/server/*', async (req: express.Request,
- res: express.Response) => {
- console.log('Received POST request for %s', req.url);
- const err = new InvalidEndpoint(req.url);
- res.status(err.code).send(errorJson(err.message));
+app.post("/server/*", async (req: express.Request, res: express.Response) => {
+ console.log("Received POST request for %s", req.url);
+ const err = new InvalidEndpoint(req.url);
+ res.status(err.code).send(errorJson(err.message));
});
// ---------------------------------------- Server redirects
-app.get('/*', async (req: express.Request, res: express.Response) => {
- res.redirect('/api-docs')
+app.get("/*", async (req: express.Request, res: express.Response) => {
+ res.redirect("/api-docs");
});
-app.post('/*', async (req: express.Request,
- res: express.Response) => {
- res.redirect('/api-docs')
+app.post("/*", async (req: express.Request, res: express.Response) => {
+ res.redirect("/api-docs");
});
// ---------------------------------------- Start listen
@@ -1713,15 +1832,15 @@ const port = parseInt(process.env.API_PORT || "9001");
// Create Https server
const server = https.createServer(httpsOptions, app);
-server.once('error', function (err: any) {
- if (err.code === 'EADDRINUSE') {
- console.error('port is currently in use');
- }
+server.once("error", function (err: any) {
+ if (err.code === "EADDRINUSE") {
+ console.error("port is currently in use");
+ }
});
// Listen for requests
-server.listen(port, () => console.log('Listening on %s', port));
+server.listen(port, () => console.log("Listening on %s", port));
//TODO: Need to add authentication, even to the respective middleware functions
-export {app, server, redisInterval, mongoInterval};
+export { app, server, redisInterval, mongoInterval };
diff --git a/api/src/server/mongo.ts b/api/src/server/mongo.ts
index fd04bdf6..89ba8479 100644
--- a/api/src/server/mongo.ts
+++ b/api/src/server/mongo.ts
@@ -1,58 +1,60 @@
-import MongoClient from "mongodb";
-import {MongoClientNotInitialised} from "../constant/errors";
+import { MongoClient, MongoClientOptions } from "mongodb";
+import { MongoClientNotInitialised } from "../constant/errors";
import {
- MSG_MONGO_CONNECTION_ESTABLISHED,
- MSG_MONGO_COULD_NOT_DISCONNECT,
- MSG_MONGO_COULD_NOT_ESTABLISH_CONNECTION,
- MSG_MONGO_DISCONNECTED
+ MSG_MONGO_CONNECTION_ESTABLISHED,
+ MSG_MONGO_COULD_NOT_DISCONNECT,
+ MSG_MONGO_COULD_NOT_ESTABLISH_CONNECTION,
+ MSG_MONGO_DISCONNECTED,
} from "../constant/msg";
-export const MonitorablesCollection = 'monitorables';
+export const MonitorablesCollection = "monitorables";
export class MongoInterface {
- private readonly url: string;
- private readonly options: MongoClient.MongoClientOptions;
- private _client?: MongoClient.MongoClient;
+ private readonly url: string;
+ private readonly options: MongoClientOptions;
+ private _client?: MongoClient;
- constructor(options: MongoClient.MongoClientOptions,
- host: string = "localhost", port: number = 27017) {
-
- this.options = options;
- this.options.readPreference = 'primaryPreferred';
- this.options.replicaSet = 'replica-set';
+ constructor(
+ options: MongoClientOptions,
+ host: string = "localhost",
+ port: number = 27017
+ ) {
+ this.options = options;
+ this.options.readPreference = "primaryPreferred";
+ this.options.replicaSet = "replica-set";
- this.url = `mongodb://rs1:${port},rs2:${port},rs3:${port}`;
- this._client = undefined;
- }
+ this.url = `mongodb://rs1:${port},rs2:${port},rs3:${port}`;
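+    // Note: the host argument is currently unused; replica-set members are
+    // addressed by the aliases rs1, rs2 and rs3 (presumably docker-compose
+    // service names), and primaryPreferred above lets reads fall back to a
+    // secondary when the primary is unavailable.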
+ this._client = undefined;
+ }
- get client() {
- return this._client
- }
+ get client() {
+ return this._client;
+ }
- async connect() {
- if (this._client) {
- return;
- }
- try {
- this._client = await MongoClient.connect(this.url, this.options);
- console.log(MSG_MONGO_CONNECTION_ESTABLISHED)
- } catch (err) {
- console.error(MSG_MONGO_COULD_NOT_ESTABLISH_CONNECTION);
- console.error(err);
- }
+ async connect() {
+ if (this._client) {
+ return;
+ }
+ try {
+ this._client = await MongoClient.connect(this.url, this.options);
+ console.log(MSG_MONGO_CONNECTION_ESTABLISHED);
+ } catch (err) {
+ console.error(MSG_MONGO_COULD_NOT_ESTABLISH_CONNECTION);
+ console.error(err);
}
+ }
- async disconnect() {
- if (this._client) {
- try {
- await this._client.close();
- console.log(MSG_MONGO_DISCONNECTED)
- } catch (err) {
- console.error(MSG_MONGO_COULD_NOT_DISCONNECT);
- console.error(err)
- }
- } else {
- throw new MongoClientNotInitialised()
- }
+ async disconnect() {
+ if (this._client) {
+ try {
+ await this._client.close();
+ console.log(MSG_MONGO_DISCONNECTED);
+ } catch (err) {
+ console.error(MSG_MONGO_COULD_NOT_DISCONNECT);
+ console.error(err);
+ }
+ } else {
+ throw new MongoClientNotInitialised();
}
+ }
}
diff --git a/api/src/server/utils.ts b/api/src/server/utils.ts
index a74eb69e..b87bfd82 100644
--- a/api/src/server/utils.ts
+++ b/api/src/server/utils.ts
@@ -37,7 +37,7 @@ export const allElementsInListHaveTypeString = (list: any[]): boolean => {
export const getPrometheusMetricFromBaseChain = (baseChain: string): string => {
switch (baseChain.toLowerCase()) {
case 'cosmos':
- return 'tendermint_consensus_height'
+ return 'consensus_height'
case 'chainlink':
return 'max_unconfirmed_blocks'
default:
diff --git a/api/src/util/MongooseUtil.ts b/api/src/util/MongooseUtil.ts
index e9213824..149da3cb 100644
--- a/api/src/util/MongooseUtil.ts
+++ b/api/src/util/MongooseUtil.ts
@@ -1,197 +1,256 @@
-import {ObjectID} from "mongodb";
-import mongoose, {Document,Model,Schema} from "mongoose";
-import {ObjectUtil} from "./ObjectUtil";
+import { ObjectId } from "mongodb";
+import mongoose, { Document, Model, Schema } from "mongoose";
+import { ObjectUtil } from "./ObjectUtil";
import fs from "fs";
-import {TypeUtil} from "./TypeUtil";
+import { TypeUtil } from "./TypeUtil";
export class MongooseUtil {
-
- /**
- * Checks if the Mongoose Document is Valid
- *
- * @param doc Mongoose Document
- * @returns {boolean}
- */
-    public static async isValid(doc: Document): Promise<boolean> {
- try {
- await doc.validate();
- return true;
- } catch (e) {
- return false;
- }
+ /**
+   * Checks whether the Mongoose document is valid
+ *
+ * @param doc Mongoose Document
+ * @returns {boolean}
+ */
+  public static async isValid(doc: Document): Promise<boolean> {
+ try {
+ await doc.validate();
+ return true;
+ } catch (e) {
+ return false;
}
-
- /**
- * Enable virtual mode on mongoose and enable alias name
- *
- * @param schema
- */
- public static virtualize(schema: Schema): void {
- schema.set('toJSON', {
- virtuals: true,
- transform: (doc, ret) => {
- if (ret._id) delete ret._id;
- if (ret.config_type) delete ret.config_type;
-
- ret.id = doc._id;
- },
- });
+ }
+
+ /**
+   * Enables virtuals on the schema's JSON output and maps _id to an id alias
+ *
+ * @param schema
+ */
+ public static virtualize(schema: Schema): void {
+ schema.set("toJSON", {
+ virtuals: true,
+ transform: (doc, ret) => {
+ if (ret._id) delete ret._id;
+ if (ret.config_type) delete ret.config_type;
+
+ ret.id = doc._id;
+ },
+ });
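+    // Example: { _id: ObjectId("...abc"), name: "x" } now serializes via
+    // toJSON() to { id: "...abc", name: "x" }, with _id and config_type removed.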
+ }
+
+ /**
+ * Deep Merge two objects
+ *
+ * @param obj1 The target object
+   * @param obj2 The source object whose values are merged into obj1
+   * @param ignoreFields array of field names to ignore (not include)
+   * @param parent The parent document, used to detect mongoose documents
+ * @returns The merged object
+ */
+  public static merge<T>(
+ obj1: T | any,
+ obj2: any,
+ ignoreFields: string[] = [],
+ parent: T = null
+ ): T {
+ if (TypeUtil.isScalarValue(obj2)) {
+ return obj2;
}
- /**
- * Deep Merge two objects
- *
- * @param obj1 The target object
- * @param obj2 The fulfilled object
- * @param ignoreFields array of field names to ignore (not include)
- * @param parent
- * @returns The merged object
- */
-    public static merge<T>(obj1: T | any, obj2: any,
- ignoreFields: string[] = [],
- parent: T = null): T {
-
- if (TypeUtil.isScalarValue(obj2)) {
- return obj2;
- }
+ const isMongooseDocument = parent
+ ? !!parent && typeof parent["$isNew"] === "boolean"
+ : !!obj1 && typeof obj1["$isNew"] === "boolean";
- const isMongooseDocument = parent ?
- (!!parent && typeof parent['$isNew'] === 'boolean') :
- (!!obj1 && typeof obj1['$isNew'] === 'boolean');
+ ignoreFields.forEach((invalid_field) => {
+ if (invalid_field in obj2) {
+ delete obj2[invalid_field];
+ }
+ });
- ignoreFields.forEach(invalid_field => {
- if (invalid_field in obj2) {
- delete obj2[invalid_field];
- }
- });
+    // automatically bump the modified timestamp when editing
+ if (obj1 && obj1["modified"]) {
+ obj1["modified"] = new Date();
+ }
- //automatic update when edit
- if (obj1 && obj1['modified']) {
- obj1['modified'] = new Date();
+ const convertToObjID = (x) => {
+ return typeof x === "string" && x.length === 24 ? new ObjectId(x) : x;
+ };
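+    // 24 hexadecimal characters is the canonical string length of a BSON
+    // ObjectId, so only such strings are converted.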
+ Object.keys(obj2).forEach((x) => {
+ //to avoid changes on created and modified properties
+ if (x === "created" || x === "modified") {
+ return;
+ }
+
+ if (Array.isArray(obj2[x])) {
+        // an empty source array replaces the target outright
+ if (obj2[x].length === 0) {
+ obj1[x] = obj2[x];
+ return;
}
- const convertToObjID = x => {
- return typeof x === 'string' && x.length === 24 ? new ObjectID(x) : x;
+        // Treat obj2[x] as set A and the target obj1[x] as set B
+ if (Array.isArray(obj1[x])) {
+          // keep only the intersection of A and B
+ obj1[x] = obj1[x].filter((b) =>
+ obj2[x].some((a) => ObjectUtil.isObject(b) && a.id === b.id)
+ );
+
+          // ...then merge each kept element with its counterpart in A
+ obj1[x].map((b) =>
+ MongooseUtil.merge(
+ b,
+ obj2[x].find((a) => b.id === a.id)
+ )
+ );
+ } else {
+ obj1[x] = [];
}
- Object.keys(obj2).forEach(x => {
- //to avoid changes on created and modified properties
- if (x === 'created' || x === 'modified') {
- return;
- }
-
- if (Array.isArray(obj2[x])) {
- //for empty arrays
- if (obj2[x].length === 0) {
- obj1[x] = obj2[x];
- return;
- }
-
- //Considering Set A for obj2, and Set B for obj1 the target
- if (Array.isArray(obj1[x])) {
- //We get the intersection of Sets (A & B)
- obj1[x] = obj1[x].filter(
- b => obj2[x].some(a => ObjectUtil.isObject(b) && a.id === b.id)
- );
-
- //and merge
- obj1[x].map(
- b => MongooseUtil.merge(b, obj2[x].find(a => b.id === a.id))
- );
- } else {
- obj1[x] = [];
- }
-
- //add new items to set B
- obj2[x].filter(x => !x.id)
- .forEach((b: object, key: number) => {
- obj1[x].push(b);
- });
-
- return;
- } else {
-
- //avoid undefined property
- if (!obj1) {
- obj1 = {};
- }
-
- if (x === 'id') {
- //make sure to fulfil with objectID instance
-
- //validate before assignment
- if (mongoose.Types.ObjectId.isValid(obj2[x])) {
- obj1['_id'] = convertToObjID(obj2[x]);
- } else {
- obj1['_id'] = new ObjectID();
- }
-
- if (!isMongooseDocument) {
- delete obj1[x];
- }
-
- return;
- }
- }
-
- let isReference = false;
- if (mongoose.Types.ObjectId.isValid(obj1[x])) {
- isReference = true;
-
- //if mongoose check it's a collection
- if (isMongooseDocument) {
- isReference = obj1[x]['collection'] !== undefined;
- }
- }
- // to edit pre-existing reference
- if (isReference) {
+ //add new items to set B
+ obj2[x]
+ .filter((x) => !x.id)
+ .forEach((b: object, key: number) => {
+ obj1[x].push(b);
+ });
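+        // Example: merging target x=[{id:"a",v:1},{id:"b",v:2}] with source
+        // x=[{id:"a",v:9},{v:3}] keeps and updates {id:"a"}, drops {id:"b"},
+        // and appends the id-less item {v:3}.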
+
+ return;
+ } else {
+        // guard against an undefined target object
+ if (!obj1) {
+ obj1 = {};
+ }
- let objID = null;
+ if (x === "id") {
+          // make sure _id is populated with an ObjectId instance
- if (obj2[x] && obj2[x].id) {
- objID = convertToObjID(obj2[x].id);
- } else if (mongoose.Types.ObjectId.isValid(obj2[x])) {
- objID = convertToObjID(obj2[x]);
- }
+ //validate before assignment
+ if (mongoose.Types.ObjectId.isValid(obj2[x])) {
+ obj1["_id"] = convertToObjID(obj2[x]);
+ } else {
+ obj1["_id"] = new ObjectId();
+ }
- obj1[x] = objID;
+ if (!isMongooseDocument) {
+ delete obj1[x];
+ }
- return;
- }
+ return;
+ }
+ }
- if (ObjectUtil.isObject(obj2[x])) {
- //recursive changes on each property of object
- obj1[x] = MongooseUtil.merge(obj1[x], obj2[x], [], obj1);
- return;
- }
+ let isReference = false;
+ if (mongoose.Types.ObjectId.isValid(obj1[x])) {
+ isReference = true;
- obj1[x] = obj2[x];
+        // for mongoose documents, only treat it as a reference if it carries a collection
+ if (isMongooseDocument) {
+ isReference = obj1[x]["collection"] !== undefined;
+ }
+ }
- });
+      // an existing reference is replaced with the incoming ObjectId
+ if (isReference) {
+ let objID = null;
- return obj1;
- }
+ if (obj2[x] && obj2[x].id) {
+ objID = convertToObjID(obj2[x].id);
+ } else if (mongoose.Types.ObjectId.isValid(obj2[x])) {
+ objID = convertToObjID(obj2[x]);
+ }
- /**
- * Populates a model/collection from a JSON document in the base/dump directory.
- *
- * @param model The model which corresponds to a collection to populate
- * @param fileName The name of the JSON document to use to populate collection
- */
-    public static async populateModel(model: Model<any>, fileName: string): Promise<void> {
- try {
- const jsonDocument = JSON.parse(fs.readFileSync(`${__dirname}/../../base/dump/${fileName}.json`, 'utf-8'));
- const ids = jsonDocument.map(x => new ObjectID(x._id));
- const total = await model.find({ '_id': { '$in': ids } }).countDocuments();
-
- //avoid duplicate _id key
- if(total === 0){
- await model.insertMany(jsonDocument);
- console.log(`Successfully populated ${fileName}.json`);
+ obj1[x] = objID;
+
+ return;
+ }
+
+ if (ObjectUtil.isObject(obj2[x])) {
+        // recurse into nested objects, merging property by property
+ obj1[x] = MongooseUtil.merge(obj1[x], obj2[x], [], obj1);
+ return;
+ }
+
+ obj1[x] = obj2[x];
+ });
+
+ return obj1;
+ }
+
+ /**
+ * Populates a model/collection from a JSON document in the base/dump directory.
+ *
+ * @param model The model which corresponds to a collection to populate
+ * @param fileName The name of the JSON document to use to populate collection
+ */
+ public static async populateModel(
+    model: Model<any>,
+    fileName: string
+  ): Promise<void> {
+ try {
+ const filePath = `${__dirname}/../../base/dump/${fileName}.json`;
+ if (!fs.existsSync(filePath)) {
+ return;
+ }
+ const fileContent = fs.readFileSync(filePath, "utf-8");
+ const jsonDocument: any[] = JSON.parse(fileContent);
+
+ if (!jsonDocument || jsonDocument.length === 0) {
+ return;
+ }
+
+ const idsToCheck = jsonDocument
+ .map((doc) => {
+ if (
+ !doc._id ||
+ typeof doc._id !== "string" ||
+ !ObjectId.isValid(doc._id)
+ ) {
+ return null;
+ }
+ return ObjectId.createFromHexString(doc._id);
+ })
+ .filter((id) => id !== null) as ObjectId[];
+
+ if (idsToCheck.length === 0) {
+ return;
+ }
+
+ const existingDocs = await model
+ .find({ _id: { $in: idsToCheck } }, { _id: 1 })
+ .lean();
+ const existingIds = new Set(
+ existingDocs.map((doc) => doc._id.toString())
+ );
+
+ const docsToInsert = jsonDocument.filter((doc) => {
+ return (
+ doc._id && typeof doc._id === "string" && !existingIds.has(doc._id)
+ );
+ });
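+      // Mongo error code 11000 signals a duplicate key; with { ordered: false }
+      // the remaining documents are still inserted when one of them collides.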
+
+ if (docsToInsert.length > 0) {
+ const preparedDocs = docsToInsert.map((doc) => ({
+ ...doc,
+ _id: ObjectId.createFromHexString(doc._id),
+ }));
+ await model
+ .insertMany(preparedDocs, { ordered: false })
+ .catch((err) => {
+ if (err.code !== 11000) {
+ console.error(
+ `Non-duplicate error during insertMany for ${fileName}: ${err}`
+ );
+ } else {
+ console.warn(
+ `Duplicate key error during insertMany for ${fileName}, despite pre-check. (Potential race condition?)`
+ );
}
-
- } catch (err) {
- console.error(`Failed to populate ${fileName}: ${err}`);
- }
+ });
+        // Log the attempted insertions; individual document errors are handled above.
+ console.log(
+ `Attempted to insert ${docsToInsert.length} new documents from ${fileName}.json`
+ );
+ }
+ } catch (err) {
+ console.error(`Failed processing ${fileName}: ${err}`);
}
+ }
}
diff --git a/api/src/v1/builder/BaseChainModelBuilder.ts b/api/src/v1/builder/BaseChainModelBuilder.ts
index 007a14f8..1a445445 100644
--- a/api/src/v1/builder/BaseChainModelBuilder.ts
+++ b/api/src/v1/builder/BaseChainModelBuilder.ts
@@ -1,67 +1,74 @@
-import mongoose, {Model, Schema} from "mongoose";
-import {BaseChain} from "../../../../entities/ts/BaseChain";
-import {Collection, ModelName} from "../../constant/mongoose";
-import {MongooseUtil} from "../../util/MongooseUtil";
-import {ObjectUtil} from "../../util/ObjectUtil";
-import {BaseMongoose} from "../entity/model/BaseMongoose";
-import {IModelBuilder} from "./IModelBuilder";
+import mongoose, { Model, Schema } from "mongoose";
+import { BaseChain } from "../../../../entities/ts/BaseChain";
+import { Collection, ModelName } from "../../constant/mongoose";
+import { MongooseUtil } from "../../util/MongooseUtil";
+import { ObjectUtil } from "../../util/ObjectUtil";
+import { BaseMongoose } from "../entity/model/BaseMongoose";
+import { IModelBuilder } from "./IModelBuilder";
/**
* Builder to create Mongoose Schema and Model of BaseChain Entity
*/
export class BaseChainModelBuilder implements IModelBuilder<BaseChain> {
+ private _schema: Schema = null;
+  private _model: Model<BaseChain> = null;
- private _schema: Schema = null;
-    private _model: Model<BaseChain> = null;
+ public produceSchema(): void {
+ const entity = {} as BaseChain;
+ entity.value = {
+ type: String,
+ required: [true, "Value is required!"],
+ default: null,
+ } as any;
+ entity.sources = [
+ {
+ type: Schema.Types.ObjectId,
+ ref: ModelName.GENERIC,
+ } as any,
+ ];
+ entity.thresholdAlerts = [
+ {
+ type: Schema.Types.ObjectId,
+ ref: ModelName.GENERIC,
+ alias: "thresholdAlerts",
+ } as any,
+ ];
+ entity.severityAlerts = [
+ {
+ type: Schema.Types.ObjectId,
+ ref: ModelName.SEVERITY_ALERT_SUBCONFIG,
+ alias: "severityAlerts",
+ } as any,
+ ];
+ entity.timeWindowAlerts = [
+ {
+ type: Schema.Types.ObjectId,
+ ref: ModelName.GENERIC,
+ alias: "timeWindowAlerts",
+ } as any,
+ ];
- public produceSchema(): void {
+ const obj = Object.assign(entity, new BaseMongoose());
+ this._schema = new Schema(ObjectUtil.camelToSnake