Mirror of https://github.com/nttgin/BGPalerter.git (synced 2024-05-19 06:50:08 +00:00)
Merge branch 'sentry' into sentry
AUTHORS (3 changed lines)
@@ -9,8 +9,9 @@ If this list is not up to date, please contact NTT or one of the authors.

NTT
https://massimocandela.com/

- CONTRIBUTORS -
Damian Zaremba, Fastly
Mircea Ulinic, DigitalOcean

A special THANK YOU goes to:
@@ -1,10 +1,10 @@
# -- trivial container for bgpalerter
-FROM node:10-alpine as build
+FROM node:13-alpine as build

WORKDIR /opt/bgpalerter
COPY . .

-RUN yarn
+RUN npm install

ENTRYPOINT ["npm"]
CMD ["run", "serve"]
@@ -35,6 +35,7 @@ Please uncomment the related section and configure according to your needs.

- [Installation](docs/installation.md)
- [Run from binary](docs/installation.md#running-bgpalerter-from-binaries)
- [Run from source code](docs/installation.md#running-bgpalerter-from-the-source-code)
- [Run in Docker](docs/installation.md#running-bgpalerter-in-docker)
- [Monitored prefixes list](docs/prefixes.md#prefixes)
- [Generate prefix list](docs/prefixes.md#generate)
- [Prefix attributes description](docs/prefixes.md#prefixes-fields)

@@ -48,7 +49,8 @@ Please uncomment the related section and configure according to your needs.

- [reportSlack](docs/configuration.md#reportslack)
- [reportKafka](docs/configuration.md#reportkafka)
- [reportSyslog](docs/configuration.md#reportsyslog)
- [Uptime monitoring](docs/uptime-monitor.md)
- [reportAlerta](docs/configuration.md#reportalerta)
- [Process/Uptime monitoring](docs/process-monitors.md)
- [More information for developers](docs/develop.md)
- [All npm commands](docs/develop.md#all-npm-commands)
@@ -1,185 +1,216 @@

environment: production

connectors:
  - file: connectorRIS
    name: ris
    params:
      carefulSubscription: true
      url: wss://ris-live.ripe.net/v1/ws/
      subscription:
        moreSpecific: true
        type: UPDATE
        host:
        socketOptions:
          includeRaw: false

monitors:
  - file: monitorHijack
    channel: hijack
    name: basic-hijack-detection
    params:
      thresholdMinPeers: 2

  - file: monitorNewPrefix
    channel: newprefix
    name: prefix-detection
    params:
      thresholdMinPeers: 2

  - file: monitorPath
    channel: path
    name: path-matching
    params:
      thresholdMinPeers: 0

  - file: monitorVisibility
    channel: visibility
    name: withdrawal-detection
    params:
      thresholdMinPeers: 10

  - file: monitorAS
    channel: misconfiguration
    name: asn-monitor
    params:
      thresholdMinPeers: 2

reports:
  - file: reportFile
    channels:
      - hijack
      - newprefix
      - visibility
      - path
      - misconfiguration

#  - file: reportEmail
#    channels:
#      - hijack
#      - newprefix
#      - visibility
#      - path
#      - misconfiguration
#    params:
#      showPaths: 5 # Amount of AS_PATHs to report in the alert
#      senderEmail: bgpalerter@xxxx
#      # BGPalerter uses nodemailer.
#      # The smtp section can be configured with all the parameters available at https://nodemailer.com/smtp/
#      # the following are just the most useful one
#      smtp:
#        host: localhost
#        port: 25
#        secure: false # If true the connection will use TLS when connecting to server. If false it will be still possible doing connection upgrade via STARTTLS
#        ignoreTLS: false # If true TLS will be completely disabled, including STARTTLS. Set this to true if you see certificate errors in the logs.
#        auth:
#          user: username
#          pass: password
#          type: login
#        tls:
#          rejectUnauthorized: true # Reject unauthorized certificates
#      notifiedEmails:
#        default:
#          - joe@example.org
#          - noc@example.org

#  - file: reportSlack
#    channels:
#      - hijack
#      - newprefix
#      - visibility
#      - path
#      - misconfiguration
#    params:
#      colors:
#        hijack: '#d60b1c'
#        newprefix: '#fa9548'
#        visibility: '#fad648'
#        path: '#42cbf5'
#      hooks:
#        default: _YOUR_SLACK_WEBHOOK_URL_

#  - file: reportKafka
#    channels:
#      - hijack
#      - newprefix
#      - visibility
#      - path
#      - misconfiguration
#    params:
#      host: localhost
#      port: 9092
#      topics:
#        default: bgpalerter

#  - file: reportSyslog
#    channels:
#      - hijack
#      - newprefix
#      - visibility
#      - path
#      - asn-monitor
#      - misconfiguration
#    params:
#      host: localhost
#      port: 514
#      templates:
#        default: "++BGPalerter-3-${type}: ${summary}|${earliest}|${latest}"
#        hijack: "++BGPalerter-5-${type}: ${summary}|${prefix}|${description}|${asn}|${newprefix}|${neworigin}|${earliest}|${latest}|${peers}"
#        newprefix: "++BGPalerter-4-${type}: ${summary}|${prefix}|${description}|${asn}|${newprefix}|${neworigin}|${earliest}|${latest}|${peers}"
#        visibility: "++BGPalerter-5-${type}: ${summary}|${prefix}|${description}|${asn}|${earliest}|${latest}|${peers}"
#        misconfiguration: "++BGPalerter-3-${type}: ${summary}|${asn}|${prefix}|${earliest}|${latest}"


############################
# Notification settings:
# - notificationIntervalSeconds
#       Defines the amount of seconds after which an alert can be repeated. An alert is repeated only if the event that
#       triggered it is not yet solved. Please, don't set this value to Infinity, use instead alertOnlyOnce.
#
# - alertOnlyOnce - A boolean that, if set to true, will prevent repetitions of the same alert even if the event that
#       triggered it is not yet solved. In this case notificationIntervalSeconds will be ignored.
#       If set to true, the signature of all alerts will be cached in order to recognize if they already happened in
#       the past. This may lead to a memory leak if the amount of alerts is considerable.

notificationIntervalSeconds: 7200
alertOnlyOnce: false

############################

# Below the files containing the monitored prefixes. Please see prefixes.yml for an example.
# This is an array (use new lines and dashes!)
monitoredPrefixesFiles:
  - prefixes.yml

logging:
  directory: logs
  logRotatePattern: YYYY-MM-DD # Whenever the pattern changes, a new file is created and the old one rotated
  zippedArchive: true
  maxSize: 80m
  maxFiles: 14d

checkForUpdatesAtBoot: true


#uptimeMonitors:
#  - file: uptimeApi
#    params:
#      useStatusCodes: true
#      host: null
#      port: 8011
#
#  - file: uptimeHealthcheck
#    params:
#      url: url_to_poll
#      intervalSeconds: 300
#      method: get

pidFile: bgpalerter.pid

############################
#
# sentryDSN: https://<key>@sentry.io/<project>
#
# Create a new project for monitoring BGPalerter on your Sentry server and grab
# the generated DSN.

connectors:
  - file: connectorRIS
    name: ris
    params:
      carefulSubscription: true
      url: wss://ris-live.ripe.net/v1/ws/
      perMessageDeflate: true
      subscription:
        moreSpecific: true
        type: UPDATE
        host:
        socketOptions:
          includeRaw: false

monitors:
  - file: monitorHijack
    channel: hijack
    name: basic-hijack-detection
    params:
      thresholdMinPeers: 2

  - file: monitorNewPrefix
    channel: newprefix
    name: prefix-detection
    params:
      thresholdMinPeers: 2

  - file: monitorPath
    channel: path
    name: path-matching
    params:
      thresholdMinPeers: 0

  - file: monitorVisibility
    channel: visibility
    name: withdrawal-detection
    params:
      thresholdMinPeers: 10

  - file: monitorAS
    channel: misconfiguration
    name: asn-monitor
    params:
      thresholdMinPeers: 2

reports:
  - file: reportFile
    channels:
      - hijack
      - newprefix
      - visibility
      - path
      - misconfiguration
    params:
      persistAlertData: false
      alertDataDirectory: alertdata/

#  - file: reportEmail
#    channels:
#      - hijack
#      - newprefix
#      - visibility
#      - path
#      - misconfiguration
#    params:
#      showPaths: 5 # Amount of AS_PATHs to report in the alert
#      senderEmail: bgpalerter@xxxx
#      # BGPalerter uses nodemailer.
#      # The smtp section can be configured with all the parameters available at https://nodemailer.com/smtp/
#      # the following are just the most useful one
#      smtp:
#        host: localhost
#        port: 25
#        secure: false # If true the connection will use TLS when connecting to server. If false it will be still possible doing connection upgrade via STARTTLS
#        ignoreTLS: false # If true TLS will be completely disabled, including STARTTLS. Set this to true if you see certificate errors in the logs.
#        auth:
#          user: username
#          pass: password
#          type: login
#        tls:
#          rejectUnauthorized: true # Reject unauthorized certificates
#      notifiedEmails:
#        default:
#          - joe@example.org
#          - noc@example.org

#  - file: reportSlack
#    channels:
#      - hijack
#      - newprefix
#      - visibility
#      - path
#      - misconfiguration
#    params:
#      colors:
#        hijack: '#d60b1c'
#        newprefix: '#fa9548'
#        visibility: '#fad648'
#        path: '#42cbf5'
#      hooks:
#        default: _YOUR_SLACK_WEBHOOK_URL_

#  - file: reportKafka
#    channels:
#      - hijack
#      - newprefix
#      - visibility
#      - path
#      - misconfiguration
#    params:
#      host: localhost
#      port: 9092
#      topics:
#        default: bgpalerter

#  - file: reportSyslog
#    channels:
#      - hijack
#      - newprefix
#      - visibility
#      - path
#      - asn-monitor
#      - misconfiguration
#    params:
#      host: localhost
#      port: 514
#      templates:
#        default: "++BGPalerter-3-${type}: ${summary}|${earliest}|${latest}"
#        hijack: "++BGPalerter-5-${type}: ${summary}|${prefix}|${description}|${asn}|${newprefix}|${neworigin}|${earliest}|${latest}|${peers}"
#        newprefix: "++BGPalerter-4-${type}: ${summary}|${prefix}|${description}|${asn}|${newprefix}|${neworigin}|${earliest}|${latest}|${peers}"
#        visibility: "++BGPalerter-5-${type}: ${summary}|${prefix}|${description}|${asn}|${earliest}|${latest}|${peers}"
#        misconfiguration: "++BGPalerter-3-${type}: ${summary}|${asn}|${prefix}|${earliest}|${latest}"

#  - file: reportAlerta
#    channels:
#      - hijack
#      - newprefix
#      - visibility
#      - path
#      - misconfiguration
#    params:
#      severity:
#        hijack: critical
#        newprefix: informational
#        visibility: debug
#        path: trace
#      resource_templates:
#        default: "${type}"
#        hijack: "hijack::${prefix}@@${asn}"
#        newprefix: "newprefix::${prefix}@@${asn}"
#        visibility: "visibility::${prefix}@@${asn}"
#      urls:
#        default: _YOUR_ALERTA_API_URL_


############################
# Notification settings:
# - notificationIntervalSeconds
#       Defines the amount of seconds after which an alert can be repeated. An alert is repeated only if the event that
#       triggered it is not yet solved. Please, don't set this value to Infinity, use instead alertOnlyOnce.
#

notificationIntervalSeconds: 7200


logging:
  directory: logs
  logRotatePattern: YYYY-MM-DD
  backlogSize: 1000 #Advanced option, read the doc
  maxRetainedFiles: 10
  maxFileSizeMB: 15
  compressOnRotation: true

checkForUpdatesAtBoot: true


############################
# Process monitoring settings:
# Uncomment or add classes under processMonitors if you want to monitor or send logs about the status of the BGPalerter process

#processMonitors:
#  - file: uptimeApi
#    params:
#      useStatusCodes: true
#      host: null
#      port: 8011
#
#  - file: uptimeHealthcheck
#    params:
#      url: url_to_poll
#      intervalSeconds: 300
#      method: get


############################
# Below the files containing the monitored prefixes. Please see prefixes.yml for an example.
# This is an array (use new lines and dashes!)

monitoredPrefixesFiles:
  - prefixes.yml


############################
# Advanced settings (Don't touch here!)
# Please, refer to the documentation for knowing the meaning of the following parameters.

alertOnlyOnce: false
fadeOffSeconds: 360
checkFadeOffGroupsSeconds: 30
pidFile: bgpalerter.pid
maxMessagesPerSecond: 6000
multiProcess: false
environment: production
@@ -6,20 +6,33 @@ The following are common parameters which it is possible to specify in the confi

| Parameter | Description | Expected format | Example | Required |
|---|---|---|---|---|
|environment| You can specify various environments. The values "production" (not verbose) and "development" (verbose) will affect the verbosity of the error/debug logs. Other values don't affect the functionality; they are only used to identify which environment the log is coming from. | A string | production | Yes |
|notificationIntervalSeconds| Defines the number of seconds after which an alert can be repeated. An alert is repeated only if the event that triggered it is not yet solved. Don't set this value to Infinity; use alertOnlyOnce instead. | An integer | 1800 | Yes |
|alertOnlyOnce| A boolean that, if set to true, will prevent repetitions of the same alert even if the event that triggered it is not yet solved. In this case notificationIntervalSeconds will be ignored. If set to true, the signature of all alerts will be cached in order to recognize if they already happened in the past. This may lead to a memory leak if the number of alerts is considerable. | A boolean | false | No |
|monitoredPrefixesFiles| The [list](docs/prefixes.md#array) of files containing the prefixes to monitor. See [here](docs/prefixes.md#prefixes) for more information. | A list of strings (valid .yml files) | -prefixes.yml | Yes |
|logging| A dictionary of parameters containing the configuration for the file logging. | | | Yes |
|logging.directory| The directory where the log files will be generated. The directory will be created if it does not exist. | A string | logs | Yes |
|logging.logRotatePattern| A pattern with date placeholders indicating the name of the file. This pattern also indicates when a log file is rotated. | A string with date placeholders (YYYY, MM, DD, ss, hh) | YYYY-MM-DD | Yes |
|logging.zippedArchive| Indicates whether a file has to be zipped when it gets rotated. | A boolean | true | Yes |
|logging.maxSize| Indicates the maximum file size allowed before the file is rotated (by appending .number at the end). This allows files to be rotated when logRotatePattern is still the same but the file is too big. | A string (indicating an amount and a unit of measure) | 20m | Yes |
|logger.maxFiles| Indicates the maximum number of files or the maximum number of days the files are retained. When this threshold is passed, files get deleted. | A string (a number or an amount of days ending with "d") | 14d | Yes |
|logging.compressOnRotation| Indicates whether a file has to be compressed when it gets rotated. | A boolean | true | Yes |
|logging.maxFileSizeMB| Indicates the maximum file size in MB allowed before the file is rotated. This allows files to be rotated when logRotatePattern is still the same but the file is too big. | An integer | 15 | Yes |
|logger.maxRetainedFiles| Indicates the maximum number of log files retained. When this threshold is passed, files are deleted. | An integer | 10 | Yes |
|checkForUpdatesAtBoot| Indicates if at each boot the application should check for updates. If an update is available, a notification will be sent to the default group. If you restart the process often (e.g. debugging, experimenting, etc.) set this to false to avoid notifications. In any case, BGPalerter checks for updates every 10 days. | A boolean | true | Yes |
|uptimeMonitors| A list of modules allowing various ways to check the status of BGPalerter (e.g. API, heartbeat). See [here](uptime-monitor.md) for more information. | | | No |
|processMonitors| A list of modules allowing various ways to check the status of BGPalerter (e.g. API, heartbeat). See [here](process-monitors.md) for more information. | | | No |
|sentryDSN| The DSN corresponding to the Sentry project to send the runtime exceptions to. | `https://<key>@<sentry-server-address-or-nameserver>/<project>` | `https://bgpalerter@sentry.io/1` | No |
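As a quick reference, the common parameters above map to a top-level block of `config.yml` like the following sketch (the values mirror the ones used in `config.yml.example`; they are examples, not recommendations):

```yaml
environment: production
notificationIntervalSeconds: 7200
alertOnlyOnce: false
monitoredPrefixesFiles:
  - prefixes.yml
logging:
  directory: logs
  logRotatePattern: YYYY-MM-DD
  maxRetainedFiles: 10
  maxFileSizeMB: 15
  compressOnRotation: true
checkForUpdatesAtBoot: true
# sentryDSN: https://<key>@<sentry-server-address-or-nameserver>/<project>
```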
The following are advanced parameters; please don't touch them unless you are doing research/experiments.

| Parameter | Description | Expected format | Example | Required |
|---|---|---|---|---|
|environment| You can specify various environments. The values "production" (not verbose) and "development" (verbose) will affect the verbosity of the error/debug logs. Other values don't affect the functionality; they are only used to identify which environment the log is coming from. | A string | production | Yes |
|alertOnlyOnce| A boolean that, if set to true, will prevent repetitions of the same alert in the future (which does not make sense for production purposes). In this case notificationIntervalSeconds will be ignored. If set to true, the signature of all alerts will be cached in order to recognize if they already happened in the past. This may lead to a memory leak if the number of alerts is considerable. | A boolean | false | No |
|pidFile| A file where the PID of the BGPalerter master process is recorded. | A string | bgpalerter.pid | No |
|logging.backlogSize| Indicates the buffer size (number of alerts) before it is flushed to disk. This parameter only plays a role when receiving thousands of alerts per second, in order to prevent IO starvation; in all other cases (e.g. production monitoring) it is irrelevant. | An integer | 15 | Yes |
|maxMessagesPerSecond| A cap on the BGP messages received; above this cap, messages are dropped. The default value is way above any practical rate. This may be useful for research measurements on the entire address space. | An integer | 6000 | No |
|multiProcess| If set to true, the processing of the BGP messages will be distributed across two processes. This may be useful for research measurements on the entire address space. It is discouraged to set this to true for normal production monitoring. | A boolean | false | No |
|fadeOffSeconds| If an alert is generated but cannot yet be squashed (e.g. `thresholdMinPeers` has not been reached yet), it is inserted in a temporary list which is garbage collected after the number of seconds expressed in `fadeOffSeconds`. Due to BGP propagation times, values below 5 minutes can result in false negatives. | An integer | 360 | No |
|checkFadeOffGroupsSeconds| Number of seconds after which the process checks for fading-off alerts. | An integer | 30 | No |
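For completeness, the advanced parameters above correspond to the block at the bottom of `config.yml.example`; a sketch with its default values:

```yaml
alertOnlyOnce: false
fadeOffSeconds: 360
checkFadeOffGroupsSeconds: 30
pidFile: bgpalerter.pid
maxMessagesPerSecond: 6000
multiProcess: false
environment: production
logging:
  backlogSize: 1000  # advanced option, see the table above
```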

## Composition

You can compose the tool with 3 main components: connectors, monitors, and reports.

@@ -54,6 +67,9 @@ reports:
    channels:
      - hijack
      - path
    params:
      persistAlertData: false
      alertDataDirectory: alertdata/
```

Each monitor declaration is composed of:

@@ -113,11 +129,16 @@ In particular, it will monitor for all the declared prefixes and will trigger an
* A more specific of the prefix has been announced by an AS which is different from the ones specified.
* The BGP update declares an AS_SET as origin and at least one of the ASes in the AS_SET is not specified in the configuration.

Example of alert:
> The prefix 2a00:5884::/32 (description associated with the prefix) is announced by AS15563 instead of AS204092

Parameters for this monitor module:

|Parameter| Description|
|---|---|
|thresholdMinPeers| Minimum number of peers that need to see the BGP update before triggering an alert. |
|maxDataSamples| Maximum number of collected BGP messages for each alert. Defaults to 1000. |

#### monitorVisibility

@@ -126,12 +147,15 @@ This monitor has the logic to detect loss of visibility.
In particular, it will monitor for all the declared prefixes and will trigger an alert when:
* The prefix is not visible anymore from at least `thresholdMinPeers` peers.

Example of alert:
> The prefix 165.254.225.0/24 (description associated with the prefix) has been withdrawn. It is no longer visible from 4 peers

Parameters for this monitor module:

|Parameter| Description|
|---|---|
|thresholdMinPeers| Minimum number of peers that need to see the BGP update before triggering an alert. |
|maxDataSamples| Maximum number of collected BGP messages for each alert. Defaults to 1000. |

#### monitorPath

@@ -154,12 +178,15 @@ This monitor detects BGP updates containing AS_PATH which match particular regul

More path matching options are available, see the entire list [here](prefixes.md#prefixes-fields)

Example of alert:
> Matched "an example on path matching" on prefix 98.5.4.3/22 (including length violation) 1 times

Parameters for this monitor module:

|Parameter| Description|
|---|---|
|thresholdMinPeers| Minimum number of peers that need to see the BGP update before triggering an alert. |
|maxDataSamples| Maximum number of collected BGP messages for each alert. Defaults to 1000. |

@@ -180,12 +207,17 @@ In particular, it will monitor for all the declared prefixes and will trigger an
> ```
> If in config.yml monitorNewPrefix is enabled you will receive alerts every time a more specific prefix (e.g. 50.82.4.0/24) is announced by AS58302.

Example of alert:
> A new prefix 165.254.255.0/25 is announced by AS15562. It should be instead 165.254.255.0/24 (description associated with the prefix) announced by AS15562

Parameters for this monitor module:

|Parameter| Description|
|---|---|
|thresholdMinPeers| Minimum number of peers that need to see the BGP update before triggering an alert. |
|maxDataSamples| Maximum number of collected BGP messages for each alert. Defaults to 1000. |

#### monitorAS

@@ -212,12 +244,16 @@ This is useful if you want to be alerted in case your AS starts announcing somet

You can generate the options block in the prefixes list automatically. Refer to the options `-s` and `-m` in the [auto-generate prefixes documentation](prefixes.md#generate).

Example of alert:
> AS2914 is announcing 2.2.2.3/22 but this prefix is not in the configured list of announced prefixes

Parameters for this monitor module:

|Parameter| Description|
|---|---|
|thresholdMinPeers| Minimum number of peers that need to see the BGP update before triggering an alert. |
|maxDataSamples| Maximum number of collected BGP messages for each alert. Defaults to 1000. |
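All the monitors above are declared in the `monitors` section of `config.yml` and accept the same two parameters; a sketch of such a declaration (the threshold values are illustrative):

```yaml
monitors:
  - file: monitorVisibility
    channel: visibility
    name: withdrawal-detection
    params:
      thresholdMinPeers: 10
      maxDataSamples: 1000
```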

### Reports

@@ -228,6 +264,15 @@ Possible reports are:
This report module is the default one. It sends the alerts as verbose logs.
To configure the logs see the [configuration introduction](configuration.md).

Parameters for this report module:

|Parameter| Description|
|---|---|
|persistAlertData| If set to true, the BGP messages that triggered an alert will be collected in JSON files. The default is false.|
|alertDataDirectory| If persistAlertData is set to true, this field must contain the directory where the JSON files with the BGP messages will be stored. |
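A sketch of a reportFile declaration with alert persistence enabled (the channel list and directory name are illustrative):

```yaml
reports:
  - file: reportFile
    channels:
      - hijack
      - visibility
    params:
      persistAlertData: true
      alertDataDirectory: alertdata/
```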
#### reportEmail

This report module sends the alerts by email.

@@ -254,7 +299,7 @@ Parameters for this report module:
|---|---|
|colors| A dictionary having as key the event channel and as value a hex color (string). These colors will be used to make messages in Slack distinguishable. |
|hooks| A dictionary containing Slack WebHooks grouped by user group (key: group, value: WebHook).|
|hooks.default| The default user group. Each user group is a WebHook (url). |
|hooks.default| The WebHook (URL) of the default user group.|

#### reportKafka

@@ -269,7 +314,7 @@ Parameters for this report module:
|port| Port of the Kafka instance/broker (e.g. 9092).|
|topics| A dictionary containing a mapping from BGPalerter channels to Kafka topics (e.g. `hijack: hijack-topic`). By default all channels are sent to the topic `bgpalerter` (`default: bgpalerter`). |

#### reportSyslog

This report module sends the alerts on Syslog.

@@ -281,3 +326,20 @@ Parameters for this report module:
|host| Host of the Syslog server (e.g. localhost).|
|port| Port of the Syslog server (e.g. 514).|
|templates| A dictionary containing string templates for each BGPalerter channel. If a channel doesn't have a template defined, the `default` template will be used (see `config.yml.example` for more details). |

#### reportAlerta

This report module sends alerts to [Alerta](https://alerta.io/).

Parameters for this report module:

|Parameter| Description |
|---|---|
|severity| The alert severity, e.g., ``critical``. See https://docs.alerta.io/en/latest/api/alert.html#alert-severities for the list of possible values. |
|environment| The Alerta environment name. If not specified, it'll use the BGPalerter environment name. |
|key| Optional, the Alerta API key to use for authenticated requests. |
|token| Optional value used when executing HTTP requests to the Alerta API with bearer authentication. |
|resource_templates| A dictionary of string templates for each BGPalerter channel to generate the content of the `resource` field of the alert. If a channel doesn't have a template defined, the `default` template will be used (see `config.yml.example` for more details). |
|urls| A dictionary containing Alerta API URLs grouped by user group (key: group, value: API URL). |
|urls.default| The Alerta API URL of the default user group. |
@@ -21,7 +21,6 @@ If you enable email reporting, download also the directory `reports/email_templa

## Running BGPalerter from the source code

1. Git clone this repo.

2. Install Node.js (version >= 10.16) and npm ([installing node and npm](https://nodejs.org/en/download/)).

@@ -29,3 +28,14 @@ If you enable email reporting, download also the directory `reports/email_templa

3. Execute `npm install` to install all dependencies.

4. Run `npm run watch-and-serve` to run the application. At every file change it will self-reload.

## Running BGPalerter in Docker

BGPalerter is available on Docker Hub [here](https://hub.docker.com/r/nttgin/bgpalerter/tags).

There are two main builds:
* `latest`: stable version for production monitoring;
* `dev`: reflects the last commit in the `dev` branch. Use this only for development purposes.

Additionally, each release has its own build in case you want to revert to an older version.
@@ -64,7 +64,7 @@ Below the complete list of attributes (the dot notation is used to represent yml

| ignoreMorespecifics | Prefixes more specific than the current one will be excluded from monitoring. | A boolean | Yes |
| ignore | Exclude the current prefix from monitoring. Useful when you are monitoring a prefix and you want to exclude a particular sub-prefix. | A boolean | No |
| includeMonitors | The list of monitors you want to run on this prefix. If this attribute is not declared, all monitors will be used. Not compatible with excludeMonitors. | An array of strings (monitor names according to config.yml) | No |
| excludeMonitors | The list of monitors you want to exclude on this prefix. Not compatible with includeMonitors. | An array of strings (monitor names according to config.yml) | No |
| excludeMonitors | The list of monitors you want to exclude on this prefix. Not compatible with includeMonitors. Use the monitors' `name` attributes, as defined in the monitor list in [config.yml](https://github.com/nttgin/BGPalerter/blob/master/config.yml.example). | An array of strings (monitor names according to config.yml) | No |
| path | A dictionary containing all sub-attributes for path matching. All the sub-attributes are in AND. | Sub-attributes (as follows) | No |
| path.match | The regular expression that will be tested on each AS path. If the expression tests positive, the BGP message triggers an alert. ASns are comma separated (see example above). **Please, use optimized regular expressions as described [in the following sub-section](#optimized-regular-expressions-for-as-path-matching)** | A string (valid RegEx) | No |
| path.notMatch | The regular expression that will be tested on each AS path. If the expression tests positive, the BGP message will not trigger an alert. ASns are comma separated (see example above). | A string (valid RegEx) | No |
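For example, a prefix entry in `prefixes.yml` using path matching might be sketched as follows (a sketch assuming the prefix-keyed layout of `prefixes.yml`; the prefix, ASN, and regular expressions are purely illustrative):

```yaml
165.254.225.0/24:
  description: an example on path matching
  asn: 2914
  ignoreMorespecifics: false
  path:
    match: ".*2914,1234$"
    notMatch: ".*5054.*"
```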

@@ -1,4 +1,4 @@
# Uptime monitoring
# Process monitoring

Since version 1.22.0 it is possible to monitor the status of the BGPalerter process.

@@ -6,7 +6,7 @@ There are various approaches for monitoring the status of BGPalerter, each imple
You can declare the modules you want to load/enable in `config.yml`, as follows:

```yaml
uptimeMonitors:
processMonitors:
  - file: uptimeApi
    params:
      useStatusCodes: true

@@ -44,7 +44,7 @@ The following is an example of the API output.
In `config.yml` the uptimeApi is declared as:

```yaml
uptimeMonitors:
processMonitors:

  - file: uptimeApi
    params:

@@ -77,7 +77,7 @@ If there is any warning about any component activated in BGPalerter, the heartbe
In `config.yml` the uptimeHealthcheck is declared as:

```yaml
uptimeMonitors:
processMonitors:

  - file: uptimeHealthcheck
    params:
package-lock.json (generated, 4119 changed lines): file diff suppressed because it is too large.
package.json (27 changed lines)
@@ -17,35 +17,32 @@
  "author": "Massimo Candela",
  "license": "ISC",
  "devDependencies": {
    "@babel/cli": "^7.7.7",
    "@babel/core": "^7.7.7",
    "@babel/node": "^7.7.7",
    "@babel/plugin-proposal-class-properties": "^7.7.4",
    "@babel/plugin-proposal-object-rest-spread": "^7.7.7",
    "@babel/preset-env": "^7.7.7",
    "@babel/cli": "^7.8.3",
    "@babel/core": "^7.8.3",
    "@babel/node": "^7.8.3",
    "@babel/plugin-proposal-class-properties": "^7.8.3",
    "@babel/plugin-proposal-object-rest-spread": "^7.8.3",
    "@babel/preset-env": "^7.8.3",
    "chai": "^4.2.0",
    "chai-subset": "^1.6.0",
    "mocha": "^7.0.0",
    "nodemon": "^2.0.1",
    "read-last-lines": "^1.7.1"
    "read-last-lines": "^1.7.2"
  },
  "dependencies": {
    "axios": "^0.19.0",
    "axios": "^0.19.1",
    "batch-promises": "^0.0.3",
    "brembo": "^2.0.3",
    "event-stream": "^4.0.1",
    "ip-address": "^6.2.0",
    "ip-sub": "^1.0.4",
    "js-yaml": "^3.13.1",
    "kafka-node": "^5.0.0",
    "nodemailer": "^6.4.1",
    "path": "^0.12.7",
    "pkg": "^4.4.2",
    "pubsub-js": "^1.8.0",
    "restify": "^8.5.1",
    "rpki-validator": "^1.0.9",
    "syslog-client": "^1.1.1",
    "websocket-stream": "^5.5.0",
    "winston": "^3.2.1",
    "winston-daily-rotate-file": "^4.4.1",
    "ws": "^7.2.1",
    "yargs": "^15.1.0",
    "@sentry/node": "^5.10.2"
@@ -56,13 +53,13 @@
      "./src/monitors/*.js",
      "./src/reports/*.js",
      "./src/connectors/*.js",
      "./src/uptimeMonitors/*.js"
      "./src/processMonitors/*.js"
    ],
    "assets": [
      "./bin/config.yml"
    ],
    "targets": [
      "node10"
      "node13"
    ]
  }
}
@@ -51,7 +51,7 @@ export default class ConnectorFactory {
        if (connectors.length === 0) {

            for (let connector of env.config.connectors) {
                this.connectors[connector.name] = new connector.class(connector.name, connector.params, env);
                this.connectors[connector.name] = new connector.class(connector.name, connector.params || {}, env);
            }
        }
    };
src/connectors/connectorFullThrottle.js (new file, 175 lines)
@@ -0,0 +1,175 @@
/*
 * BSD 3-Clause License
 *
 * Copyright (c) 2019, NTT Ltd.
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 * Redistributions of source code must retain the above copyright notice, this
 * list of conditions and the following disclaimer.
 *
 * Redistributions in binary form must reproduce the above copyright notice,
 * this list of conditions and the following disclaimer in the documentation
 * and/or other materials provided with the distribution.
 *
 * Neither the name of the copyright holder nor the names of its
 * contributors may be used to endorse or promote products derived from
 * this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
 * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
 * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
 * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

import Connector from "./connector";
import {AS, Path} from "../model";

export default class ConnectorFullThrottle extends Connector{

    constructor(name, params, env) {
        super(name, params, env);
        this.updates = [
            {
                data: {
                    announcements: [{
                        prefixes: ["175.254.205.0/25", "170.254.205.0/25"],
                        next_hop: "124.0.0.3"
                    }],
                    peer: "124.0.0.3",
                    path: [1, 2, 3, 4321]
                },
                type: "ris_message"
            },
            {
                data: {
                    announcements: [{
                        prefixes: ["165.254.255.0/25"],
                        next_hop: "124.0.0.2"
                    }],
                    peer: "124.0.0.2",
                    path: [1, 2, 3, [4, 15562]]
                },
                type: "ris_message"
            },
            {
                data: {
                    announcements: [{
                        prefixes: ["2a00:5884:ffff::/48"],
                        next_hop: "124.0.0.3"
                    }],
                    peer: "124.0.0.3",
                    path: [1, 2, 3, 208585]
                },
                type: "ris_message"
            },
            {
                data: {
                    announcements: [{
                        prefixes: ["2a00:5884::/32"],
                        next_hop: "124.0.0.3"
                    }],
                    peer: "124.0.0.3",
                    path: [1, 2, 3, [204092, 45]]
                },
                type: "ris_message"
            },
            {
                data: {
                    announcements: [{
                        prefixes: ["2a00:5884::/32"],
                        next_hop: "124.0.0.3"
                    }],
                    peer: "124.0.0.3",
                    path: [1, 2, 3, [15563]]
                },
                type: "ris_message"
            },
            {
                data: {
                    announcements: [{
                        prefixes: ["2a00:5884::/32"],
                        next_hop: "124.0.0.3"
                    }],
                    peer: "124.0.0.3",
                    path: [1, 2, 3, 204092]
                },
                type: "ris_message"
            }
        ];
    }

    connect = () =>
        new Promise((resolve, reject) => {
            resolve(true);
        });

    subscribe = (params) =>
        new Promise((resolve, reject) => {
            resolve(true);
            this._startStream();
        });

    _startStream = () => {
        setInterval(() => {
            this.updates.forEach(message => this._message(message));
            this.updates.forEach(message => this._message(message));
            this.updates.forEach(message => this._message(message));
            this.updates.forEach(message => this._message(message));
            this.updates.forEach(message => this._message(message));
            this.updates.forEach(message => this._message(message));
            this.updates.forEach(message => this._message(message));
            this.updates.forEach(message => this._message(message));
            this.updates.forEach(message => this._message(message));
            this.updates.forEach(message => this._message(message));
        }, 2);
    };

    static transform = (message) => {
        if (message.type === 'ris_message') {
            message = message.data;
            const components = [];
            const announcements = message["announcements"] || [];
            const withdrawals = message["withdrawals"] || [];
            const aggregator = message["aggregator"] || null;
            const peer = message["peer"];

            for (let announcement of announcements){
                const nextHop = announcement["next_hop"];
                const prefixes = announcement["prefixes"] || [];
                let path = new Path(message["path"].map(i => new AS(i)));
                let originAS = path.getLast();

                for (let prefix of prefixes){
                    components.push({
                        type: "announcement",
                        prefix,
                        peer,
                        path,
                        originAS,
                        nextHop,
                        aggregator
                    })
                }
            }

            for (let prefix of withdrawals){
                components.push({
                    type: "withdrawal",
                    prefix,
                    peer
                })
            }

            return components;
        }
    };
}
@@ -41,7 +41,7 @@ export default class ConnectorRIS extends Connector{
        super(name, params, env);
        this.ws = null;
        this.subscription = null;
        this.pingTimer = null;
        setInterval(this._ping, 5000);

        this.url = brembo.build(this.params.url, {
            path: [],
@@ -50,49 +50,62 @@ export default class ConnectorRIS extends Connector{
            }
        });

    }
    };

    _ping = () => {
        if (this.ws) {
            try {
                this.ws.ping();
            } catch (e) {
                // Nothing to do here
            }
        }
    };

    _openConnect = (resolve) => {
        resolve(true);
        this._connect(this.name + ' connector connected');
    };

    _messageToJson = (message) => {
        this._message(JSON.parse(message));
    };

    connect = () =>
        new Promise((resolve, reject) => {
            try {
                this.ws = new WebSocket(this.url);

                this.pingTimer = setInterval(() => {
                    try {
                        this.ws.ping(() => {});
                    } catch (e) {
                        // Nothing to do here
                    }
                }, 5000);

                this.ws.on('message', this._message);
                this.ws.on('close', this._close);
                this.ws.on('error', this._error);
                this.ws.on('open', () => {
                    resolve(true);
                    this._connect(this.name + ' connector connected');
                this.ws = new WebSocket(this.url, {
                    perMessageDeflate: this.params.perMessageDeflate
                });

                this.ws.on('message', this._messageToJson);
                this.ws.on('close', (error) => {
                    this._close("RIPE RIS disconnected (error: " + error + "). Please, provide a feedback to rislive@ripe.net on the importance of the reliability of this service.");
                });
                this.ws.on('error', this._error);
                this.ws.on('open', this._openConnect.bind(null, resolve));

            } catch(error) {
                this._error(error);
                resolve(false);
            }
        });

    _reconnect = () => {
        this.connect()
            .then(this.subscribe.bind(null, this.subscription));
    };

    _close = (error) => {
        this._disconnect(error);
        clearInterval(this.pingTimer);

        try {
            this.ws.terminate();
            delete this.ws;
        } catch(e) {
            // Nothing to do here
        }
        // Reconnect
        setTimeout(() => {
            try {
                this.ws.terminate();
            } catch(e) {
                // Nothing to do here
            }
            this.connect()
                .then(() => this.subscribe(this.subscription));
        }, 5000);
        setTimeout(this._reconnect, 10000);
    };

    _subscribeToAll = (input) => {
@@ -37,7 +37,6 @@ export default class ConnectorSwUpdates extends Connector{

    constructor(name, params, env) {
        super(name, params, env);
        this.timer = null;
    }

    connect = () =>
@@ -52,12 +51,12 @@ export default class ConnectorSwUpdates extends Connector{
            })
            .then(data => {
                if (data && data.data && data.data.version && data.data.version !== this.version){
                    this._message(JSON.stringify({
                    this._message({
                        type: "software-update",
                        currentVersion: this.version,
                        newVersion: data.data.version,
                        repo: "https://github.com/nttgin/BGPalerter"
                    }));
                    });
                }
            })
            .catch(() => {
@@ -73,7 +72,7 @@ export default class ConnectorSwUpdates extends Connector{
        if (this.config.checkForUpdatesAtBoot){
            this._checkForUpdates();
        }
        this.timer = setInterval(this._checkForUpdates, 1000 * 3600 * 24 * 5); // Check every 5 days
        setInterval(this._checkForUpdates, 1000 * 3600 * 24 * 5); // Check every 5 days
        resolve(true);
    });
@@ -50,6 +50,47 @@ export default class ConnectorTest extends Connector{
            resolve(true);
        });

    _fadeOffTest = (fade) => {
        const updates = [
            {
                data: {
                    withdrawals: ["165.24.225.0/24"],
                    peer: "124.0.0.1"
                },
                type: "ris_message"
            },
            {
                data: {
                    withdrawals: ["165.24.225.0/24"],
                    peer: "124.0.0.2"
                },
                type: "ris_message"
            },
            {
                data: {
                    withdrawals: ["165.24.225.0/24"],
                    peer: "124.0.0.3"
                },
                type: "ris_message"
            },
            {
                data: {
                    withdrawals: ["165.24.225.0/24"],
                    peer: "124.0.0.4"
                },
                type: "ris_message"
            }
        ];

        this._message(updates[0]);
        this._message(updates[1]);
        this._message(updates[2]);

        setTimeout(() => {
            this._message(updates[3]);
        }, (this.config.fadeOffSeconds + ((fade) ? -4 : 4)) * 1000); // depending on "fade" it goes in our out of the fading period
    };

    subscribe = (params) =>
        new Promise((resolve, reject) => {
            resolve(true);
@@ -59,6 +100,12 @@ export default class ConnectorTest extends Connector{
            let updates;

            switch (type) {
                case "fade-off":
                    return this._fadeOffTest(false);

                case "fade-in":
                    return this._fadeOffTest(true);

                case "hijack":
                    updates = [
                        {
@@ -86,7 +133,7 @@ export default class ConnectorTest extends Connector{
                        {
                            data: {
                                announcements: [{
                                    prefixes: ["2a00:5884:ffff:/48"],
                                    prefixes: ["2a00:5884:ffff::/48"],
                                    next_hop: "124.0.0.3"
                                }],
                                peer: "124.0.0.3",
@@ -157,7 +204,7 @@ export default class ConnectorTest extends Connector{
                        {
                            data: {
                                announcements: [{
                                    prefixes: ["2a00:5884:ffff:/48"],
                                    prefixes: ["2a00:5884:ffff::/48"],
                                    next_hop: "124.0.0.3"
                                }],
                                peer: "124.0.0.3",
@@ -219,7 +266,7 @@ export default class ConnectorTest extends Connector{
                        },
                        {
                            data: {
                                withdrawals: ["2a00:5884:ffff:/48"],
                                withdrawals: ["2a00:5884:ffff::/48"],
                                peer: "124.0.0.2"
                            },
                            type: "ris_message"
@@ -304,7 +351,7 @@ export default class ConnectorTest extends Connector{

            this.timer = setInterval(() => {
                updates.forEach(update => {
                    this._message(JSON.stringify(update));
                    this._message(update);
                    if (type === 'visibility') {
                        let peer = update.data.peer.split('.');
                        peer[3] = Math.min(parseInt(peer[3]) + 1, 254);
@@ -36,45 +36,49 @@ export default class Consumer {

    constructor(){
        this.connectors = {};

        for (let connector of env.config.connectors) {
            this.connectors[connector.name] = connector.class
        }

        this.monitors = env.config.monitors
            .map(monitor => new monitor.class(monitor.name, monitor.channel, monitor.params, env));
            .map(monitor => new monitor.class(monitor.name, monitor.channel, monitor.params || {}, env));

        this.reports = env.config.reports
            .map(report => new report.class(report.channels, report.params, env));
            .map(report => new report.class(report.channels, report.params || {}, env));

        process.on('message', this.dispatch);
        env.pubSub.subscribe('data', (type, data) => {
            this.dispatch(data);
        });

    };

    dispatch = (data) => {
    dispatch = (buffer) => {
        try {
            const connector = data.slice(0,3);
            const messagesRaw = JSON.parse(data.slice(4));
            const messages = this.connectors[connector].transform(messagesRaw) || [];
            for (let data of buffer){

            for (let monitor of this.monitors) {
                const connector = data.connector;
                const messagesRaw = data.message;
                const messages = this.connectors[connector].transform(messagesRaw) || [];

                // Blocking filtering to reduce stack usage
                for (const message of messages.filter(monitor.filter)) {
                for (let monitor of this.monitors) {

                    // Promise call to reduce waiting times
                    monitor
                        .monitor(message)
                        .catch(error => {
                            env.logger.log({
                                level: 'error',
                                message: error
                    // Blocking filtering to reduce stack usage
                    for (const message of messages.filter(monitor.filter)) {

                        // Promise call to reduce waiting times
                        monitor
                            .monitor(message)
                            .catch(error => {
                                env.logger.log({
                                    level: 'error',
                                    message: error
                                });
                            });
                        });
                    }
                }
            }

        } catch (error) {
            env.logger.log({
                level: 'error',
src/env.js (501 changed lines)
@@ -1,257 +1,244 @@
/*
 * BSD 3-Clause License
 *
 * Copyright (c) 2019, NTT Ltd.
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 * Redistributions of source code must retain the above copyright notice, this
 * list of conditions and the following disclaimer.
 *
 * Redistributions in binary form must reproduce the above copyright notice,
 * this list of conditions and the following disclaimer in the documentation
 * and/or other materials provided with the distribution.
 *
 * Neither the name of the copyright holder nor the names of its
 * contributors may be used to endorse or promote products derived from
 * this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
 * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
 * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
 * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

import yaml from "js-yaml";
import fs from "fs";
import path from "path";
import pubSub from 'pubsub-js';
import winston from 'winston';
import Input from "./inputs/inputYml";
require('winston-daily-rotate-file');
const { combine, timestamp, label, printf } = winston.format;
import {version} from '../package.json';

const defaultConfigFilePath = path.resolve(process.cwd(), 'config.yml');
const vector = {
    version: global.EXTERNAL_VERSION_FOR_TEST || version,
    configFile: global.EXTERNAL_CONFIG_FILE || defaultConfigFilePath,
    clientId: Buffer.from("bnR0LWJncGFsZXJ0ZXI=", 'base64').toString('ascii')
};
let config = {
    environment: "production",
    connectors: [
        {
            file: "connectorRIS",
            name: "ris",
            params: {
                carefulSubscription: true,
                url: "wss://ris-live.ripe.net/v1/ws/",
                subscription: {
                    moreSpecific: true,
                    type: "UPDATE",
                    host: null,
                    socketOptions: {
                        includeRaw: false
                    }
                }
            }
        }
    ],
    monitors: [
        {
            file: "monitorHijack",
            channel: "hijack",
            name: "basic-hijack-detection",
            params: {
                thresholdMinPeers: 2
            }
        },
        {
            file: "monitorPath",
            channel: "path",
            name: "path-matching",
            params: {
                thresholdMinPeers: 0
            }
        },
        {
            file: "monitorNewPrefix",
            channel: "newprefix",
            name: "prefix-detection",
            params: {
                thresholdMinPeers: 2
            }
        },
        {
            file: "monitorVisibility",
            channel: "visibility",
            name: "withdrawal-detection",
            params: {
                thresholdMinPeers: 10
            }
        },
        {
            file: "monitorAS",
            channel: "misconfiguration",
            name: "as-monitor",
            params: {
                thresholdMinPeers: 2
            }
        }
    ],
    reports: [
        {
            file: "reportFile",
            channels: ["hijack", "newprefix", "visibility", "path", "misconfiguration"]
        }
    ],
    notificationIntervalSeconds: 7200,
    alarmOnlyOnce: false,
    monitoredPrefixesFiles: ["prefixes.yml"],
    logging: {
        directory: "logs",
        logRotatePattern: "YYYY-MM-DD",
        zippedArchive: true,
        maxSize: "80m",
        maxFiles: "14d",
    },
    checkForUpdatesAtBoot: true,
    pidFile: "bgpalerter.pid",
    sentryDSN: null
};


if (fs.existsSync(vector.configFile)) {
    try {
        config = yaml.safeLoad(fs.readFileSync(vector.configFile, 'utf8')) || config;
    } catch (error) {
        throw new Error("The file " + vector.configFile + " is not valid yml: " + error.message.split(":")[0]);
    }
} else {
    console.log("Impossible to load config.yml. A default configuration file has been generated.");
    fs.writeFileSync(defaultConfigFilePath, yaml.dump(config))
}

const formatLine = printf(({ level, message, label, timestamp }) => `${timestamp} [${label}] ${level}: ${message}`);
const verboseFilter = winston.format((info, opts) => info.level === 'verbose' ? info : false);
const transportError = new (winston.transports.DailyRotateFile)({
    filename: config.logging.directory +'/error-%DATE%.log',
    datePattern: config.logging.logRotatePattern,
    zippedArchive: config.logging.zippedArchive,
    maxSize: config.logging.maxSize,
    maxFiles: config.logging.maxFiles,
    level: 'info',
    timestamp: true,
    eol: '\n',
    json: false,
    format: combine(
        label({ label: config.environment}),
        timestamp(),
        formatLine
    )
});

const transportReports = new (winston.transports.DailyRotateFile)({
    filename: config.logging.directory + '/reports-%DATE%.log',
    datePattern: config.logging.logRotatePattern,
    zippedArchive: config.logging.zippedArchive,
    maxSize: config.logging.maxSize,
    maxFiles: config.logging.maxFiles,
    level: 'verbose',
    timestamp: true,
    eol: '\n',
    json: false,
    format: combine(
        verboseFilter(),
        label({ label: config.environment}),
        timestamp(),
        formatLine
    )
});

const winstonTransports = [
    transportError,
    transportReports
];

if (config.environment !== 'production') {
    const consoleTransport = new winston.transports.Console({
        format: winston.format.simple()
    });
    winstonTransports.push(consoleTransport);
}

const wlogger = winston.createLogger({ transports: winstonTransports });

config.monitors = (config.monitors || []);
config.monitors.push({
    file: "monitorSwUpdates",
    channel: "software-update",
    name: "software-update",
});


config.monitors = config.monitors
    .map(item => {
        return {
            class: require("./monitors/" + item.file).default,
            channel: item.channel,
            name: item.name,
            params: item.params
        };
    });

config.reports = (config.reports || [])
    .map(item => {

        return {
            class: require("./reports/" + item.file).default,
            channels: [...item.channels, "software-update"],
            params: item.params
        };

    });
config.connectors = config.connectors || [];

config.connectors.push( {
    file: "connectorSwUpdates",
    name: "upd"
});

if ([...new Set(config.connectors)].length !== config.connectors.length) {
    throw new Error('Connectors names MUST be unique');
}

config.connectors = config.connectors
    .map((item, index) => {

        if (item.name.length !== 3) {
            throw new Error('Connectors names MUST be exactly 3 letters');
        }

        return {
            class: require("./connectors/" + item.file).default,
            params: item.params,
            name: item.name
        };

    });


const input = new Input(config);

vector.config = config;
vector.logger = wlogger;
vector.input = input;
vector.pubSub = pubSub;

module.exports = vector;
/*
 * BSD 3-Clause License
 *
 * Copyright (c) 2019, NTT Ltd.
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 * Redistributions of source code must retain the above copyright notice, this
 * list of conditions and the following disclaimer.
 *
 * Redistributions in binary form must reproduce the above copyright notice,
 * this list of conditions and the following disclaimer in the documentation
 * and/or other materials provided with the distribution.
 *
 * Neither the name of the copyright holder nor the names of its
 * contributors may be used to endorse or promote products derived from
 * this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
 * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
 * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
 * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

import yaml from "js-yaml";
import fs from "fs";
import path from "path";
import PubSub from './pubSub';
import FileLogger from './fileLogger';
import Input from "./inputs/inputYml";
import {version} from '../package.json';

const defaultConfigFilePath = path.resolve(process.cwd(), 'config.yml');
const vector = {
    version: global.EXTERNAL_VERSION_FOR_TEST || version,
    configFile: global.EXTERNAL_CONFIG_FILE || defaultConfigFilePath,
    clientId: Buffer.from("bnR0LWJncGFsZXJ0ZXI=", 'base64').toString('ascii')
};
let config = {
    environment: "production",
    connectors: [
        {
            file: "connectorRIS",
            name: "ris",
            params: {
                carefulSubscription: true,
                url: "wss://ris-live.ripe.net/v1/ws/",
                perMessageDeflate: true,
                subscription: {
                    moreSpecific: true,
                    type: "UPDATE",
                    host: null,
                    socketOptions: {
                        includeRaw: false
                    }
                }
            }
        }
    ],
    monitors: [
        {
            file: "monitorHijack",
            channel: "hijack",
            name: "basic-hijack-detection",
            params: {
                thresholdMinPeers: 2
            }
        },
        {
            file: "monitorPath",
            channel: "path",
            name: "path-matching",
            params: {
                thresholdMinPeers: 0
            }
||||
},
|
||||
{
|
||||
file: "monitorNewPrefix",
|
||||
channel: "newprefix",
|
||||
name: "prefix-detection",
|
||||
params: {
|
||||
thresholdMinPeers: 2
|
||||
}
|
||||
},
|
||||
{
|
||||
file: "monitorVisibility",
|
||||
channel: "visibility",
|
||||
name: "withdrawal-detection",
|
||||
params: {
|
||||
thresholdMinPeers: 10
|
||||
}
|
||||
},
|
||||
{
|
||||
file: "monitorAS",
|
||||
channel: "misconfiguration",
|
||||
name: "as-monitor",
|
||||
params: {
|
||||
thresholdMinPeers: 2
|
||||
}
|
||||
}
|
||||
],
|
||||
reports: [
|
||||
{
|
||||
file: "reportFile",
|
||||
channels: ["hijack", "newprefix", "visibility", "path", "misconfiguration"]
|
||||
}
|
||||
],
|
||||
notificationIntervalSeconds: 7200,
|
||||
alarmOnlyOnce: false,
|
||||
monitoredPrefixesFiles: ["prefixes.yml"],
|
||||
logging: {
|
||||
directory: "logs",
|
||||
logRotatePattern: "YYYY-MM-DD",
|
||||
zippedArchive: true,
|
||||
maxSize: "80m",
|
||||
maxFiles: "14d",
|
||||
},
|
||||
checkForUpdatesAtBoot: true,
|
||||
pidFile: "bgpalerter.pid",
|
||||
fadeOffSeconds: 360,
|
||||
checkFadeOffGroupsSeconds: 30
|
||||
};
|
||||
|
||||
|
||||
if (fs.existsSync(vector.configFile)) {
|
||||
try {
|
||||
config = yaml.safeLoad(fs.readFileSync(vector.configFile, 'utf8')) || config;
|
||||
} catch (error) {
|
||||
throw new Error("The file " + vector.configFile + " is not valid yml: " + error.message.split(":")[0]);
|
||||
}
|
||||
} else {
|
||||
console.log("Impossible to load config.yml. A default configuration file has been generated.");
|
||||
fs.writeFileSync(defaultConfigFilePath, yaml.dump(config))
|
||||
}
|
||||
|
||||
const errorTransport = new FileLogger({
|
||||
logRotatePattern: config.logging.logRotatePattern,
|
||||
filename: 'error-%DATE%.log',
|
||||
directory: config.logging.directory,
|
||||
backlogSize: config.logging.backlogSize,
|
||||
maxRetainedFiles: config.logging.maxRetainedFiles,
|
||||
maxFileSizeMB: config.logging.maxFileSizeMB,
|
||||
compressOnRotation: config.logging.compressOnRotation,
|
||||
label: config.environment,
|
||||
format: ({data, timestamp}) => `${timestamp} ${data.level}: ${data.message}`
|
||||
});
|
||||
|
||||
const verboseTransport = new FileLogger({
|
||||
logRotatePattern: config.logging.logRotatePattern,
|
||||
filename: 'reports-%DATE%.log',
|
||||
directory: config.logging.directory,
|
||||
backlogSize: config.logging.backlogSize,
|
||||
maxRetainedFiles: config.logging.maxRetainedFiles,
|
||||
maxFileSizeMB: config.logging.maxFileSizeMB,
|
||||
compressOnRotation: config.logging.compressOnRotation,
|
||||
label: config.environment,
|
||||
format: ({data, timestamp}) => `${timestamp} ${data.level}: ${data.message}`
|
||||
});
|
||||
|
||||
const loggerTransports = {
|
||||
verbose: verboseTransport,
|
||||
error: errorTransport,
|
||||
info: errorTransport
|
||||
};
|
||||
|
||||
const wlogger = {
|
||||
log:
|
||||
function(data){
|
||||
return loggerTransports[data.level].log(data);
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
config.monitors = (config.monitors || []);
|
||||
config.monitors.push({
|
||||
file: "monitorSwUpdates",
|
||||
channel: "software-update",
|
||||
name: "software-update",
|
||||
});
|
||||
|
||||
|
||||
config.monitors = config.monitors
|
||||
.map(item => {
|
||||
return {
|
||||
class: require("./monitors/" + item.file).default,
|
||||
channel: item.channel,
|
||||
name: item.name,
|
||||
params: item.params
|
||||
};
|
||||
});
|
||||
|
||||
config.reports = (config.reports || [])
|
||||
.map(item => {
|
||||
|
||||
return {
|
||||
class: require("./reports/" + item.file).default,
|
||||
channels: [...item.channels, "software-update"],
|
||||
params: item.params
|
||||
};
|
||||
|
||||
});
|
||||
config.connectors = config.connectors || [];
|
||||
|
||||
config.connectors.push( {
|
||||
file: "connectorSwUpdates",
|
||||
name: "upd"
|
||||
});
|
||||
|
||||
if ([...new Set(config.connectors)].length !== config.connectors.length) {
|
||||
throw new Error('Connectors names MUST be unique');
|
||||
}
|
||||
|
||||
config.connectors = config.connectors
|
||||
.map((item, index) => {
|
||||
|
||||
if (item.name.length !== 3) {
|
||||
throw new Error('Connectors names MUST be exactly 3 letters');
|
||||
}
|
||||
|
||||
return {
|
||||
class: require("./connectors/" + item.file).default,
|
||||
params: item.params,
|
||||
name: item.name
|
||||
};
|
||||
|
||||
});
|
||||
|
||||
|
||||
const input = new Input(config);
|
||||
|
||||
vector.config = config;
|
||||
vector.logger = wlogger;
|
||||
vector.input = input;
|
||||
vector.pubSub = new PubSub();
|
||||
|
||||
module.exports = vector;
|
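// --- Illustrative sketch, not part of the change set ---
// How a consumer module might use the environment composed above; the
// require path "./env" is an assumption, while the property names mirror
// the ones set on `vector` (config, logger, input, pubSub).
const env = require("./env");
env.logger.log({ level: "info", message: "environment loaded for " + env.config.environment });
env.pubSub.subscribe("hijack", (channel, content) => {
    console.log(channel, content.message);
});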
||||
|
159
src/fileLogger.js
Normal file
@@ -0,0 +1,159 @@
|
||||
var fs = require('fs');
|
||||
var moment = require('moment');
|
||||
const zlib = require('zlib');
|
||||
|
||||
export default class FileLogger {
|
||||
|
||||
constructor(params) {
|
||||
|
||||
this.format = params.format || this.defaultFormat;
|
||||
this.logRotatePattern = params.logRotatePattern;
|
||||
this.filename = params.filename;
|
||||
this.directory = params.directory;
|
||||
this.levels = params.levels || ['error', 'info', 'verbose'];
|
||||
|
||||
// File rotation
|
||||
this.compressOnRotation = params.compressOnRotation;
|
||||
this.maxFileSizeMB = parseFloat(params.maxFileSizeMB || 20);
|
||||
this.maxRetainedFiles = parseFloat(params.maxRetainedFiles || 20);
|
||||
|
||||
this.backlog = [];
|
||||
this.staleTimer = null;
|
||||
this.backlogSize = parseFloat(params.backlogSize || 100);
|
||||
|
||||
this.wstream = null;
|
||||
|
||||
|
||||
if (!fs.existsSync(this.directory)){
|
||||
fs.mkdirSync(this.directory);
|
||||
}
|
||||
|
||||
this.setCurrentFile();
|
||||
};
|
||||
|
||||
getRotatedFileName = (number) => {
|
||||
return this._currentFile + '.' + number + ((this.compressOnRotation) ? '.gz' : '');
|
||||
};
|
||||
|
||||
rotateOldFiles = () => {
|
||||
for (let n=this.maxRetainedFiles; n > 0; n--) {
|
||||
const fileName = this.getRotatedFileName(n);
|
||||
|
||||
if (fs.existsSync(fileName)) {
|
||||
fs.renameSync(fileName, this.getRotatedFileName(n + 1));
|
||||
}
|
||||
}
|
||||
|
||||
};
|
||||
|
||||
applyFileNumberLimit = () => {
|
||||
|
||||
try {
|
||||
|
||||
let files = fs.readdirSync(this.directory)
|
||||
.filter(i => i.indexOf('.log') > 0 && i.indexOf('.tmp') === -1)
|
||||
.sort((file1, file2) => {
|
||||
const v1 = file1.replace('.gz', '').split('.').pop();
|
||||
const v2 = file2.replace('.gz', '').split('.').pop();
|
||||
return parseInt(v1) - parseInt(v2);
|
||||
});
|
||||
|
||||
if (files.length >= this.maxRetainedFiles - 1) {
|
||||
files = files.slice(this.maxRetainedFiles);
|
||||
files
|
||||
.forEach(file => {
|
||||
fs.unlinkSync(this.directory + '/' + file);
|
||||
});
|
||||
}
|
||||
} catch {
|
||||
// Nothing
|
||||
}
|
||||
};
|
||||
|
||||
hasToBeRotated = () => {
|
||||
const stat = fs.statSync(this._currentFile);
|
||||
const fileSizeInMegabytes = stat.size / 1000000.0;
|
||||
return fileSizeInMegabytes > this.maxFileSizeMB;
|
||||
};
|
||||
|
||||
rotate = () => {
|
||||
if (this.hasToBeRotated()) {
|
||||
|
||||
this.close();
|
||||
|
||||
const tmpFile = this._currentFile + ".tmp";
|
||||
fs.renameSync(this._currentFile, tmpFile);
|
||||
this.open();
|
||||
const newFile = this.getRotatedFileName(1);
|
||||
|
||||
this.rotateOldFiles();
|
||||
this.applyFileNumberLimit();
|
||||
if (this.compressOnRotation) {
|
||||
|
||||
fs.writeFileSync(newFile, zlib.gzipSync(fs.readFileSync(tmpFile, 'utf8')));
|
||||
fs.unlinkSync(tmpFile);
|
||||
} else {
|
||||
fs.renameSync(tmpFile, newFile);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
setCurrentFile = () => {
|
||||
const file = this.directory + '/' + this.filename.replace("%DATE%", moment().format(this.logRotatePattern));
|
||||
const changed = this._currentFile && this._currentFile === file;
|
||||
this._currentFile = file;
|
||||
|
||||
return changed;
|
||||
};
|
||||
|
||||
defaultFormat = (json) => {
|
||||
return JSON.stringify(json);
|
||||
};
|
||||
|
||||
log = (data) => {
|
||||
this.backlog
|
||||
.push(this.format({
|
||||
timestamp: moment().format('YYYY-MM-DDTHH:mm:ssZ'),
|
||||
data
|
||||
}));
|
||||
|
||||
if (this.staleTimer) {
|
||||
clearTimeout(this.staleTimer);
|
||||
delete this.staleTimer;
|
||||
}
|
||||
|
||||
if (this.backlog.length >= this.backlogSize) {
|
||||
this.flush();
|
||||
} else {
|
||||
this.staleTimer = setTimeout(this.flushAndClose, 1000);
|
||||
}
|
||||
|
||||
};
|
||||
|
||||
flushAndClose = () => {
|
||||
this.flush();
|
||||
this.close();
|
||||
};
|
||||
|
||||
flush = () => {
|
||||
const string = this.backlog.join('\n') + '\n';
|
||||
this.backlog = [];
|
||||
if (this.wstream === null) {
|
||||
this.open();
|
||||
}
|
||||
fs.appendFileSync(this.wstream, string, 'utf8');
|
||||
|
||||
this.rotate();
|
||||
};
|
||||
|
||||
open = () => {
|
||||
this.wstream = fs.openSync(this._currentFile, 'a');
|
||||
};
|
||||
|
||||
close = () => {
|
||||
if (this.wstream !== null)
|
||||
fs.closeSync(this.wstream);
|
||||
this.wstream = null;
|
||||
}
|
||||
|
||||
};
|
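// --- Illustrative sketch, not part of the new file ---
// A minimal FileLogger instantiation; the parameter names are the ones read
// by the constructor above and the values are examples only.
const exampleLogger = new FileLogger({
    logRotatePattern: "YYYY-MM-DD",
    filename: "example-%DATE%.log",
    directory: "logs",
    backlogSize: 100,            // entries buffered before a synchronous flush
    maxRetainedFiles: 10,
    maxFileSizeMB: 15,
    compressOnRotation: true,
    format: ({data, timestamp}) => `${timestamp} ${data.level}: ${data.message}`
});
// Entries are written after backlogSize lines are queued or after ~1 second of inactivity.
exampleLogger.log({ level: "info", message: "hello" });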
@@ -31,7 +31,7 @@
|
||||
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
*/
|
||||
|
||||
import ipUtils from "../ipUtils";
|
||||
import ipUtils from "ip-sub";
|
||||
|
||||
export default class Input {
|
||||
|
||||
|
@@ -33,7 +33,7 @@
|
||||
import yaml from "js-yaml";
|
||||
import fs from "fs";
|
||||
import Input from "./input";
|
||||
import ipUtils from "../ipUtils";
|
||||
import ipUtils from "ip-sub";
|
||||
import { AS, Path } from "../model";
|
||||
|
||||
|
||||
|
@@ -1,86 +0,0 @@
|
||||
import { Address4, Address6 } from "ip-address";
|
||||
|
||||
const ip = {
|
||||
|
||||
isValidPrefix: function(prefix){
|
||||
let bits, ip;
|
||||
|
||||
try {
|
||||
if (prefix.indexOf("/") !== -1) {
|
||||
const components = prefix.split("/");
|
||||
ip = components[0];
|
||||
bits = parseInt(components[1]);
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (ip.indexOf(":") === -1) {
|
||||
return this.isValidIP(ip) && (bits >= 0 && bits <= 32);
|
||||
} else {
|
||||
return this.isValidIP(ip) && (bits >= 0 && bits <= 128);
|
||||
}
|
||||
|
||||
} catch (e) {
|
||||
return false;
|
||||
}
|
||||
},
|
||||
|
||||
isValidIP: function(ip) {
|
||||
|
||||
try {
|
||||
if (ip.indexOf(":") === -1) {
|
||||
return new Address4(ip).isValid();
|
||||
} else {
|
||||
return new Address6(ip).isValid();
|
||||
}
|
||||
} catch (e) {
|
||||
return false;
|
||||
}
|
||||
},
|
||||
|
||||
sortByPrefixLength: function (a, b) {
|
||||
const netA = a.split("/")[1];
|
||||
const netB = b.split("/")[1];
|
||||
|
||||
return parseInt(netA) - parseInt(netB);
|
||||
},
|
||||
|
||||
toDecimal: function(ip) {
|
||||
let bytes = "";
|
||||
if (ip.indexOf(":") === -1) {
|
||||
bytes = ip.split(".").map(ip => parseInt(ip).toString(2).padStart(8, '0')).join("");
|
||||
} else {
|
||||
bytes = ip.split(":").filter(ip => ip !== "").map(ip => parseInt(ip, 16).toString(2).padStart(16, '0')).join("");
|
||||
}
|
||||
|
||||
return bytes;
|
||||
},
|
||||
|
||||
getNetmask: function(prefix) {
|
||||
const components = prefix.split("/");
|
||||
const ip = components[0];
|
||||
const bits = components[1];
|
||||
|
||||
let binaryRoot;
|
||||
|
||||
if (ip.indexOf(":") === -1){
|
||||
binaryRoot = this.toDecimal(ip).padEnd(32, '0').slice(0, bits);
|
||||
} else {
|
||||
binaryRoot = this.toDecimal(ip).padEnd(128, '0').slice(0, bits);
|
||||
}
|
||||
|
||||
return binaryRoot;
|
||||
|
||||
},
|
||||
|
||||
isSubnetBinary: (prefixContainer, prefixContained) => {
|
||||
return prefixContained != prefixContainer && prefixContained.startsWith(prefixContainer);
|
||||
},
|
||||
|
||||
isSubnet: function (prefixContainer, prefixContained) {
|
||||
return this.isSubnetBinary(this.getNetmask(prefixContainer), this.getNetmask(prefixContained));
|
||||
}
|
||||
|
||||
};
|
||||
|
||||
module.exports = ip;
|
37
src/lossyBuffer.js
Normal file
@@ -0,0 +1,37 @@
|
||||
export default class LossyBuffer {
|
||||
|
||||
constructor(bufferSize, cleaningInterval, logger){
|
||||
this.callback = null;
|
||||
this.buffer = [];
|
||||
this.bufferSize = bufferSize;
|
||||
setInterval(this.sendData, cleaningInterval);
|
||||
this.alertOnce = false;
|
||||
this.logger = logger;
|
||||
};
|
||||
|
||||
sendData = () => {
|
||||
if (this.callback && this.buffer.length){
|
||||
this.callback(this.buffer);
|
||||
this.buffer = [];
|
||||
}
|
||||
};
|
||||
|
||||
add = (item) => {
|
||||
if (this.buffer.length <= this.bufferSize) {
|
||||
this.buffer.push(item);
|
||||
|
||||
} else if (!this.alertOnce) {
|
||||
this.alertOnce = true;
|
||||
this.logger.log({
|
||||
level: 'error',
|
||||
message: "The data rate is too high, messages are getting dropped due to full buffer. Increase connectorsBufferSize if you think your machine could handle more."
|
||||
});
|
||||
}
|
||||
|
||||
};
|
||||
|
||||
onData = (callback) => {
|
||||
this.callback = callback;
|
||||
};
|
||||
|
||||
}
|
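// --- Illustrative sketch, not part of the new file ---
// LossyBuffer batches items and hands them to the onData callback every
// cleaningInterval milliseconds; once more than bufferSize items pile up in
// a single interval the extra ones are dropped and an error is logged once.
// The logger can be any object exposing log({level, message}); console is
// used here only as a stand-in.
const exampleBuffer = new LossyBuffer(1800, 300, console);
exampleBuffer.onData(items => console.log("batch of", items.length, "messages"));
exampleBuffer.add({ connector: "ris", message: "raw update" });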
68
src/model.js
@@ -3,26 +3,31 @@ export class Path {
|
||||
this.value = listAS;
|
||||
};
|
||||
|
||||
getLast = () => {
|
||||
getLast (){
|
||||
return this.value[this.value.length - 1];
|
||||
};
|
||||
|
||||
toString = () => {
|
||||
toString () {
|
||||
return JSON.stringify(this.toJSON());
|
||||
};
|
||||
|
||||
getValues = () => {
|
||||
getValues () {
|
||||
return this.value.map(i => i.getValue());
|
||||
};
|
||||
|
||||
toJSON = () => this.getValues();
|
||||
toJSON () {
|
||||
return this.getValues();
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
export class AS {
|
||||
static _instances = {};
|
||||
|
||||
constructor(numbers) {
|
||||
this.numbers = null;
|
||||
this.ASset = false;
|
||||
this._valid = null;
|
||||
|
||||
if (["string", "number"].includes(typeof(numbers))) {
|
||||
this.numbers = [ numbers ];
|
||||
@@ -38,33 +43,44 @@ export class AS {
|
||||
if (this.isValid()) {
|
||||
this.numbers = this.numbers.map(i => parseInt(i));
|
||||
}
|
||||
|
||||
const key = this.numbers.join("-");
|
||||
if (!!AS._instances[key]) {
|
||||
return AS._instances[key];
|
||||
}
|
||||
|
||||
AS._instances[key] = this;
|
||||
}
|
||||
|
||||
getId = () => {
|
||||
getId () {
|
||||
return (this.numbers.length === 1) ? this.numbers[0] : this.numbers.sort().join("-");
|
||||
};
|
||||
|
||||
isValid = () => {
|
||||
return this.numbers.length > 0 &&
|
||||
this.numbers
|
||||
.every(asn => {
|
||||
isValid () {
|
||||
if (this._valid === null) {
|
||||
this._valid = this.numbers.length > 0 &&
|
||||
this.numbers
|
||||
.every(asn => {
|
||||
|
||||
try {
|
||||
const intAsn = parseInt(asn);
|
||||
if (intAsn != asn) {
|
||||
try {
|
||||
const intAsn = parseInt(asn);
|
||||
if (intAsn != asn) {
|
||||
return false;
|
||||
}
|
||||
asn = intAsn;
|
||||
} catch (e) {
|
||||
return false;
|
||||
}
|
||||
asn = intAsn;
|
||||
} catch (e) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return asn > 0 && asn <= 4294967295;
|
||||
}) &&
|
||||
[...new Set(this.numbers.map(i => parseInt(i)))].length === this.numbers.length;
|
||||
return asn > 0 && asn <= 4294967295;
|
||||
}) &&
|
||||
[...new Set(this.numbers.map(i => parseInt(i)))].length === this.numbers.length;
|
||||
}
|
||||
|
||||
return this._valid;
|
||||
};
|
||||
|
||||
includes = (ASn) => {
|
||||
includes (ASn){
|
||||
|
||||
for (let a of ASn.numbers) {
|
||||
if (!this.numbers.includes(a)) {
|
||||
@@ -75,19 +91,19 @@ export class AS {
|
||||
return true;
|
||||
};
|
||||
|
||||
isASset = () => {
|
||||
isASset () {
|
||||
return this.ASset;
|
||||
};
|
||||
|
||||
getValue = () => {
|
||||
return (this.numbers.length > 1) ? this.numbers : this.numbers[0]
|
||||
getValue () {
|
||||
return (this.numbers.length > 1) ? this.numbers : this.numbers[0];
|
||||
};
|
||||
|
||||
toString = () => {
|
||||
toString() {
|
||||
return this.numbers.map(i => "AS" + i).join(", and ");
|
||||
};
|
||||
|
||||
toJSON = () => {
|
||||
toJSON () {
|
||||
return this.numbers;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
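// --- Illustrative sketch, not part of the diff ---
// AS accepts a single ASN or a list (AS-set) and caches instances by their
// numbers key, so constructing the same ASN twice returns the same object.
const a = new AS(2914);
const b = new AS("2914");
console.log(a === b);                        // true: the constructor returns the cached instance
console.log(a.getId());                      // 2914
console.log(new AS([204092, 45]).getId());   // "204092-45"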
@@ -38,19 +38,24 @@ export default class Monitor {
|
||||
this.pubSub = env.pubSub;
|
||||
this.logger = env.logger;
|
||||
this.input = env.input;
|
||||
this.params = params;
|
||||
this.params = params || {};
|
||||
this.maxDataSamples = this.params.maxDataSamples || 1000;
|
||||
this.name = name;
|
||||
this.channel = channel;
|
||||
this.monitored = [];
|
||||
this.alerts = {};
|
||||
this.sent = {};
|
||||
|
||||
this.alerts = {}; // Dictionary containing the alerts <id, Array>. The id is the "group" key of the alert.
|
||||
this.sent = {}; // Dictionary containing the last sent unix timestamp of each group <id, int>
|
||||
this.truncated = {}; // Dictionary containing <id, boolean> if the alerts Array for "id" is truncated according to maxDataSamples
|
||||
this.fadeOff = {}; // Dictionary containing the last alert unix timestamp of each group <id, int> which contains alerts that have been triggered but are not ready yet to be sent (e.g. thresholdMinPeers not yet reached)
|
||||
|
||||
this.internalConfig = {
|
||||
notificationIntervalSeconds: this.config.notificationIntervalSeconds,
|
||||
checkStaleNotificationsSeconds: 60,
|
||||
clearNotificationQueueAfterSeconds: (this.config.notificationIntervalSeconds * 3) / 2
|
||||
notificationInterval: (this.config.notificationIntervalSeconds || 7200) * 1000,
|
||||
checkFadeOffGroups: (this.config.checkFadeOffGroupsSeconds || 30) * 1000,
|
||||
fadeOff: this.config.fadeOffSeconds * 1000 || 60 * 6 * 1000
|
||||
};
|
||||
setInterval(this._publish, this.internalConfig.checkStaleNotificationsSeconds * 1000);
|
||||
|
||||
setInterval(this._publishFadeOffGroups, this.internalConfig.checkFadeOffGroups);
|
||||
};
|
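// With the defaults defined in index.js (notificationIntervalSeconds: 7200,
// checkFadeOffGroupsSeconds: 30, fadeOffSeconds: 360) the values above become
// 7200 * 1000 = 7,200,000 ms, 30 * 1000 = 30,000 ms and 360 * 1000 = 360,000 ms:
// a group is re-notified at most every 2 hours, fade-off groups are checked
// every 30 seconds, and an unconfirmed group is dropped after 6 minutes.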
||||
|
||||
updateMonitoredResources = () => {
|
||||
@@ -70,74 +75,57 @@ export default class Monitor {
|
||||
throw new Error('The method squashAlerts must be implemented in ' + this.name);
|
||||
};
|
||||
|
||||
_squash = (alerts) => {
|
||||
_squash = (id) => {
|
||||
|
||||
const alerts = this.alerts[id];
|
||||
const message = this.squashAlerts(alerts);
|
||||
|
||||
if (message) {
|
||||
const firstAlert = alerts[0];
|
||||
const id = firstAlert.id;
|
||||
let earliest = Infinity;
|
||||
let latest = -Infinity;
|
||||
|
||||
for (let alert of alerts) {
|
||||
|
||||
earliest = Math.min(alert.timestamp, earliest);
|
||||
latest = Math.max(alert.timestamp, latest);
|
||||
|
||||
if (id !== alert.id) {
|
||||
throw new Error('Squash MUST receive a list of events all with the same ID.');
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
id,
|
||||
truncated: this.truncated[id] || false,
|
||||
origin: this.name,
|
||||
earliest,
|
||||
latest,
|
||||
affected: firstAlert.affected,
|
||||
message,
|
||||
data: alerts.map(a => {
|
||||
return {
|
||||
extra: a.extra,
|
||||
matchedRule: a.matchedRule,
|
||||
matchedMessage: a.matchedMessage,
|
||||
timestamp: a.timestamp
|
||||
};
|
||||
})
|
||||
data: alerts
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
publishAlert = (id, message, affected, matchedRule, matchedMessage, extra) => {
|
||||
|
||||
publishAlert = (id, affected, matchedRule, matchedMessage, extra) => {
|
||||
const now = new Date().getTime();
|
||||
const context = {
|
||||
id,
|
||||
timestamp: new Date().getTime(),
|
||||
message,
|
||||
timestamp: now,
|
||||
affected,
|
||||
matchedRule,
|
||||
matchedMessage,
|
||||
extra
|
||||
};
|
||||
|
||||
if (!this.alerts[id]) {
|
||||
this.alerts[id] = [];
|
||||
}
|
||||
if (!this.sent[id] ||
|
||||
(!this.config.alertOnlyOnce && now > (this.sent[id] + this.internalConfig.notificationInterval))) {
|
||||
|
||||
this.alerts[id].push(context);
|
||||
this.alerts[id] = this.alerts[id] || [];
|
||||
this.alerts[id].push(context);
|
||||
|
||||
if (!this.sent[id]) {
|
||||
this._publish();
|
||||
}
|
||||
};
|
||||
// Check if for each alert group the maxDataSamples parameter is respected
|
||||
if (!this.truncated[id] && this.alerts[id].length > this.maxDataSamples) {
|
||||
this.truncated[id] = this.alerts[id][0].timestamp; // Mark as truncated
|
||||
this.alerts[id] = this.alerts[id].slice(-this.maxDataSamples); // Truncate
|
||||
}
|
||||
|
||||
_clean = (group) => {
|
||||
if (this.config.alertOnlyOnce) {
|
||||
delete this.alerts[group.id];
|
||||
} else if (this.config.alertOnlyOnce && new Date().getTime() > group.latest + (this.internalConfig.clearNotificationQueueAfterSeconds * 1000)) {
|
||||
delete this.alerts[group.id];
|
||||
delete this.sent[group.id];
|
||||
this._publishGroupId(id, now);
|
||||
|
||||
return true;
|
||||
}
|
||||
@@ -145,37 +133,44 @@ export default class Monitor {
|
||||
return false;
|
||||
};
|
||||
|
||||
_checkLastSent = (group) => {
|
||||
const lastTimeSent = this.sent[group.id];
|
||||
_publishFadeOffGroups = () => {
|
||||
const now = new Date().getTime();
|
||||
|
||||
if (lastTimeSent && this.config.alertOnlyOnce) {
|
||||
return false;
|
||||
} else if (lastTimeSent) {
|
||||
for (let id in this.fadeOff) {
|
||||
this._publishGroupId(id, now);
|
||||
}
|
||||
|
||||
const isThereSomethingNew = lastTimeSent < group.latest;
|
||||
const isItTimeToSend = new Date().getTime() > lastTimeSent + (this.internalConfig.notificationIntervalSeconds * 1000);
|
||||
|
||||
return isThereSomethingNew && isItTimeToSend;
|
||||
} else {
|
||||
return true;
|
||||
if (!this.config.alertOnlyOnce) {
|
||||
for (let id in this.alerts) {
|
||||
if (now > (this.sent[id] + this.internalConfig.notificationInterval)) {
|
||||
delete this.sent[id];
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
_publish = () => {
|
||||
_publishGroupId = (id, now) => {
|
||||
const group = this._squash(id);
|
||||
|
||||
for (let id in this.alerts) {
|
||||
const group = this._squash(this.alerts[id]);
|
||||
if (group) {
|
||||
this._publishOnChannel(group);
|
||||
this.sent[id] = now;
|
||||
|
||||
if (group) {
|
||||
if (this._checkLastSent(group)) {
|
||||
this.sent[group.id] = new Date().getTime();
|
||||
this._publishOnChannel(group);
|
||||
}
|
||||
delete this.alerts[id];
|
||||
delete this.fadeOff[id];
|
||||
delete this.truncated[id];
|
||||
|
||||
this._clean(group);
|
||||
} else if (this.fadeOff[id]) {
|
||||
|
||||
if (now > this.fadeOff[id] + this.internalConfig.fadeOff) {
|
||||
delete this.fadeOff[id];
|
||||
delete this.alerts[id];
|
||||
delete this.truncated[id];
|
||||
}
|
||||
}
|
||||
|
||||
} else {
|
||||
this.fadeOff[id] = this.fadeOff[id] || now;
|
||||
}
|
||||
};
|
||||
|
||||
_publishOnChannel = (alert) => {
|
||||
|
@@ -68,27 +68,36 @@ export default class MonitorAS extends Monitor {
|
||||
if (prefixesOut.length > 1) {
|
||||
return `${matchedMessages[0].originAS} is announcing some prefixes which are not in the configured list of announced prefixes: ${prefixesOut}`
|
||||
} else if (prefixesOut.length === 1) {
|
||||
return alerts[0].message;
|
||||
return `${matchedMessages[0].originAS} is announcing ${matchedMessages[0].prefix} but this prefix is not in the configured list of announced prefixes`;
|
||||
|
||||
}
|
||||
|
||||
return false;
|
||||
};
|
||||
|
||||
_getMonitoredAS = (message) => {
|
||||
const monitored = this.monitored;
|
||||
|
||||
for (let m of monitored) {
|
||||
if (message.originAS.includes(m.asn)) {
|
||||
return m;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
monitor = (message) =>
|
||||
new Promise((resolve, reject) => {
|
||||
|
||||
const messageOrigin = message.originAS;
|
||||
const messagePrefix = message.prefix;
|
||||
const matchedRule = this.monitored.filter(i => message.path.getLast().includes(i.asn))[0];
|
||||
const matchedRule = this._getMonitoredAS(message);
|
||||
|
||||
if (matchedRule) {
|
||||
|
||||
const matchedPrefixRule = this.getMoreSpecificMatch(messagePrefix);
|
||||
if (!matchedPrefixRule) {
|
||||
const text = `${messageOrigin} is announcing ${messagePrefix} but this prefix is not in the configured list of announced prefixes`;
|
||||
|
||||
this.publishAlert(messageOrigin.getId().toString(),
|
||||
text,
|
||||
messageOrigin.getId(),
|
||||
matchedRule,
|
||||
message,
|
||||
|
@@ -52,7 +52,14 @@ export default class MonitorHijack extends Monitor {
|
||||
const peers = [...new Set(alerts.map(alert => alert.matchedMessage.peer))].length;
|
||||
|
||||
if (peers >= this.thresholdMinPeers) {
|
||||
return alerts[0].message;
|
||||
const matchedRule = alerts[0].matchedRule;
|
||||
const message = alerts[0].matchedMessage;
|
||||
const asnText = matchedRule.asn;
|
||||
|
||||
return (message.prefix === matchedRule.prefix) ?
|
||||
`The prefix ${matchedRule.prefix} (${matchedRule.description}) is announced by ${message.originAS} instead of ${asnText}` :
|
||||
`A new prefix ${message.prefix} is announced by ${message.originAS}. ` +
|
||||
`It should be instead ${matchedRule.prefix} (${matchedRule.description}) announced by ${asnText}`;
|
||||
}
|
||||
|
||||
return false;
|
||||
@@ -65,15 +72,8 @@ export default class MonitorHijack extends Monitor {
|
||||
const matchedRule = this.getMoreSpecificMatch(messagePrefix);
|
||||
|
||||
if (matchedRule && !matchedRule.ignore && !matchedRule.asn.includes(message.originAS)) {
|
||||
const asnText = matchedRule.asn;
|
||||
|
||||
const text = (message.prefix === matchedRule.prefix) ?
|
||||
`The prefix ${matchedRule.prefix} (${matchedRule.description}) is announced by ${message.originAS} instead of ${asnText}` :
|
||||
`A new prefix ${message.prefix} is announced by ${message.originAS}. ` +
|
||||
`It should be instead ${matchedRule.prefix} (${matchedRule.description}) announced by ${asnText}`;
|
||||
|
||||
this.publishAlert(message.originAS.getId() + "-" + message.prefix,
|
||||
text,
|
||||
matchedRule.asn.getId(),
|
||||
matchedRule,
|
||||
message,
|
||||
|
@@ -52,7 +52,11 @@ export default class MonitorNewPrefix extends Monitor {
|
||||
const peers = [...new Set(alerts.map(alert => alert.matchedMessage.peer))].length;
|
||||
|
||||
if (peers >= this.thresholdMinPeers) {
|
||||
return alerts[0].message;
|
||||
const message = alerts[0].matchedMessage;
|
||||
const matchedRule = alerts[0].matchedRule;
|
||||
|
||||
return `Possible change of configuration. A new prefix ${message.prefix} is announced by ${message.originAS}. It is a more specific of ${matchedRule.prefix} (${matchedRule.description})`;
|
||||
|
||||
}
|
||||
|
||||
return false;
|
||||
@@ -65,10 +69,8 @@ export default class MonitorNewPrefix extends Monitor {
|
||||
const matchedRule = this.getMoreSpecificMatch(messagePrefix);
|
||||
|
||||
if (matchedRule && !matchedRule.ignore && matchedRule.asn.includes(message.originAS) && matchedRule.prefix !== messagePrefix) {
|
||||
const text = `Possible change of configuration. A new prefix ${message.prefix} is announced by ${message.originAS}. It is a more specific of ${matchedRule.prefix} (${matchedRule.description}).`;
|
||||
|
||||
this.publishAlert(message.originAS.getId() + "-" + message.prefix,
|
||||
text,
|
||||
matchedRule.asn.getId(),
|
||||
matchedRule,
|
||||
message,
|
||||
|
35
src/monitors/monitorPassthrough.js
Normal file
@@ -0,0 +1,35 @@
|
||||
import Monitor from "./monitor";
|
||||
|
||||
export default class monitorPassthrough extends Monitor {
|
||||
|
||||
constructor(name, channel, params, env){
|
||||
super(name, channel, params, env);
|
||||
this.count = 0;
|
||||
};
|
||||
|
||||
filter = () => {
|
||||
return true
|
||||
};
|
||||
|
||||
squashAlerts = (alerts) => {
|
||||
return JSON.stringify(alerts[0]);
|
||||
};
|
||||
|
||||
monitor = (message) =>
|
||||
new Promise((resolve, reject) => {
|
||||
const prefix = message.prefix;
|
||||
this.publishAlert(this.count,
|
||||
prefix,
|
||||
{},
|
||||
message,
|
||||
{});
|
||||
|
||||
this.count++;
|
||||
|
||||
resolve(true);
|
||||
|
||||
});
|
||||
|
||||
|
||||
|
||||
}
|
@@ -54,7 +54,7 @@ export default class MonitorPath extends Monitor {
|
||||
|
||||
if (peers >= this.thresholdMinPeers) {
|
||||
const lengthViolation = (alerts.some(i => i.extra.lengthViolation)) ? "(including length violation) " : "";
|
||||
return `Matched ${alerts[0].matchedRule.path.matchDescription} on prefix ${alerts[0].matchedMessage.prefix} ${lengthViolation}${alerts.length} times.`;
|
||||
return `Matched ${alerts[0].matchedRule.path.matchDescription} on prefix ${alerts[0].matchedMessage.prefix} ${lengthViolation}${alerts.length} times`;
|
||||
}
|
||||
|
||||
return false;
|
||||
@@ -101,7 +101,6 @@ export default class MonitorPath extends Monitor {
|
||||
((!matchedRule.path.minLength && !matchedRule.path.maxLength) || !correctLength)) {
|
||||
|
||||
this.publishAlert(messagePrefix,
|
||||
`Matched ${matchedRule.path.matchDescription} on prefix ${messagePrefix}, path: ${message.path}`,
|
||||
matchedRule.prefix,
|
||||
matchedRule,
|
||||
message,
|
||||
|
62
src/monitors/monitorRPKI.js
Normal file
@@ -0,0 +1,62 @@
|
||||
import Monitor from "./monitor";
|
||||
import rpki from "rpki-validator";
|
||||
|
||||
export default class MonitorRPKI extends Monitor {
|
||||
|
||||
constructor(name, channel, params, env){
|
||||
super(name, channel, params, env);
|
||||
this.updateMonitoredPrefixes();
|
||||
this.cache = rpki.preCache(60);
|
||||
};
|
||||
|
||||
updateMonitoredPrefixes = () => {
|
||||
this.monitored = this.input.getMonitoredPrefixes();
|
||||
};
|
||||
|
||||
filter = (message) => {
|
||||
return message.type === 'announcement' && message.originAS.numbers.length == 1;
|
||||
};
|
||||
|
||||
squashAlerts = (alerts) => {
|
||||
|
||||
const message = alerts[0].matchedMessage;
|
||||
const covering = (alerts[0].extra.covering && alerts[0].extra.covering[0]) ? alerts[0].extra.covering[0] : false;
|
||||
const coveringString = (covering) ? `Valid ROA: origin AS${covering.origin} prefix ${covering.prefix} max length ${covering.maxLength}` : '';
|
||||
|
||||
return `The route ${message.prefix} announced by ${message.originAS} is not RPKI valid. Accepted with AS path: ${message.path}. ${coveringString}`;
|
||||
};
|
||||
|
||||
monitor = (message) =>
|
||||
new Promise((resolve, reject) => {
|
||||
const prefix = message.prefix;
|
||||
const origin = message.originAS.getValue();
|
||||
const matchedRule = this.input.getMoreSpecificMatch(prefix);
|
||||
|
||||
if (matchedRule) {
|
||||
|
||||
this.cache
|
||||
.then(() => rpki.validate(prefix, origin, true))
|
||||
.then((result) => {
|
||||
|
||||
if (result.valid === false) {
|
||||
const key = "a" + [prefix, origin]
|
||||
.join("AS")
|
||||
.replace(/\./g, "_")
|
||||
.replace(/\:/g, "_")
|
||||
.replace(/\//g, "_");
|
||||
|
||||
this.publishAlert(key,
|
||||
prefix,
|
||||
matchedRule,
|
||||
message,
|
||||
{ covering: result.covering });
|
||||
}
|
||||
});
|
||||
}
|
||||
resolve(true);
|
||||
|
||||
});
|
||||
|
||||
|
||||
|
||||
}
|
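// --- Illustrative sketch, not part of the new file ---
// The same rpki-validator calls used by the monitor above, shown in
// isolation: preCache() resolves once the validated ROA data is loaded,
// after which validate(prefix, origin, verbose) resolves with the validity
// state and the covering ROAs. The prefix/origin pair is an example only.
rpki.preCache(60)
    .then(() => rpki.validate("165.254.225.0/24", 15562, true))
    .then(result => console.log(result.valid, result.covering));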
@@ -43,14 +43,15 @@ export default class MonitorSwUpdates extends Monitor {
|
||||
};
|
||||
|
||||
squashAlerts = (alerts) => {
|
||||
return alerts[0].message;
|
||||
const message = alerts[0].matchedMessage;
|
||||
|
||||
return `A new version of BGPalerter is available. Current version: ${message.currentVersion} new version: ${message.newVersion}. Please, go to: ${message.repo}`;
|
||||
};
|
||||
|
||||
monitor = (message) =>
|
||||
new Promise((resolve, reject) => {
|
||||
|
||||
this.publishAlert("software-update",
|
||||
`A new version of BGPalerter is available. Current version: ${message.currentVersion} new version: ${message.newVersion}. Please, go to: ${message.repo}`,
|
||||
"bgpalerter",
|
||||
{
|
||||
group: "default"
|
||||
|
@@ -63,8 +63,8 @@ export default class MonitorVisibility extends Monitor {
|
||||
|
||||
if (peers >= this.thresholdMinPeers) {
|
||||
return (peers === 1) ?
|
||||
`The prefix ${alerts[0].matchedMessage.prefix} (${alerts[0].matchedRule.description}) it's no longer visible (withdrawn) from the peer ${alerts[0].matchedMessage.peer}.` :
|
||||
`The prefix ${alerts[0].matchedMessage.prefix} (${alerts[0].matchedRule.description}) has been withdrawn. It is no longer visible from ${peers} peers.`;
|
||||
`The prefix ${alerts[0].matchedMessage.prefix} (${alerts[0].matchedRule.description}) it's no longer visible (withdrawn) from the peer ${alerts[0].matchedMessage.peer}` :
|
||||
`The prefix ${alerts[0].matchedMessage.prefix} (${alerts[0].matchedRule.description}) has been withdrawn. It is no longer visible from ${peers} peers`;
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
@@ -81,7 +81,6 @@ export default class MonitorVisibility extends Monitor {
|
||||
let key = matchedRule.prefix;
|
||||
|
||||
this.publishAlert(key,
|
||||
`The prefix ${matchedRule.prefix} has been withdrawn.`,
|
||||
matchedRule.asn.getId(),
|
||||
matchedRule,
|
||||
message,
|
||||
|
22
src/pubSub.js
Normal file
@@ -0,0 +1,22 @@
|
||||
export default class PubSub{
|
||||
constructor() {
|
||||
this.callbacks = {};
|
||||
};
|
||||
|
||||
subscribe(channel, callback) {
|
||||
this.callbacks[channel] = this.callbacks[channel] || [];
|
||||
this.callbacks[channel].push(callback);
|
||||
};
|
||||
|
||||
publish(channel, content) {
|
||||
const callbacks = this.callbacks[channel] || [];
|
||||
for (let clb of callbacks) {
|
||||
new Promise(function(resolve, reject){
|
||||
clb(channel, content);
|
||||
resolve(true);
|
||||
})
|
||||
.catch(console.log);
|
||||
}
|
||||
};
|
||||
|
||||
}
|
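// --- Illustrative sketch, not part of the new file ---
// Channels are plain strings; every callback subscribed to a channel is
// invoked asynchronously with (channel, content).
const examplePubSub = new PubSub();
examplePubSub.subscribe("visibility", (channel, content) => console.log(channel, content));
examplePubSub.publish("visibility", { id: "example", message: "example alert" });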
@@ -172,4 +172,4 @@ export default class Report {
|
||||
report = (message, content) => {
|
||||
throw new Error('The method report must be implemented');
|
||||
}
|
||||
}
|
||||
}
|
||||
|
112
src/reports/reportAlerta.js
Normal file
@@ -0,0 +1,112 @@
|
||||
/*
|
||||
* BSD 3-Clause License
|
||||
*
|
||||
* Copyright (c) 2019, NTT Ltd.
|
||||
* All rights reserved.
|
||||
*
|
||||
* Redistribution and use in source and binary forms, with or without
|
||||
* modification, are permitted provided that the following conditions are met:
|
||||
*
|
||||
* Redistributions of source code must retain the above copyright notice, this
|
||||
* list of conditions and the following disclaimer.
|
||||
*
|
||||
* Redistributions in binary form must reproduce the above copyright notice,
|
||||
* this list of conditions and the following disclaimer in the documentation
|
||||
* and/or other materials provided with the distribution.
|
||||
*
|
||||
* Neither the name of the copyright holder nor the names of its
|
||||
* contributors may be used to endorse or promote products derived from
|
||||
* this software without specific prior written permission.
|
||||
*
|
||||
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
||||
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
||||
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
|
||||
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
|
||||
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
|
||||
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
|
||||
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
|
||||
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
*/
|
||||
|
||||
import Report from "./report";
|
||||
import axios from "axios";
|
||||
|
||||
export default class ReportAlerta extends Report {
|
||||
|
||||
constructor(channels, params, env) {
|
||||
super(channels, params, env);
|
||||
|
||||
this.environment = this.params.environment || env.environment;
|
||||
this.enabled = true;
|
||||
if (!this.params.urls || !Object.keys(this.params.urls).length){
|
||||
this.logger.log({
|
||||
level: 'error',
|
||||
message: "Alerta reporting is not enabled: no group is defined"
|
||||
});
|
||||
this.enabled = false;
|
||||
} else {
|
||||
if (!this.params.urls["default"]) {
|
||||
this.logger.log({
|
||||
level: 'error',
|
||||
message: "In urls, for reportAlerta, a group named 'default' is required for communications to the admin."
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
this.headers = {};
|
||||
if (this.params.key){
|
||||
this.headers.Authorization = "Key " + this.params.key;
|
||||
}
|
||||
if (this.params.token){
|
||||
this.headers.Authorization = "Bearer " + this.params.token;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
_createAlertaAlert = (url, message, content) => {
|
||||
|
||||
const severity = (this.params && this.params.severity && this.params.severity[message])
|
||||
? this.params.severity[message]
|
||||
: "informational"; // informational level
|
||||
const context = this.getContext(message, content);
|
||||
|
||||
axios({
|
||||
url: url + "/alert",
|
||||
method: "POST",
|
||||
headers: this.headers,
|
||||
responseType: "json",
|
||||
data: {
|
||||
event: message,
|
||||
resource: this.parseTemplate(this.params.resource_templates[message] || this.params.resource_templates["default"], context),
|
||||
text: content.message,
|
||||
service: [(this.params.service || "BGPalerter")],
|
||||
attributes: context,
|
||||
severity: severity,
|
||||
environment: this.environment
|
||||
}
|
||||
})
|
||||
.catch((error) => {
|
||||
this.logger.log({
|
||||
level: 'error',
|
||||
message: error
|
||||
});
|
||||
})
|
||||
};
|
||||
|
||||
report = (message, content) => {
|
||||
if (this.enabled){
|
||||
let groups = content.data.map(i => i.matchedRule.group).filter(i => i != null);
|
||||
|
||||
groups = (groups.length) ? [...new Set(groups)] : Object.keys(this.params.urls); // If there are no groups defined, send to all of them
|
||||
|
||||
for (let group of groups) {
|
||||
if (this.params.urls[group]) {
|
||||
this._createAlertaAlert(this.params.urls[group], message, content);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
}
|
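// --- Illustrative sketch, not part of the new file ---
// The params shape this report reads, inferred from the constructor and
// _createAlertaAlert above; every value here is an example, not a default.
const exampleAlertaParams = {
    environment: "production",
    key: "<alerta-api-key>",                    // or token: "<bearer-token>" for Bearer auth
    service: "BGPalerter",
    severity: {
        hijack: "critical"                      // per-channel severity; missing entries fall back to "informational"
    },
    resource_templates: {
        default: "bgp-alert"                    // passed through parseTemplate with the alert context
    },
    urls: {
        default: "https://alerta.example.net/api"  // group name -> Alerta API base URL; a "default" group is required
    }
};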
@@ -31,17 +31,55 @@
|
||||
*/
|
||||
|
||||
import Report from "./report";
|
||||
import fs from "fs";
|
||||
|
||||
export default class ReportFile extends Report {
|
||||
|
||||
constructor(channels, params, env) {
|
||||
super(channels, params, env);
|
||||
|
||||
this.persistAlerts = params.persistAlertData;
|
||||
this.alertsDirectory = params.alertDataDirectory;
|
||||
if (this.persistAlerts && !this.alertsDirectory) {
|
||||
this.persistAlerts = false;
|
||||
this.logger.log({
|
||||
level: 'error',
|
||||
message: "Cannot persist alert data, the parameter alertDataDirectory is missing."
|
||||
});
|
||||
}
|
||||
this.latestTimestamps = [];
|
||||
this.timestampsBacklogSize = 100;
|
||||
}
|
||||
|
||||
writeDataOnFile = (message) => {
|
||||
try {
|
||||
const timestamp = `${message.earliest}-${message.latest}`;
|
||||
this.latestTimestamps.push(timestamp);
|
||||
const count = this.latestTimestamps.filter(i => i === timestamp).length;
|
||||
this.latestTimestamps = this.latestTimestamps.slice(-this.timestampsBacklogSize);
|
||||
const filename = `${this.alertsDirectory}/alert-${timestamp}-${count}.json`;
|
||||
|
||||
if (!fs.existsSync(this.alertsDirectory)) {
|
||||
fs.mkdirSync(this.alertsDirectory);
|
||||
}
|
||||
|
||||
fs.writeFileSync(filename, JSON.stringify(message));
|
||||
} catch (error) {
|
||||
this.logger.log({
|
||||
level: 'error',
|
||||
message: error
|
||||
});
|
||||
}
|
||||
};
|
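// Persisted alerts produced above are named
// <alertDataDirectory>/alert-<earliest>-<latest>-<count>.json, where <count>
// disambiguates alerts sharing the same earliest/latest timestamps within the
// last timestampsBacklogSize entries.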
||||
|
||||
report = (message, content) => {
|
||||
this.logger.log({
|
||||
level: 'verbose',
|
||||
message: content.message
|
||||
});
|
||||
|
||||
if (this.persistAlerts) {
|
||||
this.writeDataOnFile(content);
|
||||
}
|
||||
}
|
||||
}
|
@@ -31,6 +31,7 @@
|
||||
*/
|
||||
|
||||
import Consumer from "./consumer";
|
||||
import LossyBuffer from "./lossyBuffer";
|
||||
import ConnectorFactory from "./connectorFactory";
|
||||
import cluster from "cluster";
|
||||
import fs from "fs";
|
||||
@@ -52,7 +53,7 @@ export default class Worker {
|
||||
Sentry.init({ dsn: env.sentryDSN });
|
||||
}
|
||||
|
||||
if (this.config.environment === "test") {
|
||||
if (!this.config.multiProcess) {
|
||||
|
||||
this.master();
|
||||
new Consumer();
|
||||
@@ -65,7 +66,7 @@ export default class Worker {
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
};
|
||||
|
||||
master = (worker) => {
|
||||
console.log("BGPalerter, version:", this.version, "environment:", this.config.environment);
|
||||
@@ -88,32 +89,42 @@ export default class Worker {
|
||||
if (this.config.uptimeMonitor) {
|
||||
this.logger.log({
|
||||
level: 'error',
|
||||
message: "The uptime monitor configuration changed. Please see the documentation https://github.com/nttgin/BGPalerter/blob/master/docs/uptime-monitor.md"
|
||||
message: "The uptime monitor configuration changed. Please see the documentation https://github.com/nttgin/BGPalerter/blob/master/docs/process-monitors.md"
|
||||
});
|
||||
}
|
||||
|
||||
if (this.config.uptimeMonitors) {
|
||||
for (let uptimeEntry of this.config.uptimeMonitors) {
|
||||
const UptimeModule = require("./uptimeMonitors/" + uptimeEntry.file).default;
|
||||
if (this.config.processMonitors) {
|
||||
for (let uptimeEntry of this.config.processMonitors) {
|
||||
const UptimeModule = require("./processMonitors/" + uptimeEntry.file).default;
|
||||
new UptimeModule(connectorFactory, uptimeEntry.params);
|
||||
}
|
||||
}
|
||||
|
||||
const bufferCleaningInterval = 300;
|
||||
this.config.maxMessagesPerSecond = this.config.maxMessagesPerSecond || 6000;
|
||||
const buffer = new LossyBuffer(parseInt(this.config.maxMessagesPerSecond /(1000/bufferCleaningInterval)), bufferCleaningInterval, this.logger);
|
||||
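// With the defaults above: 6000 messages/second divided by (1000 ms / 300 ms)
// gives a buffer of roughly 1800 messages per 300 ms cleaning interval;
// anything beyond that within a single interval is dropped by the LossyBuffer.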
connectorFactory.loadConnectors();
|
||||
return connectorFactory.connectConnectors()
|
||||
.then(() => {
|
||||
|
||||
for (const connector of connectorFactory.getConnectors()) {
|
||||
|
||||
connector.onMessage((message) => {
|
||||
buffer.add({
|
||||
connector: connector.name,
|
||||
message
|
||||
});
|
||||
});
|
||||
if (worker){
|
||||
connector.onMessage((message) => {
|
||||
worker.send(connector.name + "-" + message);
|
||||
buffer.onData((message) => {
|
||||
worker.send(message);
|
||||
});
|
||||
} else {
|
||||
connector.onMessage((message) => {
|
||||
this.pubSub.publish("data", connector.name + "-" + message);
|
||||
buffer.onData((message) => {
|
||||
this.pubSub.publish("data", message);
|
||||
});
|
||||
}
|
||||
|
||||
}
|
||||
})
|
||||
.then(() => connectorFactory.subscribeConnectors(this.input))
|
||||
|
@@ -74,8 +74,12 @@ describe("Composition", function() {
|
||||
"monitoredPrefixesFiles",
|
||||
"logging",
|
||||
"checkForUpdatesAtBoot",
|
||||
"uptimeMonitors",
|
||||
"pidFile"
|
||||
"processMonitors",
|
||||
"pidFile",
|
||||
"multiProcess",
|
||||
"maxMessagesPerSecond",
|
||||
"fadeOffSeconds",
|
||||
"checkFadeOffGroupsSeconds"
|
||||
]);
|
||||
expect(config.connectors[0]).to.have
|
||||
.property('class')
|
||||
@@ -160,7 +164,7 @@ describe("Composition", function() {
|
||||
"newprefix",
|
||||
"visibility"
|
||||
],
|
||||
"params": undefined
|
||||
"params": {}
|
||||
});
|
||||
|
||||
expect(config.reports[0]).to.have
|
||||
@@ -175,7 +179,7 @@ describe("Composition", function() {
|
||||
|
||||
it("loading prefixes", function () {
|
||||
|
||||
expect(input.prefixes.length).to.equal(12);
|
||||
expect(input.prefixes.length).to.equal(13);
|
||||
|
||||
expect(JSON.parse(JSON.stringify(input))).to
|
||||
.containSubset({
|
||||
@@ -269,6 +273,16 @@ describe("Composition", function() {
|
||||
"ignore": false,
|
||||
"excludeMonitors" : [],
|
||||
"includeMonitors": ["prefix-detection"]
|
||||
},
|
||||
{
|
||||
"asn": [15562],
|
||||
"description": "test fade off",
|
||||
"ignoreMorespecifics": false,
|
||||
"prefix": "165.24.225.0/24",
|
||||
"group": "default",
|
||||
"ignore": false,
|
||||
"excludeMonitors" : [],
|
||||
"includeMonitors": []
|
||||
}
|
||||
]
|
||||
});
|
||||
@@ -294,7 +308,7 @@ describe("Composition", function() {
|
||||
readLastLines
|
||||
.read(file, 1)
|
||||
.then((line) => {
|
||||
const lineMessage = line.split(" ").slice(3, 5).join(" ").trim();
|
||||
const lineMessage = line.split(" ").slice(2, 4).join(" ").trim();
|
||||
|
||||
expect(lineMessage).to
|
||||
.equal(message);
|
||||
@@ -315,7 +329,7 @@ describe("Composition", function() {
|
||||
readLastLines
|
||||
.read(file, 1)
|
||||
.then((line) => {
|
||||
const lineMessage = line.split(" ").slice(3, 5).join(" ").trim();
|
||||
const lineMessage = line.split(" ").slice(2, 5).join(" ").trim();
|
||||
expect(lineMessage).to.equal(message);
|
||||
done();
|
||||
});
|
||||
|
@@ -35,11 +35,11 @@ var chaiSubset = require('chai-subset');
|
||||
chai.use(chaiSubset);
|
||||
var expect = chai.expect;
|
||||
|
||||
var asyncTimeout = 20000;
|
||||
let asyncTimeout = 20000;
|
||||
global.EXTERNAL_VERSION_FOR_TEST = "0.0.1";
|
||||
global.EXTERNAL_CONFIG_FILE = "tests/config.test.yml";
|
||||
|
||||
|
||||
let visibilityDone = false;
|
||||
describe("Alerting", function () {
|
||||
var worker = require("../index");
|
||||
var pubSub = worker.pubSub;
|
||||
@@ -53,38 +53,41 @@ describe("Alerting", function () {
|
||||
id: '165.254.225.0/24',
|
||||
origin: 'withdrawal-detection',
|
||||
affected: 15562,
|
||||
message: 'The prefix 165.254.225.0/24 (description 1) has been withdrawn. It is no longer visible from 4 peers.'
|
||||
message: 'The prefix 165.254.225.0/24 (description 1) has been withdrawn. It is no longer visible from 4 peers'
|
||||
},
|
||||
"2a00:5884::/32": {
|
||||
id: '2a00:5884::/32',
|
||||
origin: 'withdrawal-detection',
|
||||
affected: "204092-45",
|
||||
message: 'The prefix 2a00:5884::/32 (alarig fix test) has been withdrawn. It is no longer visible from 4 peers.'
|
||||
message: 'The prefix 2a00:5884::/32 (alarig fix test) has been withdrawn. It is no longer visible from 4 peers'
|
||||
}
|
||||
};
|
||||
|
||||
pubSub.subscribe("visibility", function (type, message) {
|
||||
|
||||
message = JSON.parse(JSON.stringify(message));
|
||||
if (!visibilityDone) {
|
||||
message = JSON.parse(JSON.stringify(message));
|
||||
|
||||
const id = message.id;
|
||||
const id = message.id;
|
||||
|
||||
expect(Object.keys(expectedData).includes(id)).to.equal(true);
|
||||
expect(expectedData[id] != null).to.equal(true);
|
||||
expect(Object.keys(expectedData).includes(id)).to.equal(true);
|
||||
expect(expectedData[id] != null).to.equal(true);
|
||||
|
||||
|
||||
expect(message).to
|
||||
.containSubset(expectedData[id]);
|
||||
expect(message).to
|
||||
.containSubset(expectedData[id]);
|
||||
|
||||
expect(message).to.contain
|
||||
.keys([
|
||||
"latest",
|
||||
"earliest"
|
||||
]);
|
||||
expect(message).to.contain
|
||||
.keys([
|
||||
"latest",
|
||||
"earliest"
|
||||
]);
|
||||
|
||||
delete expectedData[id];
|
||||
if (Object.keys(expectedData).length === 0){
|
||||
done();
|
||||
delete expectedData[id];
|
||||
if (Object.keys(expectedData).length === 0) {
|
||||
done();
|
||||
visibilityDone = true;
|
||||
}
|
||||
}
|
||||
|
||||
});
|
||||
@@ -100,7 +103,7 @@ describe("Alerting", function () {
|
||||
id: '15562-4-165.254.255.0/25',
|
||||
origin: 'basic-hijack-detection',
|
||||
affected: 15562,
|
||||
message: 'A new prefix 165.254.255.0/25 is announced by AS4, and AS15562. It should be instead 165.254.255.0/24 (description 2) announced by AS15562',
|
||||
message: 'A new prefix 165.254.255.0/25 is announced by AS15562, and AS4. It should be instead 165.254.255.0/24 (description 2) announced by AS15562',
|
||||
data: [
|
||||
{
|
||||
extra: {},
|
||||
@@ -122,11 +125,11 @@ describe("Alerting", function () {
|
||||
}
|
||||
]
|
||||
},
|
||||
"208585-2a00:5884:ffff:/48": {
|
||||
id: '208585-2a00:5884:ffff:/48',
|
||||
"208585-2a00:5884:ffff::/48": {
|
||||
id: '208585-2a00:5884:ffff::/48',
|
||||
origin: 'basic-hijack-detection',
|
||||
affected: "204092-45",
|
||||
message: 'A new prefix 2a00:5884:ffff:/48 is announced by AS208585. It should be instead 2a00:5884::/32 (alarig fix test) announced by AS204092, and AS45',
|
||||
message: 'A new prefix 2a00:5884:ffff::/48 is announced by AS208585. It should be instead 2a00:5884::/32 (alarig fix test) announced by AS204092, and AS45',
|
||||
data: [
|
||||
{
|
||||
extra: {},
|
||||
@@ -139,7 +142,7 @@ describe("Alerting", function () {
|
||||
},
|
||||
matchedMessage: {
|
||||
type: "announcement",
|
||||
prefix: "2a00:5884:ffff:/48",
|
||||
prefix: "2a00:5884:ffff::/48",
|
||||
peer: "124.0.0.3",
|
||||
path: [1, 2, 3, 208585],
|
||||
originAS: [208585],
|
||||
@@ -177,7 +180,7 @@ describe("Alerting", function () {
|
||||
|
||||
};
|
||||
|
||||
pubSub.subscribe("hijack", function (type, message) {
|
||||
pubSub.subscribe("hijack", function(type, message){
|
||||
|
||||
message = JSON.parse(JSON.stringify(message));
|
||||
|
||||
@@ -186,7 +189,6 @@ describe("Alerting", function () {
|
||||
expect(Object.keys(expectedData).includes(id)).to.equal(true);
|
||||
expect(expectedData[id] != null).to.equal(true);
|
||||
|
||||
|
||||
expect(message).to
|
||||
.containSubset(expectedData[id]);
|
||||
|
||||
@@ -214,7 +216,7 @@ describe("Alerting", function () {
|
||||
id: '1234-175.254.205.0/25',
|
||||
origin: 'prefix-detection',
|
||||
affected: 1234,
|
||||
message: 'Possible change of configuration. A new prefix 175.254.205.0/25 is announced by AS1234. It is a more specific of 175.254.205.0/24 (include exclude test).',
|
||||
message: 'Possible change of configuration. A new prefix 175.254.205.0/25 is announced by AS1234. It is a more specific of 175.254.205.0/24 (include exclude test)',
|
||||
data: [
|
||||
{
|
||||
extra: {},
|
||||
@@ -242,7 +244,7 @@ describe("Alerting", function () {
|
||||
id: '1234-170.254.205.0/25',
|
||||
origin: 'prefix-detection',
|
||||
affected: 1234,
|
||||
message: 'Possible change of configuration. A new prefix 170.254.205.0/25 is announced by AS1234. It is a more specific of 170.254.205.0/24 (include exclude test).',
|
||||
message: 'Possible change of configuration. A new prefix 170.254.205.0/25 is announced by AS1234. It is a more specific of 170.254.205.0/24 (include exclude test)',
|
||||
data: [
|
||||
{
|
||||
extra: {},
|
||||
@@ -271,7 +273,7 @@ describe("Alerting", function () {
|
||||
id: '15562-165.254.255.0/25',
|
||||
origin: 'prefix-detection',
|
||||
affected: 15562,
|
||||
message: 'Possible change of configuration. A new prefix 165.254.255.0/25 is announced by AS15562. It is a more specific of 165.254.255.0/24 (description 2).',
|
||||
message: 'Possible change of configuration. A new prefix 165.254.255.0/25 is announced by AS15562. It is a more specific of 165.254.255.0/24 (description 2)',
|
||||
data: [
|
||||
{
|
||||
extra: {},
|
||||
@@ -293,11 +295,11 @@ describe("Alerting", function () {
|
||||
}
|
||||
]
|
||||
},
|
||||
"204092-2a00:5884:ffff:/48": {
|
||||
id: '204092-2a00:5884:ffff:/48',
|
||||
"204092-2a00:5884:ffff::/48": {
|
||||
id: '204092-2a00:5884:ffff::/48',
|
||||
origin: 'prefix-detection',
|
||||
affected: "204092-45",
|
||||
message: 'Possible change of configuration. A new prefix 2a00:5884:ffff:/48 is announced by AS204092. It is a more specific of 2a00:5884::/32 (alarig fix test).',
|
||||
message: 'Possible change of configuration. A new prefix 2a00:5884:ffff::/48 is announced by AS204092. It is a more specific of 2a00:5884::/32 (alarig fix test)',
|
||||
data: [
|
||||
{
|
||||
extra: {},
|
||||
@@ -310,7 +312,7 @@ describe("Alerting", function () {
|
||||
},
|
||||
matchedMessage: {
|
||||
type: 'announcement',
|
||||
prefix: '2a00:5884:ffff:/48',
|
||||
prefix: '2a00:5884:ffff::/48',
|
||||
peer: '124.0.0.3',
|
||||
path: [ 1, 2, 3, 204092 ],
|
||||
originAS: [204092],
|
||||
@@ -360,7 +362,7 @@ describe("Alerting", function () {
|
||||
id: '98.5.4.3/22',
|
||||
origin: 'path-matching',
|
||||
affected: "98.5.4.3/22",
|
||||
message: 'Matched test description on prefix 98.5.4.3/22 (including length violation) 1 times.',
|
||||
message: 'Matched test description on prefix 98.5.4.3/22 (including length violation) 1 times',
|
||||
data: [
|
||||
{
|
||||
extra: {
|
||||
@@ -395,7 +397,7 @@ describe("Alerting", function () {
|
||||
id: '99.5.4.3/22',
|
||||
origin: 'path-matching',
|
||||
affected: "99.5.4.3/22",
|
||||
message: 'Matched test description on prefix 99.5.4.3/22 1 times.',
|
||||
message: 'Matched test description on prefix 99.5.4.3/22 1 times',
|
||||
data: [
|
||||
{
|
||||
extra: {
|
||||
@@ -454,8 +456,6 @@ describe("Alerting", function () {
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
it("asn monitoring reporting", function (done) {
|
||||
|
||||
pubSub.publish("test-type", "misconfiguration");
|
||||
@@ -490,11 +490,27 @@ describe("Alerting", function () {
|
||||
if (Object.keys(expectedData).length === 0){
|
||||
done();
|
||||
}
|
||||
|
||||
});
|
||||
|
||||
}).timeout(asyncTimeout);
|
||||
|
||||
|
||||
it("fading alerting", function (done) {
|
||||
|
||||
pubSub.publish("test-type", "fade-off");
|
||||
|
||||
let notReceived = true;
|
||||
|
||||
setTimeout(() => {
|
||||
if (notReceived){
|
||||
done();
|
||||
}
|
||||
}, 15000);
|
||||
|
||||
pubSub.subscribe("visibility", function (type, message) {
|
||||
notReceived = false;
|
||||
});
|
||||
|
||||
}).timeout(asyncTimeout);
|
||||
|
||||
});
|
||||
|
@@ -48,7 +48,7 @@ describe("Uptime Monitor", function() {
|
||||
var config = worker.config;
|
||||
|
||||
it("uptime config", function () {
|
||||
expect(config.uptimeMonitors[0]).to
|
||||
expect(config.processMonitors[0]).to
|
||||
.containSubset({
|
||||
params: {
|
||||
useStatusCodes: true,
|
||||
@@ -60,7 +60,7 @@ describe("Uptime Monitor", function() {
|
||||
|
||||
it("API format and header", function (done) {
|
||||
|
||||
const port = config.uptimeMonitors[0].params.port;
|
||||
const port = config.processMonitors[0].params.port;
|
||||
|
||||
axios({
|
||||
method: 'get',
|
||||
|
@@ -45,9 +45,14 @@ reports:
|
||||
- visibility
|
||||
- path
|
||||
- misconfiguration
|
||||
params:
|
||||
persistAlertData: false
|
||||
alertDataDirectory: alertdata/
|
||||
|
||||
notificationIntervalSeconds: 1800 # Repeat the same alert (which keeps being triggered) after x seconds
|
||||
alertOnlyOnce: false
|
||||
fadeOffSeconds: 10
|
||||
checkFadeOffGroupsSeconds: 2
|
||||
|
||||
# The file containing the monitored prefixes. Please see monitored_prefixes_test.yml for an example
|
||||
# This is an array (use new lines and dashes!)
|
||||
@@ -56,18 +61,21 @@ monitoredPrefixesFiles:
|
||||
|
||||
logging:
|
||||
directory: logs
|
||||
logRotatePattern: YYYY-MM-DD # Whenever the pattern changes, a new file is created and the old one rotated
|
||||
zippedArchive: true
|
||||
maxSize: 20m
|
||||
maxFiles: 14d
|
||||
logRotatePattern: YYYY-MM-DD # Whenever the pattern changes, a new file is created and the old one rotated
|
||||
backlogSize: 1
|
||||
maxRetainedFiles: 10
|
||||
maxFileSizeMB: 15
|
||||
compressOnRotation: true
|
||||
|
||||
checkForUpdatesAtBoot: true
|
||||
|
||||
uptimeMonitors:
|
||||
processMonitors:
|
||||
- file: uptimeApi
|
||||
params:
|
||||
useStatusCodes: true
|
||||
host: null
|
||||
port: 8011
|
||||
|
||||
pidFile: bgpalerter.pid
|
||||
pidFile: bgpalerter.pid
|
||||
multiProcess: false
|
||||
maxMessagesPerSecond: 6000
|
@@ -84,6 +84,11 @@
|
||||
minLength: 2
|
||||
matchDescription: test description
|
||||
|
||||
165.24.225.0/24:
|
||||
description: test fade off
|
||||
asn: 15562
|
||||
ignoreMorespecifics: false
|
||||
|
||||
options:
|
||||
monitorASns:
|
||||
2914:
|
||||
|