mirror of
https://github.com/checktheroads/hyperglass
synced 2024-05-11 05:55:08 +00:00
complete overhaul, mostly to make pylint happy
541
.pylintrc
Normal file
@@ -0,0 +1,541 @@
# Hyperglass PyLint: Notes
#
# This is a mostly default pylintrc file, generated by PyLint. Only cosmetic parameters have been
# changed, mostly naming-style standards.
#
# Additionally, the "cyclic-import" and "logging-fstring-interpolation" messages have been disabled.
#
# "cyclic-import" was disabled due to the structure of the project; almost all modules rely on or
# pass data back and forth between other modules.
#
# "logging-fstring-interpolation" was disabled due to me thinking it's stupid. I find fstrings
# extremely valuable, and while I could get around this default setting by setting variables for
# each log message, e.g.:
# log_message = f"Error: {var1}, {var2}, {var3}"
# logger.error(log_message)
# I find this to be needlessly obtuse, and therefore log fstrings directly:
# logger.error(f"Error: {var1}, {var2}, {var3}")
# Perhaps this is "incorrect", but it works well and is more elegant, in my uneducated opinion.
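#
# For reference, a minimal sketch of the two styles (assuming a standard logging/logzero logger
# named `logger`); the lazy %-interpolation form is what pylint's W1203 check prefers, because it
# defers string formatting until the record is actually emitted, while the f-string form above
# formats eagerly:
#   logger.error("Error: %s, %s, %s", var1, var2, var3)  # lazy; skipped if the record is filtered
#   logger.error(f"Error: {var1}, {var2}, {var3}")       # eager; flagged as W1203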

[MASTER]

# A comma-separated list of package or module names from where C extensions may
# be loaded. Extensions are loading into the active Python interpreter and may
# run arbitrary code.
extension-pkg-whitelist=

# Add files or directories to the blacklist. They should be base names, not
# paths.
ignore=CVS

# Add files or directories matching the regex patterns to the blacklist. The
# regex matches against base names, not paths.
ignore-patterns=

# Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the
# number of processors available to use.
jobs=1

# Control the amount of potential inferred values when inferring a single
# object. This can help the performance when dealing with large functions or
# complex, nested conditions.
limit-inference-results=100

# List of plugins (as comma separated values of python modules names) to load,
# usually to register additional checkers.
load-plugins=

# Pickle collected data for later comparisons.
persistent=yes

# When enabled, pylint would attempt to guess common misconfiguration and emit
# user-friendly hints instead of false-positive error messages.
suggestion-mode=yes

# Allow loading of arbitrary C extensions. Extensions are imported into the
# active Python interpreter and may run arbitrary code.
unsafe-load-any-extension=no


[MESSAGES CONTROL]

# Only show warnings with the listed confidence levels. Leave empty to show
# all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED.
confidence=

# Disable the message, report, category or checker with the given id(s). You
# can either give multiple identifiers separated by comma (,) or put this
# option multiple times (only on the command line, not in the configuration
# file where it should appear only once). You can also use "--disable=all" to
# disable everything first and then reenable specific checks. For example, if
# you want to run only the similarities checker, you can use "--disable=all
# --enable=similarities". If you want to run only the classes checker, but have
# no Warning level messages displayed, use "--disable=all --enable=classes
# --disable=W".
disable=print-statement,
        parameter-unpacking,
        unpacking-in-except,
        old-raise-syntax,
        backtick,
        long-suffix,
        old-ne-operator,
        old-octal-literal,
        import-star-module-level,
        non-ascii-bytes-literal,
        raw-checker-failed,
        bad-inline-option,
        locally-disabled,
        file-ignored,
        suppressed-message,
        useless-suppression,
        deprecated-pragma,
        use-symbolic-message-instead,
        apply-builtin,
        basestring-builtin,
        buffer-builtin,
        cmp-builtin,
        coerce-builtin,
        execfile-builtin,
        file-builtin,
        long-builtin,
        raw_input-builtin,
        reduce-builtin,
        standarderror-builtin,
        unicode-builtin,
        xrange-builtin,
        coerce-method,
        delslice-method,
        getslice-method,
        setslice-method,
        no-absolute-import,
        old-division,
        dict-iter-method,
        dict-view-method,
        next-method-called,
        metaclass-assignment,
        indexing-exception,
        raising-string,
        reload-builtin,
        oct-method,
        hex-method,
        nonzero-method,
        cmp-method,
        input-builtin,
        round-builtin,
        intern-builtin,
        unichr-builtin,
        map-builtin-not-iterating,
        zip-builtin-not-iterating,
        range-builtin-not-iterating,
        filter-builtin-not-iterating,
        using-cmp-argument,
        eq-without-hash,
        div-method,
        idiv-method,
        rdiv-method,
        exception-message-attribute,
        invalid-str-codec,
        sys-max-int,
        bad-python3-import,
        deprecated-string-function,
        deprecated-str-translate-call,
        deprecated-itertools-function,
        deprecated-types-field,
        next-method-defined,
        dict-items-not-iterating,
        dict-keys-not-iterating,
        dict-values-not-iterating,
        deprecated-operator-function,
        deprecated-urllib-function,
        xreadlines-attribute,
        deprecated-sys-function,
        exception-escape,
        comprehension-escape,
        bad-continuation,
        cyclic-import,
        logging-fstring-interpolation

# Enable the message, report, category or checker with the given id(s). You can
# either give multiple identifier separated by comma (,) or put this option
# multiple time (only on the command line, not in the configuration file where
# it should appear only once). See also the "--disable" option for examples.
enable=c-extension-no-member


[REPORTS]

# Python expression which should return a note less than 10 (10 is the highest
# note). You have access to the variables errors warning, statement which
# respectively contain the number of errors / warnings messages and the total
# number of statements analyzed. This is used by the global evaluation report
# (RP0004).
evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)

# Set the output format. Available formats are text, parseable, colorized, json
# and msvs (visual studio). You can also give a reporter class, e.g.
# mypackage.mymodule.MyReporterClass.
output-format=text

# Tells whether to display a full report or only the messages.
reports=no

# Activate the evaluation score.
score=yes


[REFACTORING]

# Maximum number of nested blocks for function / method body
max-nested-blocks=5

# Complete name of functions that never returns. When checking for
# inconsistent-return-statements if a never returning function is called then
# it will be considered as an explicit return statement and no message will be
# printed.
never-returning-functions=sys.exit


[LOGGING]

# Format style used to check logging format string. `old` means using %
# formatting, while `new` is for `{}` formatting.
logging-format-style=new

# Logging modules to check that the string format arguments are in logging
# function parameter format.
logging-modules=logging


[SPELLING]

# Limits count of emitted suggestions for spelling mistakes.
max-spelling-suggestions=4

# Spelling dictionary name. Available dictionaries: none. To make it working
# install python-enchant package..
spelling-dict=

# List of comma separated words that should not be checked.
spelling-ignore-words=

# A path to a file that contains private dictionary; one word per line.
spelling-private-dict-file=

# Tells whether to store unknown words to indicated private dictionary in
# --spelling-private-dict-file option instead of raising a message.
spelling-store-unknown-words=no


[MISCELLANEOUS]

# List of note tags to take in consideration, separated by a comma.
notes=FIXME,
      XXX,
      TODO


[TYPECHECK]

# List of decorators that produce context managers, such as
# contextlib.contextmanager. Add to this list to register other decorators that
# produce valid context managers.
contextmanager-decorators=contextlib.contextmanager

# List of members which are set dynamically and missed by pylint inference
# system, and so shouldn't trigger E1101 when accessed. Python regular
# expressions are accepted.
generated-members=

# Tells whether missing members accessed in mixin class should be ignored. A
# mixin class is detected if its name ends with "mixin" (case insensitive).
ignore-mixin-members=yes

# Tells whether to warn about missing members when the owner of the attribute
# is inferred to be None.
ignore-none=yes

# This flag controls whether pylint should warn about no-member and similar
# checks whenever an opaque object is returned when inferring. The inference
# can return multiple potential results while evaluating a Python object, but
# some branches might not be evaluated, which results in partial inference. In
# that case, it might be useful to still emit no-member and other checks for
# the rest of the inferred objects.
ignore-on-opaque-inference=yes

# List of class names for which member attributes should not be checked (useful
# for classes with dynamically set attributes). This supports the use of
# qualified names.
ignored-classes=optparse.Values,thread._local,_thread._local

# List of module names for which member attributes should not be checked
# (useful for modules/projects where namespaces are manipulated during runtime
# and thus existing member attributes cannot be deduced by static analysis. It
# supports qualified module names, as well as Unix pattern matching.
ignored-modules=

# Show a hint with possible names when a member name was not found. The aspect
# of finding the hint is based on edit distance.
missing-member-hint=yes

# The minimum edit distance a name should have in order to be considered a
# similar match for a missing member name.
missing-member-hint-distance=1

# The total number of similar names that should be taken in consideration when
# showing a hint for a missing member.
missing-member-max-choices=1


[VARIABLES]

# List of additional names supposed to be defined in builtins. Remember that
# you should avoid defining new builtins when possible.
additional-builtins=

# Tells whether unused global variables should be treated as a violation.
allow-global-unused-variables=yes

# List of strings which can identify a callback function by name. A callback
# name must start or end with one of those strings.
callbacks=cb_,
          _cb

# A regular expression matching the name of dummy variables (i.e. expected to
# not be used).
dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_

# Argument names that match this expression will be ignored. Default to name
# with leading underscore.
ignored-argument-names=_.*|^ignored_|^unused_

# Tells whether we should check for unused import in __init__ files.
init-import=no

# List of qualified module names which can have objects that can redefine
# builtins.
redefining-builtins-modules=six.moves,past.builtins,future.builtins,builtins,io


[FORMAT]

# Expected format of line ending, e.g. empty (any line ending), LF or CRLF.
expected-line-ending-format=

# Regexp for a line that is allowed to be longer than the limit.
ignore-long-lines=^\s*(# )?<?https?://\S+>?$

# Number of spaces of indent required inside a hanging or continued line.
indent-after-paren=4

# String used as indentation unit. This is usually "    " (4 spaces) or "\t" (1
# tab).
indent-string='    '

# Maximum number of characters on a single line.
max-line-length=100

# Maximum number of lines in a module.
max-module-lines=1000

# List of optional constructs for which whitespace checking is disabled. `dict-
# separator` is used to allow tabulation in dicts, etc.: {1 : 1,\n222: 2}.
# `trailing-comma` allows a space between comma and closing bracket: (a, ).
# `empty-line` allows space-only lines.
no-space-check=trailing-comma,
               dict-separator

# Allow the body of a class to be on the same line as the declaration if body
# contains single statement.
single-line-class-stmt=no

# Allow the body of an if to be on the same line as the test if there is no
# else.
single-line-if-stmt=no


[SIMILARITIES]

# Ignore comments when computing similarities.
ignore-comments=yes

# Ignore docstrings when computing similarities.
ignore-docstrings=yes

# Ignore imports when computing similarities.
ignore-imports=no

# Minimum lines number of a similarity.
min-similarity-lines=4


[BASIC]

# Naming style matching correct argument names.
argument-naming-style=any

# Naming style matching correct attribute names.
attr-naming-style=any

# Bad variable names which should always be refused, separated by a comma.
bad-names=foo,
          bar,
          baz,
          toto,
          tutu,
          tata

# Naming style matching correct class attribute names.
class-attribute-naming-style=snake_case

# Naming style matching correct class names.
class-naming-style=PascalCase

# Naming style matching correct constant names.
const-naming-style=snake_case

# Minimum line length for functions/classes that require docstrings, shorter
# ones are exempt.
docstring-min-length=-1

# Naming style matching correct function names.
function-naming-style=snake_case

# Good variable names which should always be accepted, separated by a comma.
good-names=i,
           j,
           k,
           ex,
           Run,
           _

# Include a hint for the correct naming format with invalid-name.
include-naming-hint=yes

# Naming style matching correct inline iteration names.
inlinevar-naming-style=any

# Naming style matching correct method names.
method-naming-style=snake_case

# Naming style matching correct module names.
module-naming-style=snake_case

# Colon-delimited sets of names that determine each other's naming style when
# the name regexes allow several styles.
name-group=

# Regular expression which should only match function or class names that do
# not require a docstring.
no-docstring-rgx=^_

# List of decorators that produce properties, such as abc.abstractproperty. Add
# to this list to register other decorators that produce valid properties.
# These decorators are taken in consideration only for invalid-name.
property-classes=abc.abstractproperty

# Naming style matching correct variable names.
variable-naming-style=snake_case


[STRING]

# This flag controls whether the implicit-str-concat-in-sequence should
# generate a warning on implicit string concatenation in sequences defined over
# several lines.
check-str-concat-over-line-jumps=no


[IMPORTS]

# Allow wildcard imports from modules that define __all__.
allow-wildcard-with-all=no

# Analyse import fallback blocks. This can be used to support both Python 2 and
# 3 compatible code, which means that the block might have code that exists
# only in one or another interpreter, leading to false positives when analysed.
analyse-fallback-blocks=no

# Deprecated modules which should not be used, separated by a comma.
deprecated-modules=optparse,tkinter.tix

# Create a graph of external dependencies in the given file (report RP0402 must
# not be disabled).
ext-import-graph=

# Create a graph of every (i.e. internal and external) dependencies in the
# given file (report RP0402 must not be disabled).
import-graph=

# Create a graph of internal dependencies in the given file (report RP0402 must
# not be disabled).
int-import-graph=

# Force import order to recognize a module as part of the standard
# compatibility libraries.
known-standard-library=

# Force import order to recognize a module as part of a third party library.
known-third-party=enchant


[CLASSES]

# List of method names used to declare (i.e. assign) instance attributes.
defining-attr-methods=__init__,
                      __new__,
                      setUp

# List of member names, which should be excluded from the protected access
# warning.
exclude-protected=_asdict,
                  _fields,
                  _replace,
                  _source,
                  _make

# List of valid names for the first argument in a class method.
valid-classmethod-first-arg=cls

# List of valid names for the first argument in a metaclass class method.
valid-metaclass-classmethod-first-arg=cls


[DESIGN]

# Maximum number of arguments for function / method.
max-args=5

# Maximum number of attributes for a class (see R0902).
max-attributes=7

# Maximum number of boolean expressions in an if statement.
max-bool-expr=5

# Maximum number of branch for function / method body.
max-branches=12

# Maximum number of locals for function / method body.
max-locals=15

# Maximum number of parents for a class (see R0901).
max-parents=7

# Maximum number of public methods for a class (see R0904).
max-public-methods=20

# Maximum number of return / yield for function / method body.
max-returns=6

# Maximum number of statements in function / method body.
max-statements=50

# Minimum number of public methods for a class (see R0903).
min-public-methods=2


[EXCEPTIONS]

# Exceptions that will emit a warning when being caught. Defaults to
# "BaseException, Exception".
overgeneral-exceptions=BaseException,
                       Exception
@@ -1,3 +1,39 @@
"""
https://github.com/checktheroads/hyperglass

The Clear BSD License

Copyright (c) 2019 Matthew Love
All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted (subject to the limitations in the disclaimer
below) provided that the following conditions are met:

* Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.

* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.

* Neither the name of the copyright holder nor the names of its
contributors may be used to endorse or promote products derived from this
software without specific prior written permission.

NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY
THIS LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER
IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
"""
from hyperglass import command
from hyperglass import configuration
from hyperglass import render
1
hyperglass/command/.gitignore
vendored
Normal file
@@ -0,0 +1 @@
parse.py
@@ -1,4 +1,8 @@
#!/usr/bin/env python3
# https://github.com/checktheroads/hyperglass
"""
Constructs SSH commands or API call parameters based on front end input, executes the
commands/calls, returns the output to front end
"""
from hyperglass.command import execute
from hyperglass.command import construct
from hyperglass.command import parse
from hyperglass.command import validate
@@ -1,195 +1,123 @@
# https://github.com/checktheroads/hyperglass
"""
Accepts filtered & validated input from execute.py, constructs SSH command for Netmiko library or \
API call parameters for hyperglass-frr
"""
# Module Imports
import re
import sys
import json
import toml
from logzero import logger
from netaddr import IPNetwork, IPAddress, IPSet
import inspect
from netaddr import IPNetwork, IPAddress  # pylint: disable=unused-import

# Dear PyLint, the netaddr library is a special snowflake. You might not see `IPAddress` get used, \
# but when you use something like `IPNetwork("192.0.2.1/24").ip`, the returned value is \
# IPAddress("192.0.2.1"), so I do actually need this import. <3, -ML
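# A quick interactive illustration of that netaddr behavior (a sketch, assuming netaddr is
# installed):
#   >>> from netaddr import IPNetwork
#   >>> IPNetwork("192.0.2.1/24").ip
#   IPAddress('192.0.2.1')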

# Project Imports
from hyperglass import configuration

# Configuration Imports
code = configuration.codes()
g = configuration.general()
codes = configuration.codes()
config = configuration.general()


def frr(cmd, ipprefix, device):
"""Validates input and constructs API call to FRRouting Stack via hyperglass-frr API"""
d_address = device["address"]
d_src_addr_ipv4 = device["src_addr_ipv4"]
d_src_addr_ipv6 = device["src_addr_ipv6"]
d_location = device["location"]
d_name = device["name"]
d_port = device["port"]
d_type = device["type"]
def current_function():
"""Returns name of current function"""
this_function = inspect.stack()[1][3]
return this_function

# BGP Community Query
if cmd in ["bgp_community"]:
# Extended Communities, new-format
query = json.dumps({"cmd": cmd, "afi": "dual", "target": ipprefix})
if re.match("^([0-9]{0,5})\:([0-9]{1,5})$", ipprefix):
msg = f"{ipprefix} matched new-format community."
return (msg, code.success, d_address, query)
# Extended Communities, 32 bit format
elif re.match("^[0-9]{1,10}$", ipprefix):
msg = f"{ipprefix} matched 32 bit community."
return (msg, code.success, d_address, query)
# RFC 8092 Large Community Support
elif re.match("^([0-9]{1,10})\:([0-9]{1,10})\:[0-9]{1,10}$", ipprefix):
msg = f"{ipprefix} matched large community."
return (msg, code.success, d_address, query)
else:
msg = g.msg_error_invaliddual.format(i=ipprefix, qt="BGP Community")
logger.error(f"{msg}, {code.danger}, {d_name}, {query}")
return (msg, code.danger, d_address, query)
# BGP AS_PATH Query
elif cmd in ["bgp_aspath"]:
if re.match(".*", ipprefix):
query = json.dumps({"cmd": cmd, "afi": "dual", "target": ipprefix})
msg = f"{ipprefix} matched AS_PATH regex."
return (msg, code.success, d_address, query)
else:
msg = g.msg_error_invaliddual.format(i=ipprefix, qt="AS Path")
logger.error(f"{msg}, {code.danger}, {d_name}, {cmd}, {ipprefix}")
return (msg, code.danger, d_address, query)
# BGP Route Query
elif cmd in ["bgp_route"]:
try:
# Use netaddr library to verify if input is a valid IPv4 address or prefix
if IPNetwork(ipprefix).ip.version == 4:
query = json.dumps({"cmd": cmd, "afi": "ipv4", "target": ipprefix})
msg = f"{ipprefix} is a valid IPv4 Address."
return (msg, code.success, d_address, query)
# Use netaddr library to verify if input is a valid IPv6 address or prefix
elif IPNetwork(ipprefix).ip.version == 6:
query = json.dumps({"cmd": cmd, "afi": "ipv6", "target": ipprefix})
msg = f"{ipprefix} is a valid IPv6 Address."
return (msg, code.success, d_address, query)
# Exception from netaddr library will return a user-facing error
except:
msg = g.msg_error_invalidip.format(i=ipprefix)
logger.error(f"{msg}, {code.danger}, {d_name}, {query}")
return (msg, code.danger, d_address, query)
# Ping/Traceroute
elif cmd in ["ping", "traceroute"]:
try:
if IPNetwork(ipprefix).ip.version == 4:

class Construct:
"""Constructor for FRRouting API"""

def __init__(self, device):
self.device = device
self.d_address = self.device["address"]
self.d_src_addr_ipv4 = self.device["src_addr_ipv4"]
self.d_src_addr_ipv6 = self.device["src_addr_ipv6"]
self.d_name = self.device["name"]
self.d_type = self.device["type"]
self.command = configuration.command(self.d_type)

def get_src(self, ver):
"""Returns source IP based on IP version."""
src = None
if ver == 4:
src = self.d_src_addr_ipv4
if ver == 6:
src = self.d_src_addr_ipv6
return src

def ping(self, transport, target):
"""Constructs ping query parameters from pre-validated input"""
cmd = current_function()
query = None
ip_version = IPNetwork(target).ip.version
afi = f"ipv{ip_version}"
source = self.get_src(ip_version)
if transport == "rest":
query = json.dumps(
{
"cmd": cmd,
"afi": "ipv4",
"source": d_src_addr_ipv4,
"target": ipprefix,
}
{"cmd": cmd, "afi": afi, "source": source, "target": target}
)
msg = f"{ipprefix} is a valid IPv4 Address."
return (msg, code.success, d_address, query)
elif IPNetwork(ipprefix).ip.version == 6:
if transport == "scrape":
conf_command = self.command[afi][cmd]
fmt_command = conf_command.format(target=target, source=source)
query = (self.d_address, self.d_type, fmt_command)
return query

def traceroute(self, transport, target):
"""Constructs traceroute query parameters from pre-validated input"""
cmd = current_function()
query = None
ip_version = IPNetwork(target).ip.version
afi = f"ipv{ip_version}"
source = self.get_src(ip_version)
if transport == "rest":
query = json.dumps(
{
"cmd": cmd,
"afi": "ipv6",
"source": d_src_addr_ipv6,
"target": ipprefix,
}
{"cmd": cmd, "afi": afi, "source": source, "target": target}
)
msg = f"{ipprefix} is a valid IPv6 Address."
return (msg, code.success, d_address, query)
except:
msg = g.msg_error_invalidip.format(i=ipprefix)
logger.error(f"{msg}, {code.danger}, {d_name}, {query}")
return (msg, code.danger, d_name, query)
else:
msg = f"Command {cmd} not found."
logger.error(f"{msg}, {code.danger}, {d_name}, {query}")
return (msg, code.danger, d_name, query)

if transport == "scrape":
conf_command = self.command[afi][cmd]
fmt_command = conf_command.format(target=target, source=source)
query = (self.d_address, self.d_type, fmt_command)
return query


def ssh(cmd, ipprefix, device):
"""Validates input and constructs usable commands to run via netmiko"""
d_address = device["address"]
d_src_addr_ipv4 = device["src_addr_ipv4"]
d_src_addr_ipv6 = device["src_addr_ipv6"]
d_location = device["location"]
d_name = device["name"]
d_port = device["port"]
d_type = device["type"]
def bgp_route(self, transport, target):
"""Constructs bgp_route query parameters from pre-validated input"""
cmd = current_function()
query = None
ip_version = IPNetwork(target).ip.version
afi = f"ipv{ip_version}"
if transport == "rest":
query = json.dumps({"cmd": cmd, "afi": afi, "target": target})
if transport == "scrape":
conf_command = self.command[afi][cmd]
fmt_command = conf_command.format(target=target)
query = (self.d_address, self.d_type, fmt_command)
return query

c = configuration.command(d_type)
# BGP Community Query
if cmd == "bgp_community":
# Extended Communities, new-format
if re.match("^([0-9]{0,5})\:([0-9]{1,5})$", ipprefix):
mc = c.dual[cmd]
command = mc.format(target=ipprefix)
msg = f"{ipprefix} matched new-format community."
return (msg, code.success, d_address, d_type, command)
# Extended Communities, 32 bit format
elif re.match("^[0-9]{1,10}$", ipprefix):
mc = c.dual[cmd]
command = mc.format(target=ipprefix)
msg = f"{ipprefix} matched 32 bit community."
return (msg, code.success, d_address, d_type, command)
# RFC 8092 Large Community Support
elif re.match("^([0-9]{1,10})\:([0-9]{1,10})\:[0-9]{1,10}$", ipprefix):
mc = c.dual[cmd]
command = mc.format(target=ipprefix)
msg = f"{ipprefix} matched large community."
return (msg, code.success, d_address, d_type, command)
else:
msg = g.msg_error_invaliddual.format(i=ipprefix, qt="BGP Community")
logger.error(f"{msg}, {code.danger}, {d_name}, {cmd}, {ipprefix}")
return (msg, code.danger, d_name, cmd, ipprefix)
# BGP AS_PATH Query
elif cmd == "bgp_aspath":
if re.match(".*", ipprefix):
mc = c.dual[cmd]
command = mc.format(target=ipprefix)
msg = f"{ipprefix} matched AS_PATH regex."
return (msg, code.success, d_address, d_type, command)
else:
msg = g.msg_error_invaliddual.format(i=ipprefix, qt="AS Path")
logger.error(f"{msg}, {code.danger}, {d_name}, {cmd}, {ipprefix}")
return (msg, code.danger, d_name, cmd, ipprefix)
# BGP Route Query
elif cmd == "bgp_route":
try:
# Use netaddr library to verify if input is a valid IPv4 address or prefix
if IPNetwork(ipprefix).ip.version == 4:
mc = c.ipv4[cmd]
command = mc.format(target=ipprefix)
msg = f"{ipprefix} is a valid IPv4 Address."
return (msg, code.success, d_address, d_type, command)
# Use netaddr library to verify if input is a valid IPv6 address or prefix
elif IPNetwork(ipprefix).ip.version == 6:
mc = c.ipv6[cmd]
command = mc.format(target=ipprefix)
msg = f"{ipprefix} is a valid IPv6 Address."
return (msg, code.success, d_address, d_type, command)
# Exception from netaddr library will return a user-facing error
except:
msg = g.msg_error_invalidip.format(i=ipprefix)
logger.error(f"{msg}, {code.danger}, {d_name}, {cmd}, {ipprefix}")
return (msg, code.danger, d_name, cmd, ipprefix)
# Ping/Traceroute
elif cmd in ["ping", "traceroute"]:
try:
if IPNetwork(ipprefix).ip.version == 4:
mc = c.ipv4[cmd]
command = mc.format(target=ipprefix, src_addr_ipv4=d_src_addr_ipv4)
msg = f"{ipprefix} is a valid IPv4 Address."
return (msg, code.success, d_address, d_type, command)
elif IPNetwork(ipprefix).ip.version == 6:
mc = c.ipv6[cmd]
command = mc.format(target=ipprefix, src_addr_ipv6=d_src_addr_ipv6)
msg = f"{ipprefix} is a valid IPv6 Address."
return (msg, code.success, d_address, d_type, command)
except:
msg = g.msg_error_invalidip.format(i=ipprefix)
logger.error(f"{msg}, {code.danger}, {d_name}, {cmd}, {ipprefix}")
return (msg, code.danger, d_name, cmd, ipprefix)
else:
msg = f"Command {cmd} not found."
logger.error(f"{msg}, {code.danger}, {d_name}, {cmd}, {ipprefix}")
return (msg, code.danger, d_name, cmd, ipprefix)
def bgp_community(self, transport, target):
"""Constructs bgp_community query parameters from pre-validated input"""
cmd = current_function()
afi = "dual"
query = None
if transport == "rest":
query = json.dumps({"cmd": cmd, "afi": afi, "target": target})
if transport == "scrape":
conf_command = self.command[afi][cmd]
fmt_command = conf_command.format(target=target)
query = (self.d_address, self.d_type, fmt_command)
return query

def bgp_aspath(self, transport, target):
"""Constructs bgp_aspath query parameters from pre-validated input"""
cmd = current_function()
afi = "dual"
query = None
if transport == "rest":
query = json.dumps({"cmd": cmd, "afi": afi, "target": target})
if transport == "scrape":
conf_command = self.command[afi][cmd]
fmt_command = conf_command.format(target=target)
query = (self.d_address, self.d_type, fmt_command)
return query
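# Usage sketch for the Construct class above (hypothetical device dict and target; real values
# come from the hyperglass device configuration):
#   device = {"address": "10.0.0.1", "src_addr_ipv4": "192.0.2.1", "src_addr_ipv6": "2001:db8::1",
#             "name": "router1", "type": "cisco_ios"}
#   query = Construct(device).bgp_route("scrape", "203.0.113.0/24")
#   # "rest" returns a JSON payload for hyperglass-frr; "scrape" returns (address, nos, command)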
@@ -1,277 +1,219 @@
# https://github.com/checktheroads/hyperglass
"""
Accepts input from front end application, validates the input and returns errors if input is \
invalid. Passes validated parameters to construct.py, which is used to build & run the Netmiko \
connections or hyperglass-frr API calls, returns the output back to the front end.
"""
# Module Imports
import re
import sys
import json
import time
import requests
import requests.exceptions
from logzero import logger
from netmiko import redispatch
from netmiko import ConnectHandler
from netaddr import IPNetwork, IPAddress, IPSet
from netmiko import (
ConnectHandler,
redispatch,
NetMikoAuthenticationException,
NetMikoTimeoutException,
NetmikoAuthError,
NetmikoTimeoutError,
)

# Project Imports
from hyperglass import configuration
from hyperglass.command import parse
from hyperglass.command import construct
from hyperglass.command.construct import Construct
from hyperglass.command.validate import Validate

codes = configuration.codes()
config = configuration.general()


class ipcheck:
"""Checks input IPv4 or IPv6 address against host & CIDR regex patterns,
returns dictionary of discovered attributes. Used for input validation in
command.execute module."""
class Rest:
"""Executes connections to REST API devices"""

def __init__(self):
self.ipv4_host = "^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)?$"
self.ipv4_cidr = "^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\/(3[0-2]|2[0-9]|1[0-9]|[0-9])?$"
self.ipv6_host = "^(([0-9a-fA-F]{1,4}:){7,7}[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,7}:|([0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,5}(:[0-9a-fA-F]{1,4}){1,2}|([0-9a-fA-F]{1,4}:){1,4}(:[0-9a-fA-F]{1,4}){1,3}|([0-9a-fA-F]{1,4}:){1,3}(:[0-9a-fA-F]{1,4}){1,4}|([0-9a-fA-F]{1,4}:){1,2}(:[0-9a-fA-F]{1,4}){1,5}|[0-9a-fA-F]{1,4}:((:[0-9a-fA-F]{1,4}){1,6})|:((:[0-9a-fA-F]{1,4}){1,7}|:)|fe80:(:[0-9a-fA-F]{0,4}){0,4}%[0-9a-zA-Z]{1,}|::(ffff(:0{1,4}){0,1}:){0,1}((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])|([0-9a-fA-F]{1,4}:){1,4}:((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9]))?$"
self.ipv6_cidr = "^(([0-9a-fA-F]{1,4}:){7,7}[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,7}:|([0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,5}(:[0-9a-fA-F]{1,4}){1,2}|([0-9a-fA-F]{1,4}:){1,4}(:[0-9a-fA-F]{1,4}){1,3}|([0-9a-fA-F]{1,4}:){1,3}(:[0-9a-fA-F]{1,4}){1,4}|([0-9a-fA-F]{1,4}:){1,2}(:[0-9a-fA-F]{1,4}){1,5}|[0-9a-fA-F]{1,4}:((:[0-9a-fA-F]{1,4}){1,6})|:((:[0-9a-fA-F]{1,4}){1,7}|:)|fe80:(:[0-9a-fA-F]{0,4}){0,4}%[0-9a-zA-Z]{1,}|::(ffff(:0{1,4}){0,1}:){0,1}((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])|([0-9a-fA-F]{1,4}:){1,4}:((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9]))\/((1(1[0-9]|2[0-8]))|([0-9][0-9])|([0-9]))?$"
# pylint: disable=too-few-public-methods
# Dear PyLint, sometimes, people need to make their code scalable for future use. <3, -ML

def test(self, prefix):
if IPNetwork(prefix).ip.version == 4:
if re.match(self.ipv4_host, prefix):
return {"protocol": "ipv4", "type": "host"}
elif re.match(self.ipv4_cidr, prefix):
return {"protocol": "ipv4", "type": "cidr"}

if IPNetwork(prefix).ip.version == 6:
if re.match(self.ipv6_host, prefix):
return {"protocol": "ipv6", "type": "host"}
if re.match(self.ipv6_cidr, prefix):
return {"protocol": "ipv6", "type": "cidr"}


class params:
"""Sends input parameters to command.construct module for use by execution functions"""

class http:
def __init__(self):
self.msg, self.status, self.router, self.query = construct.frr(
lg_cmd, lg_ipprefix, d()
def __init__(self, transport, device, cmd, target):
self.transport = transport
self.device = device
self.cmd = cmd
self.target = target
self.cred = configuration.credential(self.device["credential"])
self.query = getattr(Construct(self.device), self.cmd)(
self.transport, self.target
)

def __call__(self):
return vars(self)

class ssh:
def __init__(self):
self.msg, self.status, self.router, self.type, self.command = construct.ssh(
lg_cmd, lg_ipprefix, d()
)

def __call__(self):
return vars(self)

def nm_host(self):
"""Defines netmiko end-host dictionary"""
c = configuration.credential(d.credential)
attr = {
"host": self.router,
"device_type": self.type,
"username": c.username,
"password": c.password,
"global_delay_factor": 0.5,
}
return attr

def nm_proxy(self):
"""Defines netmiko SSH proxy dictionary"""
p = configuration.proxy(d.proxy)
attr = {
"host": p.address,
"username": p.username,
"password": p.password,
"device_type": p.type,
"global_delay_factor": 0.5,
}
return attr


class connect:
"""Performs the actual connection to the end device"""

class restapi:
def frr():
def frr(self):
"""Sends HTTP POST to router running the hyperglass-frr API"""
http = params().http()
c = configuration.credential(d.credential)
try:
headers = {"Content-Type": "application/json", "X-API-Key": c.password}
json_query = json.dumps(http.query)
frr_endpoint = f"http://{d.address}:{d.port}/frr"
frr_response = requests.post(
frr_endpoint, headers=headers, data=json_query
headers = {
"Content-Type": "application/json",
"X-API-Key": self.cred["password"],
}
json_query = json.dumps(self.query)
frr_endpoint = f'http://{self.device["address"]}:{self.device["port"]}/frr'
frr_response = requests.post(frr_endpoint, headers=headers, data=json_query)
response = frr_response.text
status = frr_response.status_code
except requests.exceptions.RequestException as requests_exception:
logger.error(
f'Error connecting to device {self.device["name"]}: {requests_exception}'
)
return frr_response.text, frr_response.status_code
except:
raise
response = config["msg_error_general"]
status = codes["danger"]
return response, status
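# For reference, the request that frr() above builds, in plain HTTP terms (hypothetical
# address, port, and key — the real values come from the device and credential configuration):
#   POST http://10.0.0.1:8080/frr
#   Content-Type: application/json
#   X-API-Key: <device credential password>
#   {"cmd": "bgp_route", "afi": "ipv4", "target": "203.0.113.0/24"}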

class nm:
def direct():

class Netmiko:
"""Executes connections to Netmiko devices"""

# pylint: disable=too-many-instance-attributes
# Dear PyLint, I actually need all these. <3, -ML

def __init__(self, transport, device, cmd, target):
self.device = device
self.target = target
self.cred = configuration.credential(self.device["credential"])
self.params = getattr(Construct(device), cmd)(transport, target)
self.router = self.params[0]
self.nos = self.params[1]
self.command = self.params[2]
self.nm_host = {
"host": self.router,
"device_type": self.nos,
"username": self.cred["username"],
"password": self.cred["password"],
"global_delay_factor": 0.5,
}

def direct(self):
"""Connects to the router via netmiko library, return the command output"""
ssh = params().ssh()
nm_host = ssh.nm_host()
nm_connect_direct = ConnectHandler(**nm_host)
nm_output_direct = nm_connect_direct.send_command(ssh.command)
return nm_output_direct

def proxied(device_proxy):
"""Connects to the proxy server via netmiko library, then logs into the router via standard SSH"""
ssh = params().ssh()
nm_proxy = ssh.nm_proxy()
nm_host = ssh.nm_host()
dp = configuration.proxy(d.proxy)
try:
nm_connect_direct = ConnectHandler(**self.nm_host)
response = nm_connect_direct.send_command(self.command)
status = codes["success"]
except (
NetMikoAuthenticationException,
NetMikoTimeoutException,
NetmikoAuthError,
NetmikoTimeoutError,
) as netmiko_exception:
response = config["msg_error_general"]
status = codes["danger"]
logger.error(f"{netmiko_exception}, {status}")
return response, status

def proxied(self):
"""
Connects to the proxy server via netmiko library, then logs into the router via \
standard SSH
"""
proxy_name = self.device["proxy"]
device_proxy = configuration.proxy(proxy_name)
nm_proxy = {
"host": device_proxy["address"],
"username": device_proxy["username"],
"password": device_proxy["password"],
"device_type": device_proxy["type"],
"global_delay_factor": 0.5,
}
nm_connect_proxied = ConnectHandler(**nm_proxy)
nm_ssh_command = dp.ssh_command.format(**nm_host) + "\n"

nm_ssh_command = device_proxy["ssh_command"].format(**self.nm_host) + "\n"
nm_connect_proxied.write_channel(nm_ssh_command)
time.sleep(1)
proxy_output = nm_connect_proxied.read_channel()

try:
# Accept SSH key warnings
if "Are you sure you want to continue connecting" in proxy_output:
nm_connect_proxied.write_channel("yes" + "\n")
nm_connect_proxied.write_channel(nm_host["password"] + "\n")
nm_connect_proxied.write_channel(self.nm_host["password"] + "\n")
# Send password on prompt
elif "assword" in proxy_output:
nm_connect_proxied.write_channel(nm_host["password"] + "\n")
nm_connect_proxied.write_channel(self.nm_host["password"] + "\n")
proxy_output += nm_connect_proxied.read_channel()
# Reclassify netmiko connection as configured device type
redispatch(nm_connect_proxied, nm_host["device_type"])

host_output = nm_connect_proxied.send_command(ssh.command)

if host_output:
return host_output
except:
msg = f'Proxy server {nm_proxy["host"]} unable to reach target {nm_host["host"]}'
logger.error(f"{msg}, {code.danger}, {lg_params}")
raise
return (general.message_general_error, code.danger, lg_params)


def execute(lg_data):
"""Ingests user input, runs blacklist check, runs prefix length check (if enabled),
pulls all configuration variables for the input router."""

logger.info(f"Received lookup request for: {lg_data}")

# Create global variables for POSTed JSON from main app
global lg_router
lg_router = lg_data["router"]

global lg_cmd
lg_cmd = lg_data["cmd"]

global lg_ipprefix
lg_ipprefix = lg_data["ipprefix"]

global lg_params
lg_params = lg_data

# Initialize status code class, create global variable for reuse.
global code
code = configuration.codes()

# Initialize general configuration parameters class, create global variable for reuse.
global general
general = configuration.general()

# Validate prefix input with netaddr library
if lg_cmd in ["bgp_route", "ping", "traceroute"]:
msg = general.msg_error_invalidip.format(i=lg_ipprefix)
try:
# Initialize prefix regex check class
ipc = ipcheck().test(lg_ipprefix)
if IPNetwork(lg_ipprefix).ip.is_reserved():
return (msg, code.danger, lg_data)
elif IPNetwork(lg_ipprefix).ip.is_netmask():
return (msg, code.danger, lg_data)
elif IPNetwork(lg_ipprefix).ip.is_hostmask():
return (msg, code.danger, lg_data)
elif IPNetwork(lg_ipprefix).ip.is_loopback():
return (msg, code.danger, lg_data)
elif IPNetwork(lg_ipprefix).ip.is_unicast():
pass
else:
return (msg, code.danger, lg_data)
except:
return (msg, code.danger, lg_data)

if lg_cmd == "Query Type":
return (general.msg_error_querytype, code.warning, lg_data)

global d
d = configuration.device(lg_router)

# Checks if device type is on the requires_ipv6_cidr list
requires_ipv6_cidr = configuration.requires_ipv6_cidr(d.type)

if lg_cmd in ["bgp_route", "ping", "traceroute"]:
blacklist = IPSet(configuration.blacklist())
msg = general.msg_error_notallowed.format(i=lg_ipprefix)
# Check blacklist list for prefixes/IPs, return error upon a match
if IPNetwork(lg_ipprefix).ip in blacklist:
return (msg, code.warning, lg_data)
# Check if device requires IPv6 queries to be in CIDR format, return error if True
if lg_cmd == "bgp_route" and IPNetwork(lg_ipprefix).version == 6:
if requires_ipv6_cidr == True and ipc["type"] == "host":
msg = general.msg_error_ipv6cidr.format(d=d.display_name)
return (msg, code.warning, lg_data)
# Check if input prefix is in CIDR format, and if command is ping/traceroute, return error if True
if lg_cmd in ["ping", "traceroute"] and ipc["type"] == "cidr":
return (msg, code.warning, lg_data)

# If enable_max_prefix feature enabled, require BGP Route queries be smaller than prefix size limit
if lg_cmd == "bgp_route" and general.enable_max_prefix == True:
if (
IPNetwork(lg_ipprefix).version == 4
and IPNetwork(lg_ipprefix).prefixlen > general.max_prefix_length_ipv4
):
msg = general.msg_max_prefix.format(
m=general.max_prefix_length_ipv4, i=IPNetwork(lg_ipprefix)
redispatch(nm_connect_proxied, self.nm_host["device_type"])
response = nm_connect_proxied.send_command(self.command)
status = codes["success"]
except (
NetMikoAuthenticationException,
NetMikoTimeoutException,
NetmikoAuthError,
NetmikoTimeoutError,
) as netmiko_exception:
response = config["msg_error_general"]
status = codes["danger"]
logger.error(
f'{netmiko_exception}, {status}, Proxy: {self.nm_host["proxy"]}'
)
return (msg, code.warning, lg_data)
if (
IPNetwork(lg_ipprefix).version == 6
and IPNetwork(lg_ipprefix).prefixlen > general.max_prefix_length_ipv6
):
msg = general.msg_max_prefix.format(
m=general.max_prefix_length_ipv6, i=IPNetwork(lg_ipprefix)
return response, status


class Execute:
"""
Ingests user input, runs blacklist check, runs prefix length check (if enabled), pulls all \
configuration variables for the input router.
"""

def __init__(self, lg_data):
self.input_data = lg_data
self.input_router = lg_data["router"]
self.input_cmd = lg_data["cmd"]
self.input_target = lg_data["ipprefix"]
self.device_config = configuration.device(self.input_router)

def parse(self, output):
"""Splits BGP output by AFI, returns only IPv4 & IPv6 output for protocol-agnostic \
commands (Community & AS_PATH Lookups)"""
nos = self.device_config["type"]
parsed = output
if self.input_cmd in ["bgp_community", "bgp_aspath"]:
if nos in ["cisco_ios"]:
delimiter = "For address family: "
parsed_ipv4 = output.split(delimiter)[1]
parsed_ipv6 = output.split(delimiter)[2]
parsed = delimiter + parsed_ipv4 + delimiter + parsed_ipv6
if nos in ["cisco_xr"]:
delimiter = "Address Family: "
parsed_ipv4 = output.split(delimiter)[1]
parsed_ipv6 = output.split(delimiter)[2]
parsed = delimiter + parsed_ipv4 + delimiter + parsed_ipv6
return parsed

def response(self):
"""
Initializes Execute.filter(), if input fails to pass filter, returns errors to front end. \
Otherwise, executes queries.
"""
# Return error if no query type is specified
if self.input_cmd == "Query Type":
msg = config["msg_error_querytype"]
status = codes["warning"]
return msg, status, self.input_data
validity, msg, status = getattr(Validate(self.device_config), self.input_cmd)(
self.input_target
)
return (msg, code.warning, lg_data)
# Sends validated data to target execution library and returns output
if d.type == "frr":
http = params().http()
try:
if http.status in range(200, 300):
output, frr_status = connect.restapi.frr()
parsed_output = parse.parse(output, d.type, lg_cmd)
return parsed_output, frr_status, http()
elif http.status in range(400, 500):
return http.msg, http.status, http()
if not validity:
return msg, status, self.input_data
connection = None
output = config["msg_error_general"]
info = self.input_data
if self.device_config["type"] == "frr":
connection = Rest(
"rest", self.device_config, self.input_cmd, self.input_target
)
raw_output, status = connection.frr()
output = self.parse(raw_output)
if self.device_config["type"] in configuration.scrape_list():
connection = Netmiko(
"scrape", self.device_config, self.input_cmd, self.input_target
)
if self.device_config["proxy"]:
raw_output, status = connection.proxied()
else:
logger.error(general.message_general_error, 500, http())
return general.message_general_error, 500, http()
except:
raise
raw_output, status = connection.direct()
output = self.parse(raw_output)
else:
try:
ssh = params().ssh()
if ssh.status in range(200, 300):
if d.proxy:
output = connect.nm.proxied(d.proxy)
parsed_output = parse.parse(output, d.type, lg_cmd)
return parsed_output, ssh.status, ssh.router, ssh.command
elif not d.proxy:
output = connect.nm.direct()
parsed_output = parse.parse(output, d.type, lg_cmd)
return parsed_output, ssh.status, ssh.router, ssh.command
elif ssh.status in range(400, 500):
return ssh.msg, ssh.status, ssh()
else:
logger.error(general.message_general_error, 500, ssh())
return general.message_general_error, 500, ssh()
except:
raise
logger.error(f"{output}, {status}, {info}")
return output, status, info
@@ -1,17 +0,0 @@
def parse(output, type, cmd):
"""Splits Cisco IOS BGP output by AFI, returns only IPv4 & IPv6 output for protocol-agnostic commands (Community & AS_PATH Lookups)"""
try:
if cmd in ["bgp_community", "bgp_aspath"] and type in ["cisco_ios"]:
delimiter = "For address family: "
parsed_ipv4 = output.split(delimiter)[1]
parsed_ipv6 = output.split(delimiter)[2]
return delimiter + parsed_ipv4 + delimiter + parsed_ipv6
else:
return output
if cmd in ["bgp_community", "bgp_aspath"] and type in ["cisco_xr"]:
delimiter = "Address Family: "
parsed_ipv4 = output.split(delimiter)[1]
parsed_ipv6 = output.split(delimiter)[2]
return delimiter + parsed_ipv4 + delimiter + parsed_ipv6
except:
raise
281
hyperglass/command/validate.py
Normal file
@@ -0,0 +1,281 @@
|
||||
# https://github.com/checktheroads/hyperglass
|
||||
"""
|
||||
Accepts raw input data from execute.py, passes it through specific filters based on query type, \
|
||||
returns validity boolean and specific error message.
|
||||
"""
|
||||
# Module Imports
|
||||
import re
|
||||
import inspect
|
||||
from logzero import logger
|
||||
from netaddr.core import AddrFormatError
|
||||
from netaddr import IPNetwork, IPAddress, IPSet # pylint: disable=unused-import
|
||||
|
||||
# Project Imports
|
||||
from hyperglass import configuration
|
||||
|
||||
# Configuration Imports
|
||||
config = configuration.general()
|
||||
|
||||
|
||||
class IPType:
|
||||
"""
|
||||
Passes input through IPv4/IPv6 regex patterns to determine if input is formatted as a host \
|
||||
(e.g. 192.0.2.1), or as CIDR (e.g. 192.0.2.0/24). is_host() and is_cidr() return a boolean.
|
||||
"""
|
||||
|
||||
def __init__(self):
|
||||
self.ipv4_host = (
|
||||
r"^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(25[0-5]|2[0-4]"
|
||||
r"[0-9]|[01]?[0-9][0-9]?)?$"
|
||||
)
|
||||
self.ipv4_cidr = (
|
||||
r"^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(25[0-5]|2[0-4]"
|
||||
r"[0-9]|[01]?[0-9][0-9]?)\/(3[0-2]|2[0-9]|1[0-9]|[0-9])?$"
|
||||
)
|
||||
self.ipv6_host = (
|
||||
r"^(([0-9a-fA-F]{1,4}:){7,7}[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:)"
|
||||
r"{1,7}:|([0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,5}"
|
||||
r"(:[0-9a-fA-F]{1,4}){1,2}|([0-9a-fA-F]{1,4}:){1,4}(:[0-9a-fA-F]{1,4}){1,3}|"
|
||||
r"([0-9a-fA-F]{1,4}:){1,3}(:[0-9a-fA\-F]{1,4}){1,4}|([0-9a-fA-F]{1,4}:){1,2}"
|
||||
r"(:[0-9a-fA-F]{1,4}){1,5}|[0-9a-fA-F]{1,4}:((:[0-9a-fA-F]{1,4}){1,6})|:("
|
||||
r"(:[0-9a-fA-F]{1,4}){1,7}|:)|fe80:(:[0-9a-fA-F]{0,4}){0,4}%[0-9a-zA-Z]{1,}|::"
|
||||
r"(ffff(:0{1,4}){0,1}:){0,1}((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25"
|
||||
r"[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])|([0-9a-fA-F]{1,4}:){1,4}:((25[0-5]|(2[0-4]"
|
||||
r"|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9]))?$"
|
||||
)
|
||||
self.ipv6_cidr = (
|
||||
r"^(([0-9a-fA-F]{1,4}:){7,7}[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,7}:|"
|
||||
r"([0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,5}(:[0-9a-fA-F]"
|
||||
r"{1,4}){1,2}|([0-9a-fA-F]{1,4}:){1,4}(:[0-9a-fA-F]{1,4}){1,3}|([0-9a-fA-F]{1,4}"
|
||||
r":){1,3}(:[0-9a-fA-F]{1,4}){1,4}|([0-9a-fA-F]{1,4}:){1,2}(:[0-9a-fA-F]{1,4}){1,5}"
|
||||
r"|[0-9a-fA-F]{1,4}:((:[0-9a-fA-F]{1,4}){1,6})|:((:[0-9a-fA-F]{1,4}){1,7}|:)|fe80:"
|
||||
r"(:[0-9a-fA-F]{0,4}){0,4}%[0-9a-zA-Z]{1,}|::(ffff(:0{1,4}){0,1}:){0,1}((25[0-5]|"
|
||||
r"(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])"
|
||||
r"|([0-9a-fA-F]{1,4}:){1,4}:((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25"
|
||||
r"[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9]))\/((1(1[0-9]|2[0-8]))|([0-9][0-9])|([0-9]"
|
||||
r"))?$"
|
||||
)
|
||||
|
||||
def is_host(self, target):
|
||||
"""Tests input to see if formatted as host"""
|
||||
ip_version = IPNetwork(target).ip.version
|
||||
state = False
|
||||
if ip_version == 4 and re.match(self.ipv4_host, target):
|
||||
state = True
|
||||
if ip_version == 6 and re.match(self.ipv6_host, target):
|
||||
state = True
|
||||
return state
|
||||
|
||||
def is_cidr(self, target):
|
||||
"""Tests input to see if formatted as CIDR"""
|
||||
ip_version = IPNetwork(target).ip.version
|
||||
state = False
|
||||
if ip_version == 4 and re.match(self.ipv4_cidr, target):
|
||||
state = True
|
||||
if ip_version == 6 and re.match(self.ipv6_cidr, target):
|
||||
state = True
|
||||
return state
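# Illustrative usage (not part of this commit): a quick check of how IPType
# classifies host vs. CIDR input. Assumes netaddr is installed and reuses the
# re/IPNetwork imports and the IPType class defined above.
checker = IPType()
for sample in ("192.0.2.1", "192.0.2.0/24", "2001:db8::1", "2001:db8::/32"):
    print(sample, "host:", checker.is_host(sample), "cidr:", checker.is_cidr(sample))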
|
||||
|
||||
|
||||
def ip_validate(target):
|
||||
"""Validates if input is a valid IP address"""
|
||||
validity = False
|
||||
try:
|
||||
valid_ip = IPNetwork(target).ip
|
||||
if (
|
||||
valid_ip.is_reserved()
|
||||
or valid_ip.is_netmask()
|
||||
or valid_ip.is_hostmask()
|
||||
or valid_ip.is_loopback()
|
||||
):
|
||||
validity = False
|
||||
if valid_ip.is_unicast():
|
||||
validity = True
|
||||
except AddrFormatError:
|
||||
validity = False
|
||||
return validity
|
||||
|
||||
|
||||
def ip_blacklist(target):
|
||||
"""Check blacklist list for prefixes/IPs, return boolean based on list membership"""
|
||||
blacklist = IPSet(configuration.blacklist())
|
||||
membership = False
|
||||
if target in blacklist:
|
||||
membership = True
|
||||
return membership
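# Illustrative sketch (not part of this commit): the same netaddr IPSet
# membership test ip_blacklist() relies on, with a hard-coded list standing in
# for configuration.blacklist().
from netaddr import IPNetwork, IPSet

example_blacklist = IPSet(["10.0.0.0/8", "192.168.0.0/16"])
for candidate in ("10.1.2.3", "10.20.0.0/24", "203.0.113.1"):
    print(candidate, "blacklisted:", IPNetwork(candidate) in example_blacklist)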
|
||||
|
||||
|
||||
def ip_attributes(target):
|
||||
"""Construct dictionary of validated IP attributes for repeated use"""
|
||||
network = IPNetwork(target)
|
||||
addr = network.ip
|
||||
ip_version = addr.version
|
||||
afi = f"ipv{ip_version}"
|
||||
afi_pretty = f"IPv{ip_version}"
|
||||
length = network.prefixlen
|
||||
valid_attributes = {
|
||||
"prefix": target,
|
||||
"network": network,
|
||||
"version": ip_version,
|
||||
"length": length,
|
||||
"afi": afi,
|
||||
"afi_pretty": afi_pretty,
|
||||
}
|
||||
return valid_attributes
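# Illustrative sketch (not part of this commit): the attribute dictionary
# ip_attributes() builds for one sample prefix.
attrs = ip_attributes("2001:db8::/32")
# attrs is roughly:
# {"prefix": "2001:db8::/32", "network": IPNetwork('2001:db8::/32'),
#  "version": 6, "length": 32, "afi": "ipv6", "afi_pretty": "IPv6"}
assert attrs["afi"] == "ipv6" and attrs["length"] == 32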
|
||||
|
||||
|
||||
def ip_type_check(cmd, target, device):
|
||||
"""Checks multiple IP address related validation parameters"""
|
||||
prefix_attr = ip_attributes(target)
|
||||
requires_ipv6_cidr = configuration.requires_ipv6_cidr(device["type"])
|
||||
validity = False
|
||||
msg = config["msg_error_notallowed"].format(i=target)
|
||||
# If target is a member of the blacklist, return an error.
|
||||
if ip_blacklist(target):
|
||||
validity = False
|
||||
return (validity, msg)
|
||||
# If the enable_max_prefix feature is enabled, require that BGP route queries be smaller
# than the configured size limit.
|
||||
if cmd == "bgp_route" and config["enable_max_prefix"]:
|
||||
max_length = config[f'max_prefix_length_{prefix_attr["afi"]}']
|
||||
if prefix_attr["length"] > max_length:
|
||||
validity = False
|
||||
msg = config["msg_max_prefix"].format(
|
||||
m=max_length, i=prefix_attr["network"]
|
||||
)
|
||||
return (validity, msg)
|
||||
# If the device NOS is listed in the requires_ipv6_cidr list (configuration.toml) and the
# query is an IPv6 host address, return an error.
|
||||
if (
|
||||
cmd == "bgp_route"
|
||||
and prefix_attr["version"] == 6
|
||||
and requires_ipv6_cidr
|
||||
and IPType().is_host(target)
|
||||
):
|
||||
msg = config["msg_error_ipv6cidr"].format(d=device["display_name"])
|
||||
validity = False
|
||||
return (validity, msg)
|
||||
# If query type is ping or traceroute, and query target is in CIDR format, return an error.
|
||||
if cmd in ["ping", "traceroute"] and IPType().is_cidr(target):
|
||||
msg = config["msg_error_directed_cidr"].format(cmd=cmd.capitalize())
|
||||
validity = False
|
||||
return (validity, msg)
|
||||
validity = True
|
||||
msg = f"{target} is a valid {cmd} query."
|
||||
return (validity, msg)
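# Illustrative sketch (not part of this commit): driving ip_type_check() with a
# made-up device dictionary; only the "type" and "display_name" keys are read
# here, and the hyperglass configuration files must be loadable.
fake_device = {"type": "cisco_ios", "display_name": "Example Router"}
valid, message = ip_type_check("ping", "192.0.2.0/24", fake_device)
# Ping targets may not be in CIDR form, so this returns the
# msg_error_directed_cidr message with validity False.
print(valid, message)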
|
||||
|
||||
|
||||
def current_function():
|
||||
"""Returns name of current function for easy initialization & calling."""
|
||||
this_function = inspect.stack()[1][3]
|
||||
return this_function
|
||||
|
||||
|
||||
class Validate:
|
||||
"""Accepts raw input and associated device parameters from execute.py and validates the input \
|
||||
based on specific query type. Returns boolean for validity, specific error message, and status \
|
||||
code."""
|
||||
|
||||
def __init__(self, device):
|
||||
"""Initialize device parameters and error codes."""
|
||||
self.device = device
|
||||
self.codes = configuration.codes()
|
||||
|
||||
def ping(self, target):
|
||||
"""Ping Query: Input Validation & Error Handling"""
|
||||
cmd = current_function()
|
||||
validity = False
|
||||
msg = config["msg_error_invalidip"].format(i=target)
|
||||
status = self.codes["warning"]
|
||||
# Perform basic validation of an IP address, return error if not a valid IP.
|
||||
if not ip_validate(target):
|
||||
status = self.codes["danger"]
|
||||
logger.error(f"{msg}, {status}")
|
||||
return (validity, msg, status)
|
||||
# Perform further validation of a valid IP address, return an error upon failure.
|
||||
valid_query, msg = ip_type_check(cmd, target, self.device)
|
||||
if valid_query:
|
||||
validity = True
|
||||
msg = f"{target} is a valid {cmd} query."
|
||||
status = self.codes["success"]
|
||||
return (validity, msg, status)
|
||||
return (validity, msg, status)
|
||||
|
||||
def traceroute(self, target):
|
||||
"""Traceroute Query: Input Validation & Error Handling"""
|
||||
cmd = current_function()
|
||||
validity = False
|
||||
msg = config["msg_error_invalidip"].format(i=target)
|
||||
status = self.codes["warning"]
|
||||
# Perform basic validation of an IP address, return error if not a valid IP.
|
||||
if not ip_validate(target):
|
||||
status = self.codes["danger"]
|
||||
logger.error(f"{msg}, {status}")
|
||||
return (validity, msg, status)
|
||||
# Perform further validation of a valid IP address, return an error upon failure.
|
||||
valid_query, msg = ip_type_check(cmd, target, self.device)
|
||||
if valid_query:
|
||||
validity = True
|
||||
msg = f"{target} is a valid {cmd} query."
|
||||
status = self.codes["success"]
|
||||
return (validity, msg, status)
|
||||
return (validity, msg, status)
|
||||
|
||||
def bgp_route(self, target):
|
||||
"""BGP Route Query: Input Validation & Error Handling"""
|
||||
cmd = current_function()
|
||||
validity = False
|
||||
msg = config["msg_error_invalidip"].format(i=target)
|
||||
status = self.codes["warning"]
|
||||
# Perform basic validation of an IP address, return error if not a valid IP.
|
||||
if not ip_validate(target):
|
||||
status = self.codes["danger"]
|
||||
logger.error(f"{msg}, {status}")
|
||||
return (validity, msg, status)
|
||||
# Perform further validation of a valid IP address, return an error upon failure.
|
||||
valid_query, msg = ip_type_check(cmd, target, self.device)
|
||||
if valid_query:
|
||||
validity = True
|
||||
msg = f"{target} is a valid {cmd} query."
|
||||
status = self.codes["success"]
|
||||
return (validity, msg, status)
|
||||
return (validity, msg, status)
|
||||
|
||||
def bgp_community(self, target):
|
||||
"""BGP Community Query: Input Validation & Error Handling"""
|
||||
validity = False
|
||||
msg = config["msg_error_invaliddual"].format(i=target, qt="BGP Community")
|
||||
status = self.codes["danger"]
|
||||
# Validate input communities against configured or default regex pattern
|
||||
# Extended Communities, new-format
|
||||
if re.match(config["re_bgp_community_new"], target):
|
||||
validity = True
|
||||
msg = f"{target} matched new-format community."
|
||||
status = self.codes["success"]
|
||||
# Extended Communities, 32 bit format
|
||||
if re.match(config["re_bgp_community_32bit"], target):
|
||||
validity = True
|
||||
msg = f"{target} matched 32 bit community."
|
||||
status = self.codes["success"]
|
||||
# RFC 8092 Large Community Support
|
||||
if re.match(config["re_bgp_community_large"], target):
|
||||
validity = True
|
||||
msg = f"{target} matched large community."
|
||||
status = self.codes["success"]
|
||||
if not validity:
|
||||
logger.error(f"{msg}, {status}")
|
||||
return (validity, msg, status)
|
||||
|
||||
def bgp_aspath(self, target):
|
||||
"""BGP AS Path Query: Input Validation & Error Handling"""
|
||||
validity = False
|
||||
msg = config["msg_error_invaliddual"].format(i=target, qt="AS Path")
|
||||
status = self.codes["danger"]
|
||||
# Validate input AS_PATH regex pattern against configured or default regex pattern
|
||||
if re.match(config["re_bgp_aspath"], target):
|
||||
validity = True
|
||||
msg = f"{target} matched AS_PATH regex."
|
||||
status = self.codes["success"]
|
||||
if not validity:
|
||||
logger.error(f"{msg}, {status}")
|
||||
return (validity, msg, status)
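# Illustrative sketch (not part of this commit): how execute.py is expected to
# drive the Validate class above. The device dictionary normally comes from
# configuration.device(); these keys are placeholders, and the configuration
# files must be loadable for codes() and the message templates.
validator = Validate({"type": "cisco_ios", "display_name": "Example Router"})
validity, msg, status = validator.bgp_route("203.0.113.0/24")
print(validity, status, msg)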
|
@@ -1,271 +1,322 @@
|
||||
# Module Imports
|
||||
# https://github.com/checktheroads/hyperglass
|
||||
"""
|
||||
Imports configuration variables from configuration files and returns default values if undefined.
|
||||
"""
|
||||
# Standard Imports
|
||||
import os
|
||||
import math
|
||||
|
||||
# Module Imports
|
||||
import toml
|
||||
|
||||
# Project Imports
|
||||
import hyperglass
|
||||
|
||||
# Project Directories
|
||||
dir = os.path.dirname(os.path.abspath(__file__))
|
||||
working_dir = os.path.dirname(os.path.abspath(__file__))
|
||||
hyperglass_root = os.path.dirname(hyperglass.__file__)
|
||||
|
||||
# TOML Imports
|
||||
configuration = toml.load(os.path.join(dir, "configuration.toml"))
|
||||
devices = toml.load(os.path.join(dir, "devices.toml"))
|
||||
configuration = toml.load(os.path.join(working_dir, "configuration.toml"))
|
||||
devices = toml.load(os.path.join(working_dir, "devices.toml"))
|
||||
|
||||
|
||||
def blacklist():
|
||||
"""Returns list of subnets/IPs defined in blacklist.toml"""
|
||||
b = toml.load(os.path.join(dir, "blacklist.toml"))
|
||||
return b["blacklist"]
|
||||
blacklist_config = toml.load(os.path.join(working_dir, "blacklist.toml"))
|
||||
return blacklist_config["blacklist"]
|
||||
|
||||
|
||||
def requires_ipv6_cidr(nos):
|
||||
"""Returns boolean for input NOS association with the NOS list defined
|
||||
in requires_ipv6_cidr.toml"""
|
||||
r = toml.load(os.path.join(dir, "requires_ipv6_cidr.toml"))
|
||||
nos_list = r["requires_ipv6_cidr"]
|
||||
if nos in nos_list:
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
"""Returns boolean for input NOS association with the NOS list defined in \
|
||||
requires_ipv6_cidr.toml"""
|
||||
nos_list = configuration["requires_ipv6_cidr"]
|
||||
return bool(nos in nos_list)
|
||||
|
||||
|
||||
def networks():
|
||||
"""Returns dictionary of ASNs as keys, list of associated locations as values.
|
||||
Imported as a Jinja2 variable on the main page that populates the network/ASN
|
||||
select class."""
|
||||
"""Returns dictionary of ASNs as keys, list of associated locations as values. Imported as a \
|
||||
Jinja2 variable on the main page that populates the network/ASN select class."""
|
||||
asn_dict = {}
|
||||
rl = devices["router"]
|
||||
for r in rl.values():
|
||||
asn = r["asn"]
|
||||
routers_list = devices["router"]
|
||||
for router_config in routers_list.values():
|
||||
asn = router_config["asn"]
|
||||
if asn in asn_dict:
|
||||
asn_dict[asn].append(r["location"])
|
||||
asn_dict[asn].append(router_config["location"])
|
||||
else:
|
||||
asn_dict[asn] = [r["location"]]
|
||||
asn_dict[asn] = [router_config["location"]]
|
||||
return asn_dict
|
||||
|
||||
|
||||
def networks_list():
|
||||
"""Returns a dictionary of ASNs as keys, list of associated locations,
|
||||
router hostnames, and router display names as keys. Used by Flask to
|
||||
populate the /routers/<asn> route, which is ingested by a JS Ajax call
|
||||
to populate the list of locations associated with the selected network/ASN
|
||||
on the main page."""
|
||||
"""Returns a dictionary of ASNs as keys, list of associated locations, router hostnames, and \
|
||||
router display names as keys. Used by Flask to populate the /routers/<asn> route, which is \
|
||||
ingested by a JS Ajax call to populate the list of locations associated with the selected \
|
||||
network/ASN on the main page."""
|
||||
networks_dict = {}
|
||||
rl = devices["router"]
|
||||
for r in rl.values():
|
||||
asn = r["asn"]
|
||||
routers_list = devices["router"]
|
||||
for router_config in routers_list.values():
|
||||
asn = router_config["asn"]
|
||||
if asn in networks_dict:
|
||||
networks_dict[asn].append(
|
||||
dict(
|
||||
location=r["location"],
|
||||
hostname=r["name"],
|
||||
display_name=r["display_name"],
|
||||
location=router_config["location"],
|
||||
hostname=router_config["name"],
|
||||
display_name=router_config["display_name"],
|
||||
)
|
||||
)
|
||||
else:
|
||||
networks_dict[asn] = [
|
||||
dict(
|
||||
location=r["location"],
|
||||
hostname=r["name"],
|
||||
display_name=r["display_name"],
|
||||
location=router_config["location"],
|
||||
hostname=router_config["name"],
|
||||
display_name=router_config["display_name"],
|
||||
)
|
||||
]
|
||||
return networks_dict
|
||||
|
||||
|
||||
class codes:
|
||||
"""Reusable status code attributes"""
|
||||
|
||||
def __init__(self):
|
||||
# 200 OK: renders standard display text
|
||||
self.success = 200
|
||||
# 405 Method Not Allowed: Renders Bulma "warning" class notification message with message text
|
||||
self.warning = 405
|
||||
# 415 Unsupported Media Type: Renders Bulma "danger" class notification message with message text
|
||||
self.danger = 415
|
||||
def codes():
|
||||
"""Reusable status code numbers"""
|
||||
code_dict = {
|
||||
# 200: renders standard display text
|
||||
"success": 200,
|
||||
# 405: Renders Bulma "warning" class notification message with message text
|
||||
"warning": 405,
|
||||
# 415: Renders Bulma "danger" class notification message with message text
|
||||
"danger": 415,
|
||||
}
|
||||
return code_dict
|
||||
|
||||
|
||||
class command:
|
||||
def codes_reason():
|
||||
"""Reusable status code descriptions"""
|
||||
code_desc_dict = {
|
||||
200: "Valid Query",
|
||||
405: "Query Not Allowed",
|
||||
415: "Query Invalid",
|
||||
}
|
||||
return code_desc_dict
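# Illustrative sketch (not part of this commit): codes() and codes_reason()
# pair a numeric status with a human-readable reason.
status = codes()["warning"]        # 405
reason = codes_reason()[status]    # "Query Not Allowed"
print(status, reason)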
|
||||
|
||||
|
||||
def scrape_list():
|
||||
"""Returns list of configured network operating systems"""
|
||||
config_commands = toml.load(os.path.join(working_dir, "commands.toml"))
|
||||
scrape = []
|
||||
for nos in config_commands:
|
||||
scrape.append(nos)
|
||||
return scrape
|
||||
|
||||
|
||||
def command(nos):
|
||||
"""Associates input NOS with matched commands defined in commands.toml"""
|
||||
|
||||
def __init__(self, nos):
|
||||
c = toml.load(os.path.join(dir, "commands.toml"))
|
||||
self.dual = c[nos][0]["dual"]
|
||||
self.ipv4 = c[nos][0]["ipv4"]
|
||||
self.ipv6 = c[nos][0]["ipv6"]
|
||||
|
||||
def __call__(self):
|
||||
return vars(self)
|
||||
config_commands = toml.load(os.path.join(working_dir, "commands.toml"))
|
||||
commands = None
|
||||
if nos in scrape_list():
|
||||
commands = {
|
||||
"dual": config_commands[nos][0]["dual"],
|
||||
"ipv4": config_commands[nos][0]["ipv4"],
|
||||
"ipv6": config_commands[nos][0]["ipv6"],
|
||||
}
|
||||
return commands
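# Illustrative sketch (not part of this commit): command() returns None for an
# unknown NOS and a dict of per-AFI command templates for a configured one
# (assuming commands.toml contains a cisco_ios section as in the example file).
ios_commands = command("cisco_ios")
if ios_commands is not None:
    print(ios_commands["ipv4"]["bgp_route"])
print(command("unknown_nos"))  # -> None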
|
||||
|
||||
|
||||
class credential:
|
||||
"""Associates input credential key name with configured credential username &
|
||||
password in devices.toml."""
|
||||
|
||||
def __init__(self, cred):
|
||||
def credential(cred):
|
||||
"""Associates input credential key name with configured credential username & password in \
|
||||
devices.toml."""
|
||||
c_list = devices["credential"]
|
||||
self.username = c_list[cred]["username"]
|
||||
self.password = c_list[cred]["password"]
|
||||
|
||||
def __call__(self):
|
||||
return vars(self)
|
||||
return dict(username=c_list[cred]["username"], password=c_list[cred]["password"])
|
||||
|
||||
|
||||
class device:
|
||||
def device(dev):
|
||||
"""Associates input device key name with configured device attributes in devices.toml"""
|
||||
|
||||
def __init__(self, device):
|
||||
d = devices["router"][device]
|
||||
self.address = d.get("address")
|
||||
self.asn = d.get("asn")
|
||||
self.src_addr_ipv4 = d.get("src_addr_ipv4")
|
||||
self.src_addr_ipv6 = d.get("src_addr_ipv6")
|
||||
self.credential = d.get("credential")
|
||||
self.location = d.get("location")
|
||||
self.name = d.get("name")
|
||||
self.display_name = d.get("display_name")
|
||||
self.port = d.get("port")
|
||||
self.type = d.get("type")
|
||||
self.proxy = d.get("proxy")
|
||||
|
||||
def __call__(self):
|
||||
return vars(self)
|
||||
device_config = devices["router"][dev]
|
||||
return dict(
|
||||
address=device_config.get("address"),
|
||||
asn=device_config.get("asn"),
|
||||
src_addr_ipv4=device_config.get("src_addr_ipv4"),
|
||||
src_addr_ipv6=device_config.get("src_addr_ipv6"),
|
||||
credential=device_config.get("credential"),
|
||||
location=device_config.get("location"),
|
||||
name=device_config.get("name"),
|
||||
display_name=device_config.get("display_name"),
|
||||
port=device_config.get("port"),
|
||||
type=device_config.get("type"),
|
||||
proxy=device_config.get("proxy"),
|
||||
)
|
||||
|
||||
|
||||
class proxy:
|
||||
def proxy(prx):
|
||||
"""Associates input proxy key name with configured proxy attributes in devices.toml"""
|
||||
|
||||
def __init__(self, proxy):
|
||||
p = devices["proxy"][proxy]
|
||||
self.address = p["address"]
|
||||
self.username = p["username"]
|
||||
self.password = p["password"]
|
||||
self.type = p["type"]
|
||||
self.ssh_command = p["ssh_command"]
|
||||
proxy_config = devices["proxy"][prx]
|
||||
return dict(
|
||||
address=proxy_config["address"],
|
||||
username=proxy_config["username"],
|
||||
password=proxy_config["password"],
|
||||
type=proxy_config["type"],
|
||||
ssh_command=proxy_config["ssh_command"],
|
||||
)
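# Illustrative sketch (not part of this commit): the three helpers above return
# plain dicts that chain together, since a device's "credential" and "proxy"
# values are key names into the other tables in devices.toml. "edge01" and the
# keys it references are placeholders, not shipped configuration.
dev = device("edge01")
creds = credential(dev["credential"])
prx = proxy(dev["proxy"]) if dev["proxy"] else None
print(dev["display_name"], creds["username"], prx["address"] if prx else "no proxy")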
|
||||
|
||||
|
||||
class general:
|
||||
def general():
|
||||
"""Exports general config variables and sets default values if undefined"""
|
||||
|
||||
def __init__(self):
|
||||
g = configuration["general"][0]
|
||||
self.primary_asn = g.get("primary_asn", "65000")
|
||||
self.org_name = g.get("org_name", "The Company")
|
||||
self.debug = g.get("debug", False)
|
||||
self.google_analytics = g.get("google_analytics", "")
|
||||
self.msg_error_querytype = g.get(
|
||||
gen = configuration["general"]
|
||||
re_bgp_aspath_mode = gen["bgp_aspath"].get("mode", "asplain")
|
||||
if re_bgp_aspath_mode == "asplain":
|
||||
re_bgp_aspath_default = r"^(\^|^\_)(\d+\_|\d+\$|\d+\(\_\.\+\_\))+$"
|
||||
if re_bgp_aspath_mode == "asdot":
|
||||
re_bgp_aspath_default = (
|
||||
r"^(\^|^\_)((\d+\.\d+)\_|(\d+\.\d+)\$|(\d+\.\d+)\(\_\.\+\_\))+$"
|
||||
)
|
||||
return dict(
|
||||
primary_asn=gen.get("primary_asn", "65000"),
|
||||
org_name=gen.get("org_name", "The Company"),
|
||||
debug=gen.get("debug", False),
|
||||
google_analytics=gen.get("google_analytics", ""),
|
||||
msg_error_querytype=gen.get(
|
||||
"msg_error_querytype", "You must select a query type."
|
||||
)
|
||||
self.msg_error_notallowed = g.get(
|
||||
),
|
||||
msg_error_notallowed=gen.get(
|
||||
"msg_error_notallowed", "<b>{i}</b> is not allowed."
|
||||
)
|
||||
self.msg_error_ipv6cidr = g.get(
|
||||
),
|
||||
msg_error_ipv6cidr=gen.get(
|
||||
"msg_error_ipv6cidr",
|
||||
"<b>{d}</b> requires IPv6 BGP lookups to be in CIDR notation.",
|
||||
)
|
||||
self.msg_error_invalidip = g.get(
|
||||
),
|
||||
msg_error_invalidip=gen.get(
|
||||
"msg_error_invalidip", "<b>{i}</b> is not a valid IP address."
|
||||
)
|
||||
self.msg_error_invaliddual = g.get(
|
||||
),
|
||||
msg_error_invaliddual=gen.get(
|
||||
"msg_error_invaliddual", "<b>{i}</b> is an invalid {qt}."
|
||||
)
|
||||
self.msg_error_general = g.get("msg_error_general", "A general error occurred.")
|
||||
self.msg_max_prefix = g.get(
|
||||
),
|
||||
msg_error_general=gen.get("msg_error_general", "A general error occurred."),
|
||||
msg_error_directed_cidr=gen.get(
|
||||
"msg_error_directed_cidr", "<b>{cmd}</b> queries can not be in CIDR format."
|
||||
),
|
||||
msg_max_prefix=gen.get(
|
||||
"msg_max_prefix",
|
||||
"Prefix length must be smaller than /{m}. <b>{i}</b> is too specific.",
|
||||
)
|
||||
self.rate_limit_query = g.get("rate_limit_query", "5")
|
||||
self.message_rate_limit_query = g.get(
|
||||
),
|
||||
rate_limit_query=gen.get("rate_limit_query", "5"),
|
||||
message_rate_limit_query=gen.get(
|
||||
"message_rate_limit_query",
|
||||
f"Query limit of {self.rate_limit_query} per minute reached. Please wait one minute and try again.",
|
||||
)
|
||||
self.enable_bgp_route = g.get("enable_bgp_route", True)
|
||||
self.enable_bgp_community = g.get("enable_bgp_community", True)
|
||||
self.enable_bgp_aspath = g.get("enable_bgp_aspath", True)
|
||||
self.enable_ping = g.get("enable_ping", True)
|
||||
self.enable_traceroute = g.get("enable_traceroute", True)
|
||||
self.rate_limit_site = g.get("rate_limit_site", "120")
|
||||
self.cache_timeout = g.get("cache_timeout", 120)
|
||||
self.cache_directory = g.get(
|
||||
(
|
||||
f'Query limit of {gen.get("rate_limit_query", "5")} per minute reached. '
|
||||
"Please wait one minute and try again."
|
||||
),
|
||||
),
|
||||
enable_bgp_route=gen.get("enable_bgp_route", True),
|
||||
enable_bgp_community=gen.get("enable_bgp_community", True),
|
||||
enable_bgp_aspath=gen.get("enable_bgp_aspath", True),
|
||||
enable_ping=gen.get("enable_ping", True),
|
||||
enable_traceroute=gen.get("enable_traceroute", True),
|
||||
rate_limit_site=gen.get("rate_limit_site", "120"),
|
||||
cache_timeout=gen.get("cache_timeout", 120),
|
||||
cache_directory=gen.get(
|
||||
"cache_directory", os.path.join(hyperglass_root, ".flask_cache")
|
||||
),
|
||||
enable_max_prefix=gen.get("enable_max_prefix", False),
|
||||
max_prefix_length_ipv4=gen.get("max_prefix_length_ipv4", 24),
|
||||
max_prefix_length_ipv6=gen.get("max_prefix_length_ipv6", 64),
|
||||
re_bgp_community_new=gen.get(
|
||||
"re_bgp_community_new", r"^([0-9]{0,5})\:([0-9]{1,5})$"
|
||||
),
|
||||
re_bgp_community_32bit=gen.get("re_bgp_community_32bit", r"^[0-9]{1,10}$"),
|
||||
re_bgp_community_large=gen.get(
|
||||
"re_bgp_community_large", r"^([0-9]{1,10})\:([0-9]{1,10})\:[0-9]{1,10}$"
|
||||
),
|
||||
re_bgp_aspath=gen["bgp_aspath"][re_bgp_aspath_mode].get(
|
||||
"regex", re_bgp_aspath_default
|
||||
),
|
||||
)
|
||||
self.enable_max_prefix = g.get("enable_max_prefix", False)
|
||||
self.max_prefix_length_ipv4 = g.get("max_prefix_length_ipv4", 24)
|
||||
self.max_prefix_length_ipv6 = g.get("max_prefix_length_ipv6", 64)
|
||||
|
||||
|
||||
class branding:
|
||||
def branding():
|
||||
"""Exports branding config variables and sets default values if undefined"""
|
||||
|
||||
def __init__(self):
|
||||
b = configuration["branding"][0]
|
||||
self.site_title = b.get("site_title", "hyperglass")
|
||||
self.title = b.get("title", "hyperglass")
|
||||
self.subtitle = b.get("subtitle", f"AS{general().primary_asn}")
|
||||
self.title_mode = b.get("title_mode", "logo_only")
|
||||
self.enable_footer = b.get("enable_footer", True)
|
||||
self.enable_credit = b.get("enable_credit", True)
|
||||
self.color_btn_submit = b.get("color_btn_submit", "#40798c")
|
||||
|
||||
self.color_tag_loctitle = b.get("color_tag_loctitle", "#330036")
|
||||
self.color_tag_cmdtitle = b.get("color_tag_cmdtitle", "#330036")
|
||||
self.color_tag_cmd = b.get("color_tag_cmd", "#ff5e5b")
|
||||
self.color_tag_loc = b.get("color_tag_loc", "#40798c")
|
||||
self.color_progressbar = b.get("color_progressbar", "#40798c")
|
||||
self.color_bg = b.get("color_bg", "#fbfffe")
|
||||
self.color_danger = b.get("color_danger", "#ff3860")
|
||||
self.logo_path = b.get(
|
||||
brand = configuration["branding"]
|
||||
gen = general()
|
||||
return dict(
|
||||
site_title=brand.get("site_title", "hyperglass"),
|
||||
title=brand.get("title", "hyperglass"),
|
||||
subtitle=brand.get("subtitle", f'AS{gen["primary_asn"]}'),
|
||||
title_mode=brand.get("title_mode", "logo_only"),
|
||||
enable_footer=brand.get("enable_footer", True),
|
||||
enable_credit=brand.get("enable_credit", True),
|
||||
color_btn_submit=brand.get("color_btn_submit", "#40798c"),
|
||||
color_tag_loctitle=brand.get("color_tag_loctitle", "#330036"),
|
||||
color_tag_cmdtitle=brand.get("color_tag_cmdtitle", "#330036"),
|
||||
color_tag_cmd=brand.get("color_tag_cmd", "#ff5e5b"),
|
||||
color_tag_loc=brand.get("color_tag_loc", "#40798c"),
|
||||
color_progressbar=brand.get("color_progressbar", "#40798c"),
|
||||
color_bg=brand.get("color_bg", "#fbfffe"),
|
||||
color_danger=brand.get("color_danger", "#ff3860"),
|
||||
logo_path=brand.get(
|
||||
"logo_path",
|
||||
os.path.join(hyperglass_root, "static/images/hyperglass-dark.png"),
|
||||
)
|
||||
self.logo_width = b.get("logo_width", "384")
|
||||
self.favicon_dir = b.get("favicon_path", "static/images/favicon/")
|
||||
self.placeholder_prefix = b.get(
|
||||
),
|
||||
logo_width=brand.get("logo_width", "384"),
|
||||
favicon_dir=brand.get("favicon_path", "static/images/favicon/"),
|
||||
placeholder_prefix=brand.get(
|
||||
"placeholder_prefix", "IP, Prefix, Community, or AS_PATH"
|
||||
)
|
||||
self.show_peeringdb = b.get("show_peeringdb", True)
|
||||
self.text_results = b.get("text_results", "Results")
|
||||
self.text_location = b.get("text_location", "Select Location...")
|
||||
self.text_cache = b.get(
|
||||
),
|
||||
show_peeringdb=brand.get("show_peeringdb", True),
|
||||
text_results=brand.get("text_results", "Results"),
|
||||
text_location=brand.get("text_location", "Select Location..."),
|
||||
text_cache=brand.get(
|
||||
"text_cache",
|
||||
f"Results will be cached for {math.ceil(general().cache_timeout / 60)} minutes.",
|
||||
)
|
||||
self.primary_font_name = b.get("primary_font_name", "Nunito")
|
||||
self.primary_font_url = b.get(
|
||||
f'Results will be cached for {math.ceil(gen["cache_timeout"] / 60)} minutes.',
|
||||
),
|
||||
primary_font_name=brand.get("primary_font_name", "Nunito"),
|
||||
primary_font_url=brand.get(
|
||||
"primary_font_url",
|
||||
"https://fonts.googleapis.com/css?family=Nunito:400,600,700",
|
||||
)
|
||||
self.mono_font_name = b.get("mono_font_name", "Fira Mono")
|
||||
self.mono_font_url = b.get(
|
||||
),
|
||||
mono_font_name=brand.get("mono_font_name", "Fira Mono"),
|
||||
mono_font_url=brand.get(
|
||||
"mono_font_url", "https://fonts.googleapis.com/css?family=Fira+Mono"
|
||||
)
|
||||
self.text_limiter_title = b.get("text_limiter_title", "Limit Reached")
|
||||
self.text_limiter_subtitle = b.get(
|
||||
),
|
||||
text_limiter_title=brand.get("text_limiter_title", "Limit Reached"),
|
||||
text_limiter_subtitle=brand.get(
|
||||
"text_limiter_subtitle",
|
||||
f"You have accessed this site more than {general().rate_limit_site} times in the last minute.",
|
||||
)
|
||||
self.text_500_title = b.get("text_500_title", "Error")
|
||||
self.text_500_subtitle = b.get("text_500_subtitle", "Something went wrong.")
|
||||
self.text_500_button = b.get("text_500_button", "Home")
|
||||
self.text_help_bgp_route = b.get(
|
||||
(
|
||||
f'You have accessed this site more than {gen["rate_limit_site"]} '
|
||||
"times in the last minute."
|
||||
),
|
||||
),
|
||||
text_500_title=brand.get("text_500_title", "Error"),
|
||||
text_500_subtitle=brand.get("text_500_subtitle", "Something went wrong."),
|
||||
text_500_button=brand.get("text_500_button", "Home"),
|
||||
text_help_bgp_route=brand.get(
|
||||
"text_help_bgp_route",
|
||||
"Performs BGP table lookup based on IPv4/IPv6 prefix.",
|
||||
)
|
||||
self.text_help_bgp_community = b.get(
|
||||
),
|
||||
text_help_bgp_community=brand.get(
|
||||
"text_help_bgp_community",
|
||||
'Performs BGP table lookup based on <a href="https://tools.ietf.org/html/rfc4360">Extended</a> or <a href="https://tools.ietf.org/html/rfc8195">Large</a> community value.',
|
||||
)
|
||||
self.text_help_bgp_aspath = b.get(
|
||||
(
|
||||
'Performs BGP table lookup based on <a href="https://tools.ietf.org/html/rfc4360">'
|
||||
'Extended</a> or <a href="https://tools.ietf.org/html/rfc8195">Large</a> '
|
||||
"community value.<br>"
|
||||
'<a href="#" onclick="bgpHelpCommunity()">BGP Communities</a>'
|
||||
),
|
||||
),
|
||||
text_help_bgp_aspath=brand.get(
|
||||
"text_help_bgp_aspath",
|
||||
'Performs BGP table lookup based on <code>AS_PATH</code> regular expression.<br>For commonly used BGP regular expressions, <a href="https://hyperglass.readthedocs.io/en/latest/Extras/common_as_path_regex/">click here</a>.',
|
||||
)
|
||||
self.text_help_ping = b.get(
|
||||
(
|
||||
"Performs BGP table lookup based on <code>AS_PATH</code> regular expression."
|
||||
'<br>For commonly used BGP regular expressions, <a href="https://hyperglass.'
|
||||
'readthedocs.io/en/latest/Extras/common_as_path_regex/">click here</a>.<br>'
|
||||
'<a href="#" onclick="bgpHelpASPath()">Allowed BGP AS Path Expressions</a>'
|
||||
),
|
||||
),
|
||||
text_help_ping=brand.get(
|
||||
"text_help_ping", "Sends 5 ICMP echo requests to the target."
|
||||
)
|
||||
self.text_help_traceroute = b.get(
|
||||
),
|
||||
text_help_traceroute=brand.get(
|
||||
"text_help_traceroute",
|
||||
'Performs UDP Based traceroute to the target.<br>For information about how to interpret traceroute results, <a href="https://www.nanog.org/meetings/nanog45/presentations/Sunday/RAS_traceroute_N45.pdf">click here</a>.',
|
||||
(
|
||||
"Performs UDP Based traceroute to the target.<br>For information about how to"
|
||||
'interpret traceroute results, <a href="https://www.nanog.org/meetings/nanog45/'
|
||||
'presentations/Sunday/RAS_traceroute_N45.pdf">click here</a>.'
|
||||
),
|
||||
),
|
||||
)
|
||||
|
@@ -1,8 +1,8 @@
|
||||
# Define networks that you don't want users to be able to query. Any IP inside the subnet will return an error message.
|
||||
blacklist = [
|
||||
'100.64.0.0/12',
|
||||
'198.18.0.0/15',
|
||||
'10.0.0.0/8',
|
||||
'192.168.0.0/16',
|
||||
'172.16.0.0/12'
|
||||
"100.64.0.0/10",
|
||||
"198.18.0.0/15",
|
||||
"10.0.0.0/8",
|
||||
"192.168.0.0/16",
|
||||
"172.16.0.0/12"
|
||||
]
|
||||
|
@@ -1,18 +1,16 @@
|
||||
# Cisco IOS/IOS-XE
|
||||
[[cisco_ios]]
|
||||
[cisco_ios.dual]
|
||||
bgp_community = "show bgp all community {target}"
|
||||
bgp_aspath = 'show bgp all quote-regexp "{target}"'
|
||||
[cisco_ios.ipv4]
|
||||
bgp_route = "show bgp ipv4 unicast {target} | exclude pathid:|Epoch"
|
||||
ping = "ping {target} repeat 5 source {src_addr_ipv4}"
|
||||
traceroute = "traceroute {target} timeout 1 probe 2 source {src_addr_ipv4}"
|
||||
ping = "ping {target} repeat 5 source {source}"
|
||||
traceroute = "traceroute {target} timeout 1 probe 2 source {source}"
|
||||
[cisco_ios.ipv6]
|
||||
bgp_route = "show bgp ipv6 unicast {target} | exclude pathid:|Epoch"
|
||||
ping = "ping ipv6 {target} repeat 5 source {src_addr_ipv6}"
|
||||
traceroute = "traceroute ipv6 {target} timeout 1 probe 2 source {src_addr_ipv6}"
|
||||
ping = "ping ipv6 {target} repeat 5 source {source}"
|
||||
traceroute = "traceroute ipv6 {target} timeout 1 probe 2 source {source}"
|
||||
|
||||
# Cisco IOS-XR
|
||||
[[cisco_xr]]
|
||||
[cisco_xr.dual]
|
||||
bgp_community = 'show bgp all unicast community {target} | utility egrep -v "\(BGP |Table |Non-stop\)"'
|
||||
@@ -20,13 +18,12 @@ bgp_aspath = 'show bgp all unicast regexp {target} | utility egrep -v "\(BGP |Ta
|
||||
[cisco_xr.ipv4]
|
||||
bgp_route = 'show bgp ipv4 unicast {target} | util egrep "\(BGP routing table entry|Path \#|aggregated by|Origin |Community:|validity| from \)"'
|
||||
ping = "ping ipv4 {target} count 5 source {src_addr_ipv4}"
|
||||
traceroute = "traceroute ipv4 {target} timeout 1 probe 2 source {src_addr_ipv4}"
|
||||
traceroute = "traceroute ipv4 {target} timeout 1 probe 2 source {source}"
|
||||
[cisco_xr.ipv6]
|
||||
bgp_route = 'show bgp ipv6 unicast {target} | util egrep "\(BGP routing table entry|Path \#|aggregated by|Origin |Community:|validity| from \)"'
|
||||
ping = "ping ipv6 {target} count 5 source {src_addr_ipv6}"
|
||||
traceroute = "traceroute ipv6 {target} timeout 1 probe 2 source {src_addr_ipv6}"
|
||||
traceroute = "traceroute ipv6 {target} timeout 1 probe 2 source {source}"
|
||||
|
||||
# Juniper
|
||||
[[juniper]]
|
||||
[juniper.dual]
|
||||
bgp_community = "show route protocol bgp community {target}"
|
||||
@@ -34,21 +31,8 @@ bgp_aspath = "show route protocol bgp aspath-regex {target}"
|
||||
[juniper.ipv4]
|
||||
bgp_route = "show route protocol bgp table inet.0 {target} detail"
|
||||
ping = "ping inet {target} count 5 source {src_addr_ipv4}"
|
||||
traceroute = "traceroute inet {target} wait 1 source {src_addr_ipv4}"
|
||||
traceroute = "traceroute inet {target} wait 1 source {source}"
|
||||
[juniper.ipv6]
|
||||
bgp_route = "show route protocol bgp table inet6.0 {target} detail"
|
||||
ping = "ping inet6 {target} count 5 source {src_addr_ipv6}"
|
||||
traceroute = "traceroute inet6 {target} wait 1 source {src_addr_ipv6}"
|
||||
|
||||
[[frr]]
|
||||
[frr.dual]
|
||||
bgp_community = "{target}"
|
||||
bgp_aspath = "{target}"
|
||||
[frr.ipv4]
|
||||
bgp_route = "{target}"
|
||||
ping = "{target}"
|
||||
traceroute = "{target}"
|
||||
[frr.ipv6]
|
||||
bgp_route = "{target}"
|
||||
ping = "{target}"
|
||||
traceroute = "{target}"
|
||||
traceroute = "traceroute inet6 {target} wait 1 source {source}"
|
||||
|
@@ -1,5 +1,11 @@
|
||||
# Non-dictionary parameters
|
||||
requires_ipv6_cidr = [
|
||||
"cisco_ios",
|
||||
"cisco_nxos"
|
||||
]
|
||||
|
||||
# General site-wide parameters
|
||||
[[general]]
|
||||
[general]
|
||||
# primary_asn = ""
|
||||
# org_name = ""
|
||||
# google_analytics = ""
|
||||
@@ -11,9 +17,13 @@
|
||||
# enable_max_prefix = ""
|
||||
# max_prefix_length_ipv4 = ""
|
||||
# max_prefix_length_ipv6 = ""
|
||||
[general.bgp_aspath]
|
||||
# mode = "asplain"
|
||||
[general.bgp_aspath.asplain]
|
||||
[general.bgp_aspath.asdot]
|
||||
|
||||
# Branding/Site Customization Parameters
|
||||
[[branding]]
|
||||
# Branding/Visual Customization Parameters
|
||||
[branding]
|
||||
# site_title = ""
|
||||
# title = ""
|
||||
# subtitle = ""
|
||||
|
@@ -1,3 +1,8 @@
|
||||
"""
|
||||
https://github.com/checktheroads/hyperglass
|
||||
Gunicorn configuration
|
||||
"""
|
||||
|
||||
import multiprocessing
|
||||
|
||||
command = "/usr/local/bin/gunicorn"
|
||||
@@ -11,7 +16,8 @@ keepalive = 10
|
||||
|
||||
|
||||
def on_starting(server):
|
||||
"""Renders CSS templates at initial code execution with single worker"""
|
||||
import hyperglass
|
||||
|
||||
hyperglass.render.css.renderTemplate()
|
||||
hyperglass.render.css()
|
||||
print(1)
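# Illustrative sketch (not part of this commit): a minimal Gunicorn config
# showing the on_starting server hook used above. It runs once in the master
# process before workers fork, which is why the CSS templates are rendered
# there rather than in every worker. The bind address is a placeholder.
import multiprocessing

bind = "127.0.0.1:8001"
workers = multiprocessing.cpu_count() * 2 + 1

def on_starting(server):
    print("master starting; render static assets here")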
|
||||
|
@@ -1,13 +1,15 @@
|
||||
# https://github.com/checktheroads/hyperglass
|
||||
"""
|
||||
Main Hyperglass Front End
|
||||
"""
|
||||
# Module Imports
|
||||
import os
|
||||
import sys
|
||||
import json
|
||||
import toml
|
||||
from logzero import logger
|
||||
from flask import Flask, request, Response, jsonify, flash
|
||||
from flask import Flask, request, Response
|
||||
from flask_caching import Cache
|
||||
from flask_limiter import Limiter
|
||||
from flask_limiter.util import get_remote_address
|
||||
from flask_limiter.util import get_ipaddr
|
||||
from prometheus_client import generate_latest, Counter
|
||||
|
||||
# Project Imports
|
||||
import hyperglass.configuration as configuration
|
||||
@@ -19,81 +21,114 @@ app = Flask(__name__, static_url_path="/static")
|
||||
|
||||
# Initialize general configuration parameters for reuse
|
||||
general = configuration.general()
|
||||
codes_reason = configuration.codes_reason()
|
||||
|
||||
# Flask-Limiter Config
|
||||
rate_limit_query = f"{general.rate_limit_query} per minute"
|
||||
rate_limit_site = f"{general.rate_limit_site} per minute"
|
||||
limiter = Limiter(app, key_func=get_remote_address, default_limits=[rate_limit_site])
|
||||
rate_limit_query = f'{general["rate_limit_query"]} per minute'
|
||||
rate_limit_site = f'{general["rate_limit_site"]} per minute'
|
||||
limiter = Limiter(app, key_func=get_ipaddr, default_limits=[rate_limit_site])
|
||||
|
||||
# Flask-Caching Config
|
||||
cache = Cache(
|
||||
app,
|
||||
config={
|
||||
"CACHE_TYPE": "filesystem",
|
||||
"CACHE_DIR": general.cache_directory,
|
||||
"CACHE_DEFAULT_TIMEOUT": general.cache_timeout,
|
||||
"CACHE_DIR": general["cache_directory"],
|
||||
"CACHE_DEFAULT_TIMEOUT": general["cache_timeout"],
|
||||
},
|
||||
)
|
||||
|
||||
# Prometheus Config
|
||||
count_data = Counter(
|
||||
"count_data", "Query Counter", ["source", "type", "loc_id", "target"]
|
||||
)
|
||||
|
||||
count_errors = Counter(
|
||||
"count_errors",
|
||||
"Error Counter",
|
||||
["code", "reason", "source", "type", "loc_id", "target"],
|
||||
)
|
||||
|
||||
count_ratelimit = Counter(
|
||||
"count_ratelimit", "Rate Limit Counter", ["message", "source"]
|
||||
)
|
||||
|
||||
|
||||
@app.route("/metrics")
|
||||
def metrics():
|
||||
CONTENT_TYPE_LATEST = str("text/plain; version=0.0.4; charset=utf-8")
|
||||
return Response(generate_latest(), mimetype=CONTENT_TYPE_LATEST)
|
||||
|
||||
|
||||
@app.errorhandler(429)
|
||||
def error429(e):
|
||||
"""Renders full error page for too many site queries"""
|
||||
html = render.html.renderTemplate("429")
|
||||
html = render.html("429")
|
||||
count_ratelimit.labels(e, get_ipaddr()).inc()
|
||||
logger.error(f"{e}")
|
||||
return html, 429
|
||||
|
||||
|
||||
def error500():
|
||||
"""Renders full error page for generic errors"""
|
||||
html = render.html.renderTemplate("500")
|
||||
@app.errorhandler(500)
|
||||
def general_error():
|
||||
"""General Error Page"""
|
||||
html = render.html("500")
|
||||
return html, 500
|
||||
|
||||
|
||||
def clearCache():
|
||||
def clear_cache():
|
||||
"""Function to clear the Flask-Caching cache"""
|
||||
with app.app_context():
|
||||
try:
|
||||
cache.clear()
|
||||
except:
|
||||
except Exception as error_exception:
|
||||
logger.error(f"Error clearing cache: {error_exception}")
|
||||
raise
|
||||
|
||||
|
||||
@app.route("/", methods=["GET"])
|
||||
@limiter.limit(rate_limit_site)
|
||||
@limiter.limit(rate_limit_site, error_message="Site")
|
||||
def site():
|
||||
"""Main front-end web application"""
|
||||
html = render.html.renderTemplate("index")
|
||||
html = render.html("index")
|
||||
return html
|
||||
|
||||
|
||||
@app.route("/test", methods=["GET"])
|
||||
def testRoute():
|
||||
def test_route():
|
||||
"""Test route for various tests"""
|
||||
html = render.html.renderTemplate("500")
|
||||
html = render.html("500")
|
||||
return html
|
||||
|
||||
|
||||
@app.route("/routers/<asn>", methods=["GET"])
|
||||
def get_routers(asn):
|
||||
"""Flask GET route provides a JSON list of all routers for the selected network/ASN"""
|
||||
nl = configuration.networks_list()
|
||||
nl_json = json.dumps(nl[asn])
|
||||
return nl_json
|
||||
networks_list = configuration.networks_list()
|
||||
networks_list_json = json.dumps(networks_list[asn])
|
||||
return networks_list_json
|
||||
|
||||
|
||||
@app.route("/lg", methods=["POST"])
|
||||
# Invoke Flask-Limiter with configured rate limit
|
||||
@limiter.limit(rate_limit_query)
|
||||
def lg():
|
||||
"""Main backend application initiator. Ingests Ajax POST data from form submit, passes it to the backend application to perform the filtering/lookups"""
|
||||
@limiter.limit(rate_limit_query, error_message="Query")
|
||||
def hyperglass_main():
|
||||
"""Main backend application initiator. Ingests Ajax POST data from form submit, passes it to
|
||||
the backend application to perform the filtering/lookups"""
|
||||
# Get JSON data from Ajax POST
|
||||
lg_data = request.get_json()
|
||||
# Stringify the form response containing serialized JSON for the request, use as key for k/v cache store so each command output value is unique
|
||||
client_addr = request.remote_addr
|
||||
count_data.labels(
|
||||
client_addr, lg_data["cmd"], lg_data["router"], lg_data["ipprefix"]
|
||||
).inc()
|
||||
# Stringify the form response containing serialized JSON for the request, use as key for k/v
|
||||
# cache store so each command output value is unique
|
||||
cache_key = str(lg_data)
|
||||
# Check if cached entry exists
|
||||
if cache.get(cache_key) is None:
|
||||
try:
|
||||
cache_value = execute.execute(lg_data)
|
||||
value_output = cache_value[0]
|
||||
cache_value = execute.Execute(lg_data).response()
|
||||
logger.info(f"Cache Value: {cache_value}")
|
||||
value_code = cache_value[1]
|
||||
value_entry = cache_value[0:2]
|
||||
value_params = cache_value[2:]
|
||||
@@ -101,36 +136,37 @@ def lg():
|
||||
# If it doesn't, create a cache entry
|
||||
cache.set(cache_key, value_entry)
|
||||
logger.info(f"Added cache entry: {value_params}")
|
||||
except:
|
||||
logger.error(f"Unable to add output to cache: {cache_key}")
|
||||
raise
|
||||
# If 200, return output
|
||||
response = cache.get(cache_key)
|
||||
if value_code == 200:
|
||||
try:
|
||||
return Response(response[0], response[1])
|
||||
except:
|
||||
raise
|
||||
# If 400 error, return error message and code
|
||||
# 200 & 400 errors are separated mainly for potential future use
|
||||
elif value_code in [405, 415]:
|
||||
try:
|
||||
# Note: 200 & 400 errors are separated mainly for potential future use
|
||||
if value_code in [405, 415]:
|
||||
count_errors.labels(
|
||||
response[1],
|
||||
codes_reason[response[1]],
|
||||
client_addr,
|
||||
lg_data["cmd"],
|
||||
lg_data["router"],
|
||||
lg_data["ipprefix"],
|
||||
).inc()
|
||||
return Response(response[0], response[1])
|
||||
if value_code == 500:
|
||||
count_errors.labels(
|
||||
response[1],
|
||||
codes_reason[response[1]],
|
||||
client_addr,
|
||||
lg_data["cmd"],
|
||||
lg_data["router"],
|
||||
lg_data["ipprefix"],
|
||||
).inc()
|
||||
return Response(general["msg_error_general"], 500)
|
||||
except:
|
||||
raise
|
||||
elif value_code in [500]:
|
||||
try:
|
||||
return Response(error500(), value_code)
|
||||
except:
|
||||
logger.error(f"Unable to add output to cache: {cache_key}")
|
||||
raise
|
||||
# If it does, return the cached entry
|
||||
else:
|
||||
logger.info(f"Cache match for: {cache_key}, returning cached entry...")
|
||||
response = cache.get(cache_key)
|
||||
try:
|
||||
return Response(response[0], response[1])
|
||||
except:
|
||||
raise
|
||||
# Upon exception, render generic error
|
||||
logger.error(f"Error returning cached entry for: {cache_key}")
|
||||
return Response(error500())
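# Illustrative sketch (not part of this commit): the caching pattern used by
# the route above, with a plain dict standing in for Flask-Caching. The
# stringified form payload is the cache key, so identical queries reuse the
# stored (output, status) pair instead of re-querying the device.
fake_cache = {}
lg_data = {"cmd": "bgp_route", "router": "edge01", "ipprefix": "203.0.113.0/24"}
cache_key = str(lg_data)
if cache_key not in fake_cache:
    # stand-in for execute.Execute(lg_data).response()[0:2]
    fake_cache[cache_key] = ("...device output...", 200)
output, status = fake_cache[cache_key]
print(status, output)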
|
||||
|
@@ -1,8 +1,15 @@
|
||||
# Module Imports
|
||||
# https://github.com/checktheroads/hyperglass
|
||||
"""
|
||||
Renders Jinja2 & Sass templates for use by the front end application
|
||||
"""
|
||||
# Standard Imports
|
||||
import os
|
||||
import sass
|
||||
import jinja2
|
||||
import subprocess
|
||||
|
||||
# Module Imports
|
||||
import sass
|
||||
import toml
|
||||
import jinja2
|
||||
from logzero import logger
|
||||
from markdown2 import Markdown
|
||||
from flask import render_template
|
||||
@@ -12,9 +19,9 @@ import hyperglass
|
||||
from hyperglass import configuration
|
||||
|
||||
# Module Directories
|
||||
dir = os.path.dirname(os.path.abspath(__file__))
|
||||
working_directory = os.path.dirname(os.path.abspath(__file__))
|
||||
hyperglass_root = os.path.dirname(hyperglass.__file__)
|
||||
file_loader = jinja2.FileSystemLoader(dir)
|
||||
file_loader = jinja2.FileSystemLoader(working_directory)
|
||||
env = jinja2.Environment(loader=file_loader)
|
||||
|
||||
# Configuration Imports
|
||||
@@ -22,20 +29,91 @@ branding = configuration.branding()
|
||||
general = configuration.general()
|
||||
networks = configuration.networks()
|
||||
|
||||
defaults = {
|
||||
"footer": """
|
||||
+++
|
||||
+++
|
||||
By using {{ site_title }}, you agree to be bound by the following terms of use: All queries \
|
||||
executed on this page are logged for analysis and troubleshooting. Users are prohibited from \
|
||||
automating queries, or attempting to process queries in bulk. This service is provided on a best \
|
||||
effort basis, and {{ org_name }} makes no availability or performance warranties or guarantees \
|
||||
whatsoever.
|
||||
""",
|
||||
"bgp_aspath": r"""
|
||||
+++
|
||||
title = "Supported AS Path Patterns"
|
||||
+++
|
||||
{{ site_title }} accepts the following `AS_PATH` regular expression patterns:
|
||||
|
||||
class html:
|
||||
"""Performs HTML rendering actions"""
|
||||
| Expression | Match |
|
||||
| :----------------------- | ----------------------------------------------------: |
|
||||
| `_65000$` | Originated by AS65000 |
|
||||
| `^65000\_` | Received from AS65000 |
|
||||
| `_65000_` | Via AS65000 |
|
||||
| `_65000_65001_` | Via AS65000 and AS65001 |
|
||||
| `_65000(_.+_)65001$` | Anything from AS65001 that passed through AS65000 |
|
||||
""",
|
||||
"bgp_community": """
|
||||
+++
|
||||
title = "BGP Communities"
|
||||
+++
|
||||
{{ site_title }} makes use of the following BGP communities:
|
||||
|
||||
def renderTemplate(t):
|
||||
"""Renders Jinja2 HTML templates"""
|
||||
| Community | Description |
|
||||
| :-------- | :---------- |
|
||||
| `65000:1` | Example 1 |
|
||||
| `65000:2` | Example 2 |
|
||||
| `65000:3` | Example 3 |
|
||||
""",
|
||||
}
|
||||
|
||||
# Convert templates/footer.md from Markdown to HTML
|
||||
md = Markdown()
|
||||
footer_template = env.get_template("templates/footer.md")
|
||||
footer_jinja = footer_template.render(
|
||||
site_title=branding.site_title, org_name=general.org_name
|
||||
|
||||
def content(file_name):
|
||||
"""Converts Markdown documents to HTML, renders Jinja2 variables, renders TOML frontmatter \
|
||||
variables, returns dictionary of variables and HTML content"""
|
||||
html_classes = {"table": "table"}
|
||||
markdown = Markdown(
|
||||
extras={
|
||||
"break-on-newline": True,
|
||||
"code-friendly": True,
|
||||
"tables": True,
|
||||
"html-classes": html_classes,
|
||||
}
|
||||
)
|
||||
# Render template based on input template name
|
||||
delim = "+++"
|
||||
file = os.path.join(working_directory, f"templates/content/{file_name}.md")
|
||||
frontmatter_dict = None
|
||||
if os.path.exists(file):
|
||||
with open(file, "r") as file_raw:
|
||||
file_read = file_raw.read()
|
||||
_, frontmatter, content_md = file_read.split(delim)
|
||||
frontmatter_dict = {file_name: toml.loads(frontmatter)}
|
||||
content_md_template = jinja2.Environment(loader=jinja2.BaseLoader).from_string(
|
||||
content_md
|
||||
)
|
||||
else:
|
||||
content_read = defaults[file_name]
|
||||
_, frontmatter, content_md = content_read.split(delim)
|
||||
frontmatter_dict = {file_name: toml.loads(frontmatter)}
|
||||
content_md_template = jinja2.Environment(loader=jinja2.BaseLoader).from_string(
|
||||
content_md
|
||||
)
|
||||
content_rendered = content_md_template.render(
|
||||
**general, **branding, **frontmatter_dict
|
||||
)
|
||||
content_html = markdown.convert(content_rendered)
|
||||
frontmatter_dict[file_name]["content"] = content_html
|
||||
return frontmatter_dict
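# Illustrative sketch (not part of this commit): the "+++"-delimited
# frontmatter handling in content() boils down to splitting the document,
# parsing the middle chunk as TOML, and rendering the rest with Jinja2.
import toml
import jinja2

raw = """+++
title = "BGP Communities"
+++
{{ site_title }} makes use of the following BGP communities:
"""
_, frontmatter, body = raw.split("+++")
meta = toml.loads(frontmatter)
rendered = jinja2.Template(body).render(site_title="hyperglass")
print(meta["title"])
print(rendered.strip())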
|
||||
|
||||
|
||||
def html(t):
|
||||
"""Renders Jinja2 HTML templates"""
|
||||
content_name_list = ["footer", "bgp_aspath", "bgp_community"]
|
||||
content_dict = {}
|
||||
for content_name in content_name_list:
|
||||
# content_file = os.path.join(working_directory, f"templates/content/{c}.md")
|
||||
content_data = content(content_name)
|
||||
content_dict.update(content_data)
|
||||
if t == "index":
|
||||
template = env.get_template("templates/index.html")
|
||||
elif t == "429":
|
||||
@@ -43,92 +121,30 @@ class html:
|
||||
elif t == "500":
|
||||
template = env.get_template("templates/500.html")
|
||||
return template.render(
|
||||
# General
|
||||
primary_asn=general.primary_asn,
|
||||
org_name=general.org_name,
|
||||
google_analytics=general.google_analytics,
|
||||
enable_bgp_route=general.enable_bgp_route,
|
||||
enable_bgp_community=general.enable_bgp_community,
|
||||
enable_bgp_aspath=general.enable_bgp_aspath,
|
||||
enable_ping=general.enable_ping,
|
||||
enable_traceroute=general.enable_traceroute,
|
||||
cache_timeout=general.cache_timeout,
|
||||
message_rate_limit_query=general.message_rate_limit_query,
|
||||
# Branding
|
||||
site_title=branding.site_title,
|
||||
title=branding.title,
|
||||
subtitle=branding.subtitle,
|
||||
title_mode=branding.title_mode,
|
||||
color_bg=branding.color_bg,
|
||||
color_danger=branding.color_danger,
|
||||
color_btn_submit=branding.color_btn_submit,
|
||||
color_progressbar=branding.color_progressbar,
|
||||
color_tag_loctitle=branding.color_tag_loctitle,
|
||||
color_tag_cmdtitle=branding.color_tag_cmdtitle,
|
||||
color_tag_cmd=branding.color_tag_cmd,
|
||||
color_tag_loc=branding.color_tag_loc,
|
||||
enable_credit=branding.enable_credit,
|
||||
enable_footer=branding.enable_footer,
|
||||
footer_content=md.convert(footer_jinja),
|
||||
logo_path=branding.logo_path,
|
||||
logo_width=branding.logo_width,
|
||||
favicon_dir=branding.favicon_dir,
|
||||
placeholder_prefix=branding.placeholder_prefix,
|
||||
show_peeringdb=branding.show_peeringdb,
|
||||
text_results=branding.text_results,
|
||||
text_location=branding.text_location,
|
||||
text_cache=branding.text_cache,
|
||||
text_500_title=branding.text_500_title,
|
||||
text_500_subtitle=branding.text_500_subtitle,
|
||||
text_500_button=branding.text_500_button,
|
||||
text_help_bgp_route=branding.text_help_bgp_route,
|
||||
text_help_bgp_community=branding.text_help_bgp_community,
|
||||
text_help_bgp_aspath=branding.text_help_bgp_aspath,
|
||||
text_help_ping=branding.text_help_ping,
|
||||
text_help_traceroute=branding.text_help_traceroute,
|
||||
text_limiter_title=branding.text_limiter_title,
|
||||
text_limiter_subtitle=branding.text_limiter_subtitle,
|
||||
# Devices
|
||||
device_networks=configuration.networks(),
|
||||
**general, **branding, **content_dict, device_networks=networks
|
||||
)
|
||||
|
||||
|
||||
class css:
|
||||
"""Performs CSS/Sass rendering actions"""
|
||||
|
||||
def renderTemplate():
|
||||
def css():
|
||||
"""Renders Jinja2 template to Sass file, then compiles Sass as CSS"""
|
||||
|
||||
scss_file = os.path.join(hyperglass_root, "static/sass/hyperglass.scss")
|
||||
css_file = os.path.join(hyperglass_root, "static/css/hyperglass.css")
|
||||
# Renders Jinja2 template as Sass file
|
||||
try:
|
||||
template = env.get_template("templates/hyperglass.scss")
|
||||
rendered_output = template.render(
|
||||
color_btn_submit=branding.color_btn_submit,
|
||||
color_progressbar=branding.color_progressbar,
|
||||
color_tag_loctitle=branding.color_tag_loctitle,
|
||||
color_tag_cmdtitle=branding.color_tag_cmdtitle,
|
||||
color_tag_cmd=branding.color_tag_cmd,
|
||||
color_tag_loc=branding.color_tag_loc,
|
||||
color_bg=branding.color_bg,
|
||||
color_danger=branding.color_danger,
|
||||
primary_font_url=branding.primary_font_url,
|
||||
primary_font_name=branding.primary_font_name,
|
||||
mono_font_url=branding.mono_font_url,
|
||||
mono_font_name=branding.mono_font_name,
|
||||
)
|
||||
template_file = "templates/hyperglass.scss"
|
||||
template = env.get_template(template_file)
|
||||
rendered_output = template.render(**branding)
|
||||
with open(scss_file, "w") as scss_output:
|
||||
scss_output.write(rendered_output)
|
||||
except:
|
||||
logger.error("Error rendering Jinja2 template.")
|
||||
raise TypeError("Error rendering Jinja2 template.")
|
||||
logger.error(f"Error rendering Jinja2 template {template_file}.")
|
||||
raise
|
||||
# Compiles Sass to CSS
|
||||
try:
|
||||
generated_sass = sass.compile(filename=scss_file)
|
||||
with open(css_file, "w") as css_output:
|
||||
css_output.write(generated_sass)
|
||||
logger.info("Rendered Sass templates to CSS files.")
|
||||
logger.info(f"Compiled Sass file {scss_file} to CSS file {css_file}.")
|
||||
except:
|
||||
logger.error("Error rendering Sass template.")
|
||||
logger.error(f"Error compiling Sass in file {scss_file}.")
|
||||
raise
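# Illustrative sketch (not part of this commit): the Sass step in css()
# compiles a rendered .scss file to CSS; libsass can also compile from a
# string, which keeps this example self-contained.
import sass

print(sass.compile(string="$bg: #fbfffe;\nbody { background-color: $bg; }"))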
|
||||
|
@@ -37,6 +37,9 @@
|
||||
{% if enable_footer == true %}
|
||||
{% include "templates/footer.html" %}
|
||||
{% endif %}
|
||||
{% if enable_credit == true %}
|
||||
{% include "templates/credit.html" %}
|
||||
{% endif %}
|
||||
{% endblock %}
|
||||
</body>
|
||||
</html>
|
||||
|
@@ -37,6 +37,9 @@
|
||||
{% if enable_footer == true %}
|
||||
{% include "templates/footer.html" %}
|
||||
{% endif %}
|
||||
{% if enable_credit == true %}
|
||||
{% include "templates/credit.html" %}
|
||||
{% endif %}
|
||||
{% endblock %}
|
||||
</body>
|
||||
</html>
|
||||
|
2
hyperglass/render/templates/bgp_aspath.html
Normal file
@@ -0,0 +1,2 @@
|
||||
<p class="title">{{ bgp_aspath["title"] }}</p>
|
||||
{{ bgp_aspath["content"] }}
|
2
hyperglass/render/templates/bgp_community.html
Normal file
@@ -0,0 +1,2 @@
|
||||
<p class="title">{{ bgp_community["title"] }}</p>
|
||||
{{ bgp_community["content"] }}
|
1
hyperglass/render/templates/content/.gitignore
vendored
Normal file
@@ -0,0 +1 @@
|
||||
*.md
|
12
hyperglass/render/templates/content/bgp_aspath.md.example
Normal file
@@ -0,0 +1,12 @@
|
||||
+++
|
||||
title = "Supported AS Path Patterns"
|
||||
+++
|
||||
{{ site_title }} accepts the following `AS_PATH` regular expression patterns:
|
||||
|
||||
| Expression | Match |
|
||||
| :----------------------- | ----------------------------------------------------: |
|
||||
| `_65000$` | Originated by AS65000 |
|
||||
| `^65000\_` | Received from AS65000 |
|
||||
| `_65000_` | Via AS65000 |
|
||||
| `_65000_65001_` | Via AS65000 and AS65001 |
|
||||
| `_65000(_.+_)65001$` | Anything from AS65001 that passed through AS65000 |
|
10
hyperglass/render/templates/content/bgp_community.md.example
Normal file
@@ -0,0 +1,10 @@
|
||||
+++
|
||||
title = "BGP Communities"
|
||||
+++
|
||||
{{ site_title }} makes use of the following BGP communities:
|
||||
|
||||
| Community | Description |
|
||||
| :-------- | :---------- |
|
||||
| `65000:1` | Example 1 |
|
||||
| `65000:2` | Example 2 |
|
||||
| `65000:3` | Example 3 |
|
@@ -1 +1,3 @@
|
||||
+++
|
||||
+++
|
||||
By using {{ site_title }}, you agree to be bound by the following terms of use: All queries executed on this page are logged for analysis and troubleshooting. Users are prohibited from automating queries, or attempting to process queries in bulk. This service is provided on a best effort basis, and {{ org_name }} makes no availability or performance warranties or guarantees whatsoever.
|
@@ -1 +1,3 @@
|
||||
<div class="content is-small has-text-centered">
|
||||
<p>Powered by <a href="https://github.com/checktheroads/hyperglass">Hyperglass</a>. Source code licensed <a href="https://github.com/checktheroads/hyperglass/blob/master/LICENSE">BSD 3-Clause Clear.</a></p>
|
||||
</div>
|
||||
|
@@ -1,12 +1,7 @@
|
||||
<footer class="footer">
|
||||
<div class="container">
|
||||
<div class="content is-small has-text-centered">
|
||||
{{ footer_content }}
|
||||
{{ footer["content"] }}
|
||||
</div>
|
||||
{% if enable_credit == true %}
|
||||
<div class="content is-small has-text-centered">
|
||||
{% include "templates/credit.html" %}
|
||||
</div>
|
||||
{% endif %}
|
||||
</div>
|
||||
</footer>
|
||||
|
@@ -6,11 +6,7 @@
<body>
{% block content %}
{% if modal_active %}
<div class="modal is-active" id="ratelimit">
{% else %}
<div class="modal" id="ratelimit">
{% endif %}
<div class="modal-background"></div>
<div class="modal-content">
<article class="message is-danger">
@@ -27,6 +23,28 @@
</article>
</div>
</div>
{% if enable_bgp_aspath == true %}
<div class="modal" id="help_bgp_aspath">
<div class="modal-background" onclick="closeModal()"></div>
<div class="modal-content is-clipped">
<div class="box">
{% include "templates/bgp_aspath.html" %}
</div>
</div>
<button class="modal-close is-large" aria-label="close" onclick="closeModal()"></button>
</div>
{% endif %}
{% if enable_bgp_community == true %}
<div class="modal" id="help_bgp_community">
<div class="modal-background" onclick="closeModal()"></div>
<div class="modal-content">
<div class="box">
{% include "templates/bgp_community.html" %}
</div>
</div>
<button class="modal-close is-large" aria-label="close" onclick="closeModal()"></button>
</div>
{% endif %}
<nav class="navbar">
<div class="container is-fluid">
<div class="navbar-brand">
@@ -231,8 +249,10 @@
{% if enable_footer == true %}
{% include "templates/footer.html" %}
{% endif %}
{% if enable_credit == true %}
{% include "templates/credit.html" %}
{% endif %}
{% endblock %}
</div>
</body>

</html>
@@ -2,9 +2,10 @@
var progress = ($('#progress'));
var resultsbox = ($('#resultsbox'));
resultsbox.hide();
progress.hide();
var ipprefix_error = ($('#ipprefix_error'));
var ipprefix_input = ($('#ipprefix'));
adjustDropdowns();
clearPage();

// Bulma Toggable Dropdown - help text
let dropdown = document.querySelector('#help-dropdown');
@@ -13,6 +14,18 @@ dropdown.addEventListener('click', function(event) {
dropdown.classList.toggle('is-active');
});

function bgpHelpASPath() {
$("#help_bgp_aspath").addClass("is-active");
}

function bgpHelpCommunity() {
$("#help_bgp_community").addClass("is-active");
}

function closeModal() {
$(".modal").removeClass("is-active");
}

// Adjust behavior of help text dropdown based on device screen size
function adjustHeight() {
var actual_width = window.innerWidth;
@@ -31,6 +44,34 @@ function adjustDropdowns() {
}
}

function clearErrors() {
progress.hide();
ipprefix_error.hide();
if (ipprefix_input.hasClass("is-warning")) {
ipprefix_input.removeClass("is-warning");
};
if (ipprefix_input.hasClass("is-danger")) {
ipprefix_input.removeClass("is-danger");
};
}

function clearPage() {
progress.hide();
resultsbox.hide();
ipprefix_error.hide();
if (ipprefix_input.hasClass("is-warning")) {
ipprefix_input.removeClass("is-warning");
};
if (ipprefix_input.hasClass("is-danger")) {
ipprefix_input.removeClass("is-danger");
};
}

function prepResults() {
progress.show();
resultsbox.show();
}

$(document).ready(function() {
var defaultasn = $("#network").val();
$.ajax({
@@ -55,8 +96,8 @@ $('#network').on('change', () => {
url: `/routers/${asn}`,
type: 'get',
success: function(data) {
cleanPage();
updateRouters(JSON.parse(data));

},
error: function(err) {
console.log(err)
@@ -75,9 +116,10 @@ $('#lgForm').on('submit', function() {
submitForm();
});

var submitForm = function() {
progress.hide();
function submitForm() {
clearErrors();
// progress.hide();
// ipprefix_error.hide();
var cmd = $('#cmd option:selected').val();
var cmdtitle = $('#cmd option:selected').text();
var network = $('#network option:selected').val();
@@ -85,9 +127,8 @@ var submitForm = function() {
var routername = $('#router option:selected').text();
var ipprefix = $('#ipprefix').val();

$('#output').text("")
$('#queryInfo').text("")

$('#output').text("");
$('#queryInfo').text("");
$('#queryInfo').html(`
<div class="field is-grouped is-grouped-multiline">
<div class="control">
@@ -103,8 +144,7 @@ var submitForm = function() {
</div>
</div>
</div>
`)

`);

$.ajax({
url: `/lg`,
@@ -116,18 +156,17 @@ var submitForm = function() {
}),
contentType: "application/json; charset=utf-8",
context: document.body,
readyState: resultsbox.show() && progress.show(),
readyState: prepResults(),
statusCode: {
200: function(response, code) {
progress.hide();
$('#output').html(`<br><div class="content"><p class="query-output" id="output">${response}</p></div>`);
},
405: function(response, code) {
resultsbox.hide()
progress.hide();
$('#ipprefix_error').show()
$('#ipprefix').addClass('is-warning');
$('#ipprefix_error').html(`
clearPage();
ipprefix_error.show()
ipprefix_input.addClass('is-warning');
ipprefix_error.html(`
<br>
<article class="message is-warning is-small" style="display: block;">
<div class="message-header" style="display: block;">
@@ -140,11 +179,10 @@ var submitForm = function() {
`);
},
415: function(response, code) {
resultsbox.hide()
progress.hide();
$('#ipprefix_error').show()
$('#ipprefix').addClass('is-danger');
$('#ipprefix_error').html(`
clearPage();
ipprefix_error.show()
ipprefix_input.addClass('is-danger');
ipprefix_error.html(`
<br>
<article class="message is-danger is-small" style="display: block;">
<div class="message-header" style="display: block;">
@@ -157,7 +195,7 @@ var submitForm = function() {
`);
},
429: function(response, code) {
progress.hide();
clearPage();
$("#ratelimit").addClass("is-active");
}
}
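The AJAX handler above branches on HTTP status codes: 200 renders the query output, 405 and 415 surface warning/error messages on the prefix input, and 429 opens the rate-limit modal. A minimal Flask sketch of a /lg endpoint emitting those codes is shown below; the route shape, field names, and messages are assumptions for illustration, not the actual hyperglass handler (429 would normally come from a rate limiter rather than the view itself).

    # Illustrative sketch of a /lg endpoint returning the status codes the front end handles.
    from flask import Flask, request

    app = Flask(__name__)

    @app.route("/lg", methods=["POST"])
    def lg():
        query = request.get_json(silent=True)
        if query is None:
            return "Invalid request body", 415
        if not query.get("ipprefix"):
            return "Unsupported or missing query target", 405
        return f"Example output for {query['ipprefix']}", 200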
@@ -29,6 +29,14 @@ body
p, a
color: findColorInvert($body-background-color)

.has-background-danger .content
p, a
color: findColorInvert($danger)

.content
p, a
color: findColorInvert($body-background-color)

.navbar, .navbar-menu, .navbar-brand, .navbar-tabs
background-color: $body-background-color
@@ -1,3 +1,7 @@
"""
https://github.com/checktheroads/hyperglass
Gunicorn WSGI Target
"""
import os
import hyperglass.hyperglass
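The hunk above gives the Gunicorn WSGI target module a docstring. For a Flask app such a module conventionally exposes a module-level WSGI callable for Gunicorn to import; the sketch below shows that convention, assuming the Flask instance is hyperglass.hyperglass.app (consistent with the hyperglass.app.run(...) call in manage.py further down), with the Gunicorn invocation itself being illustrative.

    # Illustrative sketch of a Gunicorn WSGI target for a Flask application.
    import hyperglass.hyperglass

    # Gunicorn resolves a module-level callable, e.g.:
    #   gunicorn --workers 4 --bind 127.0.0.1:8001 wsgi:app
    app = hyperglass.hyperglass.app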
28 manage.py
@@ -1,20 +1,22 @@
#!/usr/bin/env python3

# Module Imports
# Standard Imports
import os
import grp
import pwd
import sys
import glob
import click
import random
import shutil
import string

# Module Imports
import click
from passlib.hash import pbkdf2_sha256

# Project Imports
from hyperglass import hyperglass
from hyperglass import render as render
from hyperglass import render

# Initialize shutil copy function
cp = shutil.copyfile
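The reorganized import block pulls pbkdf2_sha256 from passlib, which the hunk further down ("Use this hash as the password for the device using the API module") uses to generate password hashes. For reference, passlib's interface for that hash is shown below; the example password is purely illustrative.

    # passlib's pbkdf2_sha256: hash() returns a salted hash string, verify() checks a candidate.
    from passlib.hash import pbkdf2_sha256

    hashed = pbkdf2_sha256.hash("example-password")
    assert pbkdf2_sha256.verify("example-password", hashed)
    print(hashed)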
@@ -29,7 +31,7 @@ def hg():
def clearcache():
"""Clears the Flask-Caching cache"""
try:
hyperglass.clearCache()
hyperglass.clear_cache()
click.secho("✓ Successfully cleared cache.", fg="green", bold=True)
except:
click.secho("✗ Failed to clear cache.", fg="red", bold=True)
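The clearcache command's docstring says it clears the Flask-Caching cache, and this hunk renames the call from clearCache() to the snake_case clear_cache(). As a rough sketch of what such a helper typically wraps (the Cache setup below is an assumption for illustration, not hyperglass's actual module):

    # Illustrative sketch of a clear_cache() helper backed by Flask-Caching.
    from flask import Flask
    from flask_caching import Cache

    app = Flask(__name__)
    cache = Cache(app, config={"CACHE_TYPE": "simple"})

    def clear_cache():
        """Drop all cached looking-glass responses."""
        cache.clear()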
@@ -54,10 +56,11 @@ Use this hash as the password for the device using the API module. For example,
@hg.command()
def testserver():
def devserver():
"""Starts Flask development server for testing without WSGI/Reverse Proxy"""
try:
hyperglass.render.css.renderTemplate()
hyperglass.render.css()
# hyperglass.metrics.start_http_server(9100)
hyperglass.app.run(host="0.0.0.0", debug=True, port=5000)
click.secho("✓ Started test server.", fg="green", bold=True)
except:
@@ -69,16 +72,21 @@ def testserver():
def render():
"""Renders Jinja2 and Sass templates to HTML & CSS files"""
try:
hyperglass.render.css.renderTemplate()
hyperglass.render.css()
click.secho("✓ Successfully rendered CSS templates.", fg="green", bold=True)
except:
click.secho("✗ Failed to render CSS templates.", fg="red", bold=True)
raise

@hg.command()
def content():
"""Renders Jinja2 and Sass templates to HTML & CSS files"""
try:
hyperglass.render.html.renderTemplate("index")
click.secho("✓ Successfully rendered HTML templates.", fg="green", bold=True)
hyperglass.render.markdown()
click.secho("✓ Successfully rendered content templates.", fg="green", bold=True)
except:
click.secho("✗ Failed to render HTML templates.", fg="red", bold=True)
click.secho("✗ Failed to render content templates.", fg="red", bold=True)
raise