
Async all the things

Flask → Sanic, Requests → HTTP3, Add SSHTunnel for SSH Proxying, Remove Gunicorn dependency
This commit is contained in:
Matt Love
2019-07-15 02:30:42 -07:00
parent c53a8ce373
commit b7747cf1df
15 changed files with 535 additions and 587 deletions
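For orientation, a minimal before/after sketch of the handler pattern this migration applies throughout hyperglass.py; the route name and body below are illustrative only, not taken from the diff:

# Before: synchronous Flask view pulling the request from a global
from flask import Flask, Response, request

flask_app = Flask(__name__)

@flask_app.route("/example", methods=["POST"])
def example():
    data = request.get_json()
    return Response("ok", status=200)

# After: async Sanic handler; the request object is passed in explicitly
from sanic import Sanic, response

sanic_app = Sanic(__name__)

@sanic_app.route("/example", methods=["POST"])
async def example(request):
    data = request.json
    return response.html("ok", status=200)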

View File

@@ -22,8 +22,9 @@ class Construct:
input parameters.
"""
def __init__(self, device):
def __init__(self, device, transport):
self.device = device
self.transport = transport
def get_src(self, ver):
"""
@@ -48,15 +49,17 @@ class Construct:
cmd_path = f"{nos}.{afi}.{query_type}"
return operator.attrgetter(cmd_path)(commands)
def ping(self, transport, target):
def ping(self, target):
"""Constructs ping query parameters from pre-validated input"""
query_type = "ping"
logger.debug(f"Constructing {query_type} query for {target} via {transport}...")
logger.debug(
f"Constructing {query_type} query for {target} via {self.transport}..."
)
query = None
ip_version = ipaddress.ip_network(target).version
afi = f"ipv{ip_version}"
source = self.get_src(ip_version)
if transport == "rest":
if self.transport == "rest":
query = json.dumps(
{
"query_type": query_type,
@@ -65,24 +68,25 @@ class Construct:
"target": target,
}
)
elif transport == "scrape":
elif self.transport == "scrape":
conf_command = self.device_commands(self.device.nos, afi, query_type)
fmt_command = conf_command.format(target=target, source=source)
query = (self.device.address.exploded, self.device.nos, fmt_command)
query = conf_command.format(target=target, source=source)
logger.debug(f"Constructed query: {query}")
return query
def traceroute(self, transport, target):
def traceroute(self, target):
"""
Constructs traceroute query parameters from pre-validated input.
"""
query_type = "traceroute"
logger.debug(f"Constructing {query_type} query for {target} via {transport}...")
logger.debug(
f"Constructing {query_type} query for {target} via {self.transport}..."
)
query = None
ip_version = ipaddress.ip_network(target).version
afi = f"ipv{ip_version}"
source = self.get_src(ip_version)
if transport == "rest":
if self.transport == "rest":
query = json.dumps(
{
"query_type": query_type,
@@ -92,62 +96,64 @@ class Construct:
}
)
elif transport == "scrape":
elif self.transport == "scrape":
conf_command = self.device_commands(self.device.nos, afi, query_type)
fmt_command = conf_command.format(target=target, source=source)
query = (self.device.address.exploded, self.device.nos, fmt_command)
query = conf_command.format(target=target, source=source)
logger.debug(f"Constructed query: {query}")
return query
def bgp_route(self, transport, target):
def bgp_route(self, target):
"""
Constructs bgp_route query parameters from pre-validated input.
"""
query_type = "bgp_route"
logger.debug(f"Constructing {query_type} query for {target} via {transport}...")
logger.debug(
f"Constructing {query_type} query for {target} via {self.transport}..."
)
query = None
ip_version = ipaddress.ip_network(target).version
afi = f"ipv{ip_version}"
if transport == "rest":
if self.transport == "rest":
query = json.dumps({"query_type": query_type, "afi": afi, "target": target})
if transport == "scrape":
elif self.transport == "scrape":
conf_command = self.device_commands(self.device.nos, afi, query_type)
fmt_command = conf_command.format(target=target)
query = (self.device.address.exploded, self.device.nos, fmt_command)
query = conf_command.format(target=target)
logger.debug(f"Constructed query: {query}")
return query
def bgp_community(self, transport, target):
def bgp_community(self, target):
"""
Constructs bgp_community query parameters from pre-validated
input.
"""
query_type = "bgp_community"
logger.debug(f"Constructing {query_type} query for {target} via {transport}...")
logger.debug(
f"Constructing {query_type} query for {target} via {self.transport}..."
)
afi = "dual"
query = None
if transport == "rest":
if self.transport == "rest":
query = json.dumps({"query_type": query_type, "afi": afi, "target": target})
if transport == "scrape":
elif self.transport == "scrape":
conf_command = self.device_commands(self.device.nos, afi, query_type)
fmt_command = conf_command.format(target=target)
query = (self.device.address.exploded, self.device.nos, fmt_command)
query = conf_command.format(target=target)
logger.debug(f"Constructed query: {query}")
return query
def bgp_aspath(self, transport, target):
def bgp_aspath(self, target):
"""
Constructs bgp_aspath query parameters from pre-validated input.
"""
query_type = "bgp_aspath"
logger.debug(f"Constructing {query_type} query for {target} via {transport}...")
logger.debug(
f"Constructing {query_type} query for {target} via {self.transport}..."
)
afi = "dual"
query = None
if transport == "rest":
if self.transport == "rest":
query = json.dumps({"query_type": query_type, "afi": afi, "target": target})
if transport == "scrape":
elif self.transport == "scrape":
conf_command = self.device_commands(self.device.nos, afi, query_type)
fmt_command = conf_command.format(target=target)
query = (self.device.address.exploded, self.device.nos, fmt_command)
query = conf_command.format(target=target)
logger.debug(f"Constructed query: {query}")
return query
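Taken together, the changes above move the transport argument from each query method into the constructor, and scrape queries now return just the formatted command string rather than an (address, nos, command) tuple. A hedged usage sketch, assuming a validated device object as used elsewhere in this module:

# Old call pattern (pre-commit):
#   query = Construct(device).ping("scrape", "192.0.2.1")

# New call pattern: transport is bound once at construction time
constructor = Construct(device, transport="scrape")
query = constructor.ping("192.0.2.1")  # formatted CLI command string for the device's NOS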

View File

@@ -4,21 +4,15 @@ returns errors if input is invalid. Passes validated parameters to
construct.py, which is used to build & run the Netmiko connections or
hyperglass-frr API calls, returns the output back to the front end.
"""
# Standard Library Imports
import json
import time
import asyncio
# Third Party Imports
import http3
import http3.exceptions
import sshtunnel
from logzero import logger
from netmiko import ConnectHandler
from netmiko import NetMikoAuthenticationException
from netmiko import NetmikoAuthError
from netmiko import NetmikoTimeoutError
from netmiko import NetMikoTimeoutException
from netmiko import redispatch
# Project Imports
from hyperglass.command.construct import Construct
@@ -30,47 +24,144 @@ from hyperglass.configuration import params
from hyperglass.configuration import proxies
from hyperglass.constants import Supported
from hyperglass.constants import code
from hyperglass.constants import protocol_map
from hyperglass.exceptions import CantConnect
class Rest:
"""Executes connections to REST API devices"""
class Connect:
"""
Parent class for all connection types:
def __init__(self, transport, device, query_type, target):
self.transport = transport
self.device = device
scrape() connects to devices via SSH for "screen scraping"
rest() connects to devices via HTTP for RESTful API communication
"""
def __init__(self, device_config, query_type, target, transport):
self.device_config = device_config
self.query_type = query_type
self.target = target
self.cred = getattr(credentials, self.device.credential)
self.query = getattr(Construct(self.device), self.query_type)(
self.transport, self.target
)
async def frr(self):
"""Sends HTTP POST to router running the hyperglass-frr API"""
logger.debug(f"FRR host params:\n{self.device}")
logger.debug(f"Query parameters: {self.query}")
self.transport = transport
self.cred = getattr(credentials, device_config.credential)
self.query = getattr(Construct(device_config, transport), query_type)(target)
async def scrape(self):
"""
Connects to the router via Netmiko library, return the command
output. If an SSH proxy is enabled, creates an SSH tunnel via
the sshtunnel library, and netmiko uses the local binding to
connect to the remote device.
"""
response = None
try:
headers = {
"Content-Type": "application/json",
"X-API-Key": self.cred.password.get_secret_value(),
}
frr_endpoint = (
f"http://{self.device.address.exploded}:{self.device.port}/frr"
)
logger.debug(f"HTTP Headers: {headers}")
logger.debug(f"FRR endpoint: {frr_endpoint}")
if self.device_config.proxy:
device_proxy = getattr(proxies, self.device_config.proxy)
logger.debug(
f"Proxy: {device_proxy.address.compressed}:{device_proxy.port}"
)
logger.debug(
"Connecting to {dev} via sshtunnel library...".format(
dev=self.device_config.proxy
)
)
with sshtunnel.open_tunnel(
device_proxy.address.compressed,
device_proxy.port,
ssh_username=device_proxy.username,
ssh_password=device_proxy.password.get_secret_value(),
remote_bind_address=(
self.device_config.address.compressed,
self.device_config.port,
),
local_bind_address=("localhost", 0),
) as tunnel:
logger.debug(f"Established tunnel with {self.device_config.proxy}")
scrape_host = {
"host": "localhost",
"port": tunnel.local_bind_port,
"device_type": self.device_config.nos,
"username": self.cred.username,
"password": self.cred.password.get_secret_value(),
"fast_cli": True,
}
logger.debug(f"Local binding: localhost:{tunnel.local_bind_port}")
try:
logger.debug(
"Connecting to {dev} via Netmiko library...".format(
dev=self.device_config.location
)
)
nm_connect_direct = ConnectHandler(**scrape_host)
response = nm_connect_direct.send_command(self.query)
except (
NetMikoAuthenticationException,
NetMikoTimeoutException,
NetmikoAuthError,
NetmikoTimeoutError,
sshtunnel.BaseSSHTunnelForwarderError,
) as scrape_error:
raise CantConnect(scrape_error)
else:
scrape_host = {
"host": self.device_config.address,
"device_type": self.device_config.nos,
"username": self.cred.username,
"password": self.cred.password.get_secret_value(),
"fast_cli": True,
}
try:
logger.debug(
"Connecting to {dev} via Netmiko library...".format(
dev=self.device_config.location
)
)
nm_connect_direct = ConnectHandler(**scrape_host)
response = nm_connect_direct.send_command(self.query)
except (
NetMikoAuthenticationException,
NetMikoTimeoutException,
NetmikoAuthError,
NetmikoTimeoutError,
sshtunnel.BaseSSHTunnelForwarderError,
) as scrape_error:
raise CantConnect(scrape_error)
if not response:
raise CantConnect("No response")
status = code.valid
logger.debug(f"Output for query: {self.query}:\n{response}")
except CantConnect as scrape_error:
logger.error(scrape_error)
response = params.messages.general
status = code.invalid
return response, status
async def rest(self):
"""Sends HTTP POST to router running a hyperglass API agent"""
logger.debug(f"Query parameters: {self.query}")
uri = Supported.map_rest(self.device_config.nos)
headers = {
"Content-Type": "application/json",
"X-API-Key": self.cred.password.get_secret_value(),
}
http_protocol = protocol_map.get(self.device_config.port, "http")
endpoint = "{protocol}://{addr}:{port}/{uri}".format(
protocol=http_protocol,
addr=self.device_config.address.exploded,
port=self.device_config.port,
uri=uri,
)
logger.debug(f"HTTP Headers: {headers}")
logger.debug(f"URL endpoint: {endpoint}")
try:
http_client = http3.AsyncClient()
frr_response = await http_client.post(
frr_endpoint, headers=headers, json=self.query, timeout=7
raw_response = await http_client.post(
endpoint, headers=headers, json=self.query, timeout=7
)
response = frr_response.text
status = frr_response.status_code
response = raw_response.text
status = raw_response.status_code
logger.debug(f"FRR status code: {status}")
logger.debug(f"FRR response text:\n{response}")
logger.debug(f"HTTP status code: {status}")
logger.debug(f"Output for query {self.query}:\n{response}")
except (
http3.exceptions.ConnectTimeout,
http3.exceptions.CookieConflict,
@@ -87,167 +178,21 @@ class Rest:
http3.exceptions.Timeout,
http3.exceptions.TooManyRedirects,
http3.exceptions.WriteTimeout,
OSError,
) as rest_error:
logger.error(
f"Error connecting to device {self.device.location}: {rest_error}"
)
logger.error(f"Error connecting to device {self.device_config.location}")
logger.error(rest_error)
response = params.messages.general
status = code.invalid
return response, status
def bird(self):
"""Sends HTTP POST to router running the hyperglass-bird API"""
logger.debug(f"BIRD host params:\n{self.device}")
logger.debug(f"Query parameters: {self.query}")
try:
headers = {
"Content-Type": "application/json",
"X-API-Key": self.cred.password.get_secret_value(),
}
bird_endpoint = (
f"http://{self.device.address.exploded}:{self.device.port}/bird"
)
logger.debug(f"HTTP Headers: {headers}")
logger.debug(f"BIRD endpoint: {bird_endpoint}")
http_client = http3.AsyncClient()
bird_response = http_client.post(
bird_endpoint, headers=headers, json=self.query, timeout=7
)
response = bird_response.text
status = bird_response.status_code
logger.debug(f"BIRD status code: {status}")
logger.debug(f"BIRD response text:\n{response}")
except (
http3.exceptions.ConnectTimeout,
http3.exceptions.CookieConflict,
http3.exceptions.DecodingError,
http3.exceptions.InvalidURL,
http3.exceptions.PoolTimeout,
http3.exceptions.ProtocolError,
http3.exceptions.ReadTimeout,
http3.exceptions.RedirectBodyUnavailable,
http3.exceptions.RedirectLoop,
http3.exceptions.ResponseClosed,
http3.exceptions.ResponseNotRead,
http3.exceptions.StreamConsumed,
http3.exceptions.Timeout,
http3.exceptions.TooManyRedirects,
http3.exceptions.WriteTimeout,
) as rest_error:
logger.error(f"Error connecting to device {self.device}: {rest_error}")
response = params.messages.general
status = code.invalid
return response, status
class Netmiko:
"""Executes connections to Netmiko devices"""
def __init__(self, transport, device, query_type, target):
self.device = device
self.target = target
self.cred = getattr(credentials, self.device.credential)
self.location, self.nos, self.command = getattr(Construct(device), query_type)(
transport, target
)
self.nm_host = {
"host": self.location,
"device_type": self.nos,
"username": self.cred.username,
"password": self.cred.password.get_secret_value(),
"global_delay_factor": 0.5,
}
def direct(self):
"""
Connects to the router via netmiko library, return the command
output.
"""
logger.debug(f"Connecting to {self.device.location} via Netmiko library...")
try:
nm_connect_direct = ConnectHandler(**self.nm_host)
response = nm_connect_direct.send_command(self.command)
status = code.valid
logger.debug(f"Response for direct command {self.command}:\n{response}")
except (
NetMikoAuthenticationException,
NetMikoTimeoutException,
NetmikoAuthError,
NetmikoTimeoutError,
) as netmiko_exception:
response = params.messages.general
status = code.invalid
logger.error(f"{netmiko_exception}, {status}")
return response, status
def proxied(self):
"""
Connects to the proxy server via netmiko library, then logs
into the router via SSH.
"""
device_proxy = getattr(proxies, self.device.proxy)
nm_proxy = {
"host": device_proxy.address.exploded,
"username": device_proxy.username,
"password": device_proxy.password.get_secret_value(),
"device_type": device_proxy.nos,
"global_delay_factor": 0.5,
}
nm_connect_proxied = ConnectHandler(**nm_proxy)
nm_ssh_command = device_proxy.ssh_command.format(**self.nm_host) + "\n"
logger.debug(f"Netmiko proxy {self.device.proxy}")
logger.debug(f"Proxy SSH command: {nm_ssh_command}")
nm_connect_proxied.write_channel(nm_ssh_command)
time.sleep(1)
proxy_output = nm_connect_proxied.read_channel()
logger.debug(f"Proxy output:\n{proxy_output}")
try:
# Accept SSH key warnings
if "Are you sure you want to continue connecting" in proxy_output:
logger.debug("Received OpenSSH key warning")
nm_connect_proxied.write_channel("yes" + "\n")
nm_connect_proxied.write_channel(self.nm_host["password"] + "\n")
# Send password on prompt
elif "assword" in proxy_output:
logger.debug("Received password prompt")
nm_connect_proxied.write_channel(self.nm_host["password"] + "\n")
proxy_output += nm_connect_proxied.read_channel()
# Reclassify netmiko connection as configured device type
logger.debug(
f'Redispatching netmiko with device class {self.nm_host["device_type"]}'
)
redispatch(nm_connect_proxied, self.nm_host["device_type"])
response = nm_connect_proxied.send_command(self.command)
status = code.valid
logger.debug(f"Netmiko proxied response:\n{response}")
except (
NetMikoAuthenticationException,
NetMikoTimeoutException,
NetmikoAuthError,
NetmikoTimeoutError,
) as netmiko_exception:
response = params.messages.general
status = code.invalid
logger.error(f"{netmiko_exception}, {status},Proxy: {self.device.proxy}")
return response, status
class Execute:
"""
Ingests user input, runs blacklist check, runs prefix length check
(if enabled), pulls all configuration variables for the input
router.
Ingests raw user input, performs validation of target input, pulls
all configuration variables for the input router and connects to the
selected device to execute the query.
"""
def __init__(self, lg_data):
@@ -256,29 +201,24 @@ class Execute:
self.input_type = self.input_data["type"]
self.input_target = self.input_data["target"]
def parse(self, output, nos):
def parse(self, raw_output, nos):
"""
Splits BGP output by AFI, returns only IPv4 & IPv6 output for
Splits BGP raw output by AFI, returns only IPv4 & IPv6 output for
protocol-agnostic commands (Community & AS_PATH Lookups).
"""
logger.debug("Parsing output...")
logger.debug("Parsing raw output...")
parsed = output
parsed = raw_output
if self.input_type in ("bgp_community", "bgp_aspath"):
logger.debug(f"Parsing raw output for device type {nos}")
if nos in ("cisco_ios",):
logger.debug(f"Parsing output for device type {nos}")
delimiter = "For address family: "
parsed_ipv4 = output.split(delimiter)[1]
parsed_ipv6 = output.split(delimiter)[2]
parsed = delimiter + parsed_ipv4 + delimiter + parsed_ipv6
parsed_raw = raw_output.split(delimiter)[1:3]
parsed = "\n\n".join([delimiter + afi.rstrip() for afi in parsed_raw])
elif nos in ("cisco_xr",):
logger.debug(f"Parsing output for device type {nos}")
delimiter = "Address Family: "
parsed_ipv4 = output.split(delimiter)[1]
parsed_ipv6 = output.split(delimiter)[2]
parsed = delimiter + parsed_ipv4 + delimiter + parsed_ipv6
parsed_raw = raw_output.split(delimiter)[1:3]
parsed = "\n\n".join([delimiter + afi.rstrip() for afi in parsed_raw])
return parsed
async def response(self):
@@ -303,23 +243,16 @@ class Execute:
logger.debug(f"Validity: {validity}, Message: {msg}, Status: {status}")
transport = Supported.map_transport(device_config.nos)
connection = Connect(
device_config, self.input_type, self.input_target, transport
)
if Supported.is_rest(device_config.nos):
connection = Rest("rest", device_config, self.input_type, self.input_target)
raw_output, status = await getattr(connection, device_config.nos)()
output = self.parse(raw_output, device_config.nos)
raw_output, status = await connection.rest()
elif Supported.is_scrape(device_config.nos):
logger.debug("Initializing Netmiko...")
raw_output, status = await connection.scrape()
output = self.parse(raw_output, device_config.nos)
connection = Netmiko(
"scrape", device_config, self.input_type, self.input_target
)
if device_config.proxy:
raw_output, status = connection.proxied()
elif not device_config.proxy:
raw_output, status = connection.direct()
output = self.parse(raw_output, device_config.nos)
logger.debug(f"Parsed output for device type {device_config.nos}:\n{output}")
logger.debug(
f"Parsed output for device type {device_config.nos}:\n{output}"
)
return (output, status)
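Because removed and added lines are interleaved above, the resulting dispatch logic is easier to read in isolation. A reconstruction of the new response() path, with the validation and logging from the surrounding code omitted:

transport = Supported.map_transport(device_config.nos)
connection = Connect(device_config, self.input_type, self.input_target, transport)
if Supported.is_rest(device_config.nos):
    raw_output, status = await connection.rest()
elif Supported.is_scrape(device_config.nos):
    raw_output, status = await connection.scrape()
output = self.parse(raw_output, device_config.nos)
return (output, status)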

View File

@@ -86,11 +86,8 @@ def ip_validate(target):
try:
valid_ip = ipaddress.ip_network(target)
if valid_ip.is_reserved or valid_ip.is_unspecified or valid_ip.is_loopback:
validity = False
logger.debug(f"IP {valid_ip} is invalid")
if valid_ip.is_global:
validity = True
logger.debug(f"IP {valid_ip} is valid")
raise ValueError
validity = True
except (ipaddress.AddressValueError, ValueError):
logger.debug(f"IP {target} is invalid")
validity = False
@@ -299,8 +296,6 @@ class Validate:
validity = True
msg = f"{target} matched large community."
status = code.valid
if not validity:
logger.error(f"{msg}, {status}")
logger.debug(f"{msg}, {status}")
return (validity, msg, status)
@@ -320,7 +315,5 @@ class Validate:
validity = True
msg = f"{target} matched AS_PATH regex."
status = code.valid
if not validity:
logger.error(f"{msg}, {status}")
logger.debug(f"{msg}, {status}")
return (validity, msg, status)

View File

@@ -174,6 +174,7 @@ class Proxy(BaseSettings):
"""Model for per-proxy config in devices.yaml"""
address: Union[IPvAnyAddress, str]
port: int = 22
username: str
password: SecretStr
nos: str
@@ -348,7 +349,7 @@ class Messages(BaseSettings):
"<b>{d}</b> requires IPv6 BGP lookups" "to be in CIDR notation."
)
invalid_ip: str = "<b>{i}</b> is not a valid IP address."
invalid_dual: str = "invalid_dual <b>{i}</b> is an invalid {qt}."
invalid_dual: str = "<b>{i}</b> is an invalid {qt}."
general: str = "An error occurred."
directed_cidr: str = "<b>{q}</b> queries can not be in CIDR format."

View File

@@ -2,6 +2,8 @@
Global Constants for hyperglass
"""
protocol_map = {80: "http", 8080: "http", 443: "https", 8443: "https"}
class Status:
"""
@@ -9,8 +11,6 @@ class Status:
hyperglass.
"""
# pylint: disable=too-few-public-methods
codes_dict = {
200: ("valid", "Valid Query"),
405: ("not_allowed", "Query Not Allowed"),
@@ -163,3 +163,21 @@ class Supported:
query_type tuple.
"""
return bool(query_type in Supported.query_types)
@staticmethod
def map_transport(nos):
"""
Returns "scrape" if input nos is in Supported.scrape tuple, or
"rest" if input nos is in Supported.rest tuple.
"""
transport = None
if nos in Supported.scrape:
transport = "scrape"
elif nos in Supported.rest:
transport = "rest"
return transport
@staticmethod
def map_rest(nos):
uri_map = {"frr": "frr", "bird": "bird"}
return uri_map.get(nos)
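A brief usage sketch of the two new helpers; the example NOS values are assumptions based on the devices referenced elsewhere in this commit (cisco_ios for scrape, frr/bird for rest):

Supported.map_transport("cisco_ios")  # "scrape" -> Netmiko/SSH path
Supported.map_transport("frr")        # "rest"   -> HTTP agent path
Supported.map_rest("frr")             # "frr"    -> URI suffix appended to the agent endpoint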

View File

@@ -24,6 +24,28 @@ class ConfigError(HyperglassError):
return self.message
class CantConnect(HyperglassError):
def __init__(self, message):
super().__init__(message)
self.message = message
def __str__(self):
return self.message
class ParseError(HyperglassError):
"""
Raised when an output parser encounters an error.
"""
def __init__(self, message):
super().__init__(message)
self.message = message
def __str__(self):
return self.message
class UnsupportedDevice(HyperglassError):
"""
Raised when an input NOS is not in the supported NOS list.

View File

@@ -1,71 +0,0 @@
"""
https://github.com/checktheroads/hyperglass
Gunicorn configuration
"""
import os
import shutil
import multiprocessing
from logzero import logger
command = "/usr/local/bin/gunicorn"
pythonpath = "/opt/hyperglass"
bind = "[::1]:8001"
preload = True
workers = multiprocessing.cpu_count() * 2
user = "www-data"
timeout = 60
keepalive = 10
# Prometheus Multiprocessing directory, set as environment variable
prometheus_multiproc_dir = "/tmp/hyperglass_prometheus"
def on_starting(server): # pylint: disable=unused-argument
"""Pre-startup Gunicorn Tasks"""
# Renders Jinja2 -> Sass, compiles Sass -> CSS prior to worker load
try:
import hyperglass.render
hyperglass.render.css()
print(1)
except ImportError as error_exception:
logger.error(f"Exception occurred:\n{error_exception}")
# Verify Redis is running
try:
import hyperglass.configuration
import redis
config = hyperglass.configuration.params()
redis_config = {
"host": config["general"]["redis_host"],
"port": config["general"]["redis_port"],
"charset": "utf-8",
"decode_responses": True,
"db": config["features"]["cache"]["redis_id"],
}
r_cache = redis.Redis(**redis_config)
if r_cache.set("testkey", "testvalue", ex=1):
logger.debug("Redis is working properly")
except (redis.exceptions.ConnectionError):
logger.error("Redis is not running")
raise EnvironmentError("Redis is not running")
# Prometheus multiprocessing directory
if os.path.exists(prometheus_multiproc_dir):
shutil.rmtree(prometheus_multiproc_dir)
else:
os.mkdir(prometheus_multiproc_dir)
os.environ["prometheus_multiproc_dir"] = prometheus_multiproc_dir
def worker_exit(server, worker): # pylint: disable=unused-argument
"""Prometheus multiprocessing WSGI support"""
from prometheus_client import multiprocess
multiprocess.mark_process_dead(worker.pid)
def on_exit(server):
"""Pre-shutdown Gunicorn Tasks"""
if os.path.exists(prometheus_multiproc_dir):
shutil.rmtree(prometheus_multiproc_dir)

View File

@@ -1,22 +1,24 @@
"""
Main Hyperglass Front End
"""
"""Hyperglass Front End"""
# Standard Library Imports
import json
import time
from ast import literal_eval
from pathlib import Path
# Third Party Imports
import redis
from flask import Flask
from flask import Response
from flask import request
from flask_limiter import Limiter
from flask_limiter.util import get_ipaddr
import aredis
from logzero import logger
from prometheus_client import CollectorRegistry
from prometheus_client import Counter
from prometheus_client import generate_latest
from prometheus_client import multiprocess
from sanic import Sanic
from sanic import response
from sanic.exceptions import NotFound
from sanic.exceptions import ServerError
from sanic_limiter import Limiter
from sanic_limiter import RateLimitExceeded
from sanic_limiter import get_remote_address
# Project Imports
from hyperglass import render
@@ -34,17 +36,19 @@ logger.debug(f"Configuration Parameters:\n {params.dict()}")
redis_config = {
"host": params.general.redis_host,
"port": params.general.redis_port,
"charset": "utf-8",
"decode_responses": True,
}
# Main Flask definition
app = Flask(__name__, static_url_path="/static")
# Main Sanic app definition
static_dir = Path(__file__).parent / "static"
logger.debug(f"Static Files: {static_dir}")
app = Sanic(__name__)
app.static("/static", str(static_dir))
# Redis Cache Config
r_cache = redis.Redis(db=params.features.rate_limit.redis_id, **redis_config)
r_cache = aredis.StrictRedis(db=params.features.cache.redis_id, **redis_config)
# Flask-Limiter Config
# Sanic-Limiter Config
query_rate = params.features.rate_limit.query.rate
query_period = params.features.rate_limit.query.period
site_rate = params.features.rate_limit.site.rate
@@ -55,14 +59,20 @@ rate_limit_site = f"{site_rate} per {site_period}"
logger.debug(f"Query rate limit: {rate_limit_query}")
logger.debug(f"Site rate limit: {rate_limit_site}")
# Redis Config for Flask-Limiter storage
# Redis Config for Sanic-Limiter storage
r_limiter_db = params.features.rate_limit.redis_id
r_limiter_url = f'redis://{redis_config["host"]}:{redis_config["port"]}/{r_limiter_db}'
r_limiter = redis.Redis(**redis_config, db=params.features.rate_limit.redis_id)
# Adds Flask config variable for Flask-Limiter
r_limiter_url = "redis://{host}:{port}/{db}".format(
host=params.general.redis_host,
port=params.general.redis_port,
db=params.features.rate_limit.redis_id,
)
r_limiter = aredis.StrictRedis(db=params.features.rate_limit.redis_id, **redis_config)
# Adds Sanic config variable for Sanic-Limiter
app.config.update(RATELIMIT_STORAGE_URL=r_limiter_url)
# Initializes Flask-Limiter
limiter = Limiter(app, key_func=get_ipaddr, default_limits=[rate_limit_site])
# Initializes Sanic-Limiter
limiter = Limiter(app, key_func=get_remote_address, global_limits=[rate_limit_site])
# Prometheus Config
count_data = Counter(
@@ -85,49 +95,50 @@ count_notfound = Counter(
@app.route("/metrics")
def metrics():
@limiter.exempt
async def metrics(request):
"""Prometheus metrics"""
content_type_latest = str("text/plain; version=0.0.4; charset=utf-8")
registry = CollectorRegistry()
multiprocess.MultiProcessCollector(registry)
return Response(generate_latest(registry), mimetype=content_type_latest)
latest = generate_latest(registry)
return response.text(latest)
@app.errorhandler(404)
def handle_404(e):
@app.exception(NotFound)
async def handle_404(request, exception):
"""Renders full error page for invalid URI"""
html = render.html("404")
path = request.path
client_addr = get_ipaddr()
count_notfound.labels(e, path, client_addr).inc()
logger.error(f"Error: {e}, Path: {path}, Source: {client_addr}")
return html, 404
client_addr = get_remote_address(request)
count_notfound.labels(exception, path, client_addr).inc()
logger.error(f"Error: {exception}, Path: {path}, Source: {client_addr}")
return response.html(html, status=404)
@app.errorhandler(429)
def handle_429(e):
@app.exception(RateLimitExceeded)
async def handle_429(request, exception):
"""Renders full error page for too many site queries"""
html = render.html("429")
client_addr = get_ipaddr()
count_ratelimit.labels(e, client_addr).inc()
logger.error(f"Error: {e}, Source: {client_addr}")
return html, 429
client_addr = get_remote_address(request)
count_ratelimit.labels(exception, client_addr).inc()
logger.error(f"Error: {exception}, Source: {client_addr}")
return response.html(html, status=429)
@app.errorhandler(500)
def handle_500(e):
@app.exception(ServerError)
async def handle_500(request, exception):
"""General Error Page"""
client_addr = get_ipaddr()
count_errors.labels(500, e, client_addr, None, None, None).inc()
logger.error(f"Error: {e}, Source: {client_addr}")
client_addr = get_remote_address(request)
count_errors.labels(500, exception, client_addr, None, None, None).inc()
logger.error(f"Error: {exception}, Source: {client_addr}")
html = render.html("500")
return html, 500
return response.html(html, status=500)
def clear_cache():
async def clear_cache():
"""Function to clear the Redis cache"""
try:
r_cache.flushdb()
await r_cache.flushdb()
except Exception as error_exception:
logger.error(f"Error clearing cache: {error_exception}")
raise HyperglassError(f"Error clearing cache: {error_exception}")
@@ -135,60 +146,59 @@ def clear_cache():
@app.route("/", methods=["GET"])
@limiter.limit(rate_limit_site, error_message="Site")
def site():
async def site(request):
"""Main front-end web application"""
return render.html("index")
return response.html(render.html("index"))
@app.route("/test", methods=["GET"])
def test_route():
async def test_route(request):
"""Test route for various tests"""
html = render.html("500")
return html, 500
return response.html(html, status=500)
@app.route("/locations/<asn>", methods=["GET"])
def get_locations(asn):
async def get_locations(request, asn):
"""
Flask GET route provides a JSON list of all locations for the
selected network/ASN.
GET route provides a JSON list of all locations for the selected
network/ASN.
"""
locations_list_json = json.dumps(devices.locations[asn])
logger.debug(f"Locations list:{devices.locations[asn]}")
return locations_list_json
return response.json(devices.locations[asn])
@app.route("/lg", methods=["POST"])
# Invoke Flask-Limiter with configured rate limit
@limiter.limit(rate_limit_query, error_message="Query")
def hyperglass_main():
async def hyperglass_main(request):
"""
Main backend application initiator. Ingests Ajax POST data from
form submit, passes it to the backend application to perform the
filtering/lookups.
"""
# Get JSON data from Ajax POST
lg_data = request.get_json()
lg_data = request.json
logger.debug(f"Unvalidated input: {lg_data}")
# Return error if no target is specified
if not lg_data["target"]:
logger.debug("No input specified")
return Response(params.messages.no_input, code.invalid)
return response.html(params.messages.no_input, status=code.invalid)
# Return error if no location is selected
if lg_data["location"] not in devices.hostnames:
logger.debug("No selection specified")
return Response(params.messages.no_location, code.invalid)
return response.html(params.messages.no_location, status=code.invalid)
# Return error if no query type is selected
if not Supported.is_supported_query(lg_data["type"]):
logger.debug("No query specified")
return Response(params.messages.no_query_type, code.invalid)
return response.html(params.messages.no_query_type, status=code.invalid)
# Get client IP address for Prometheus logging & rate limiting
client_addr = get_ipaddr()
client_addr = get_remote_address(request)
# Increment Prometheus counter
count_data.labels(
client_addr, lg_data["type"], lg_data["location"], lg_data["target"]
).inc()
logger.debug(f"Client Address: {client_addr}")
# Stringify the form response containing serialized JSON for the
# request, use as key for k/v cache store so each command output
# value is unique
@@ -197,24 +207,26 @@ def hyperglass_main():
cache_timeout = params.features.cache.timeout
logger.debug(f"Cache Timeout: {cache_timeout}")
# Check if cached entry exists
if not r_cache.get(cache_key):
if not await r_cache.get(cache_key):
logger.debug(f"Sending query {cache_key} to execute module...")
# Pass request to execution module
cache_value = Execute(lg_data).response()
logger.debug("Validated Response...")
logger.debug(f"Status: {cache_value[1]}")
logger.debug(f"Output:\n {cache_value[0]}")
starttime = time.time()
cache_value = await Execute(lg_data).response()
endtime = time.time()
elapsedtime = round(endtime - starttime, 4)
logger.debug(
f"Execution for query {cache_key} took {elapsedtime} seconds to run."
)
# Create a cache entry
r_cache.set(cache_key, str(cache_value))
r_cache.expire(cache_key, cache_timeout)
await r_cache.set(cache_key, str(cache_value))
await r_cache.expire(cache_key, cache_timeout)
logger.debug(f"Added cache entry for query: {cache_key}")
logger.error(f"Unable to add output to cache: {cache_key}")
# If it does, return the cached entry
cache_response = r_cache.get(cache_key)
response = literal_eval(cache_response)
response_output, response_status = response
cache_response = await r_cache.get(cache_key)
# Serialize stringified tuple response from cache
serialized_response = literal_eval(cache_response)
response_output, response_status = serialized_response
logger.debug(f"Cache match for: {cache_key}, returning cached entry")
logger.debug(f"Cache Output: {response_output}")
@@ -229,4 +241,4 @@ def hyperglass_main():
lg_data["location"],
lg_data["target"],
).inc()
return Response(*response)
return response.html(response_output, status=response_status)
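The interleaved old and new lines make the reworked cache flow in hyperglass_main() hard to follow. Reduced to the new code path it is roughly the following (the cache_key construction falls outside the hunk and is left elided here):

if not await r_cache.get(cache_key):
    starttime = time.time()
    cache_value = await Execute(lg_data).response()
    elapsedtime = round(time.time() - starttime, 4)
    logger.debug(f"Execution for query {cache_key} took {elapsedtime} seconds to run.")
    await r_cache.set(cache_key, str(cache_value))
    await r_cache.expire(cache_key, cache_timeout)
cache_response = await r_cache.get(cache_key)
response_output, response_status = literal_eval(cache_response)
return response.html(response_output, status=response_status)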

View File

@@ -7,7 +7,7 @@ Requires=redis-server.service
User=www-data
Group=www-data
WorkingDirectory=/opt/hyperglass
ExecStart=/usr/local/bin/gunicorn -c /opt/hyperglass/hyperglass/gunicorn_config.py hyperglass.wsgi
ExecStart=/usr/local/bin/python3 -m sanic hyperglass.web.app
[Install]
WantedBy=multi-user.target

View File

@@ -81,7 +81,7 @@ link: <a href="#" id="helplink_bgpc">{{ general.org_name }} BGP Communities</a>
Performs BGP table lookup based on [Extended](https://tools.ietf.org/html/rfc4360) \
or [Large](https://tools.ietf.org/html/rfc8195) community value.
{{ info["link"] }}
{{ info["link"] | safe }}
""",
"bgp_aspath": """
---
@@ -90,7 +90,7 @@ link: <a href="#" id="helplink_bgpa">Supported BGP AS Path Expressions</a>
---
Performs BGP table lookup based on `AS_PATH` regular expression.
{{ info["link"] }}
{{ info["link"] | safe }}
""",
"ping": """
---

View File

@@ -9,7 +9,7 @@ clearPage();
// Bulma Toggable Dropdown - help text
$('#help-dropdown').click(
function(event) {
function (event) {
event.stopPropagation();
$(this).toggleClass('is-active');
}
@@ -18,21 +18,21 @@ $('#help-dropdown').click(
// ClipboardJS Elements
var btn_copy = document.getElementById('btn-copy');
var clipboard = new ClipboardJS(btn_copy);
clipboard.on('success', function(e) {
clipboard.on('success', function (e) {
console.log(e);
$('#btn-copy').addClass('is-success').addClass('is-outlined');
$('#copy-icon').removeClass('icofont-ui-copy').addClass('icofont-check');
setTimeout(function() {
setTimeout(function () {
$('#btn-copy').removeClass('is-success').removeClass('is-outlined');
$('#copy-icon').removeClass('icofont-check').addClass('icofont-ui-copy');
}, 1000);
});
clipboard.on('error', function(e) {
clipboard.on('error', function (e) {
console.log(e);
});
$('.modal-background, .modal-close').click(
function(event) {
function (event) {
event.stopPropagation();
$('.modal').removeClass("is-active");
}
@@ -40,21 +40,21 @@ $('.modal-background, .modal-close').click(
// Adjust behavior of help text dropdown based on device screen size
$('#help-dropdown-button').click(
function(event) {
function (event) {
if (window.innerWidth < 1024) {
$('#help-dropdown').removeClass('is-right');
$('.lg-help').addClass('lg-help-mobile').removeClass('lg-help');
}
$('#help-dropdown').removeClass('is-right');
$('.lg-help').addClass('lg-help-mobile').removeClass('lg-help');
}
}
);
function adjustDropdowns() {
var actual_width = window.innerWidth;
if (actual_width < 1024) {
$('#lg-netlocdropdown').removeClass('has-addons').removeClass('has-addons-centered').addClass('is-grouped').addClass('is-grouped-centered').addClass('is-grouped-multiline');
$('#network').css('width', actual_width * 0.85);
$('#location').css('width', actual_width * 0.85);
}
var actual_width = window.innerWidth;
if (actual_width < 1024) {
$('#lg-netlocdropdown').removeClass('has-addons').removeClass('has-addons-centered').addClass('is-grouped').addClass('is-grouped-centered').addClass('is-grouped-multiline');
$('#network').css('width', actual_width * 0.85);
$('#location').css('width', actual_width * 0.85);
}
}
function clearErrors() {
@@ -81,179 +81,179 @@ function clearPage() {
}
function prepResults() {
progress.show();
resultsbox.show();
progress.show();
resultsbox.show();
}
$(document).ready(function() {
$(document).ready(function () {
var defaultasn = $("#network").val();
$.ajax({
url: '/locations/'+defaultasn,
url: '/locations/' + defaultasn,
context: document.body,
type: 'get',
success: function(data) {
selectedRouters = JSON.parse(data);
success: function (data) {
selectedRouters = data;
console.log(selectedRouters);
updateRouters(selectedRouters);
},
error: function(err) {
error: function (err) {
console.log(err);
}
});
});
$('#network').on('change', (function(event) {
var asn = $("select[id=network").val();
$('#location').children(":not(#text_location)").remove();
$.ajax({
url: '/locations/'+asn,
type: 'get',
success: function(data) {
clearPage();
updateRouters(JSON.parse(data));
},
error: function(err) {
console.log(err);
}
});
$('#network').on('change', (function (event) {
var asn = $("select[id=network").val();
$('#location').children(":not(#text_location)").remove();
$.ajax({
url: '/locations/' + asn,
type: 'get',
success: function (data) {
clearPage();
updateRouters(JSON.parse(data));
},
error: function (err) {
console.log(err);
}
});
}));
function updateRouters(locations) {
locations.forEach(function(r) {
locations.forEach(function (r) {
$('#location').append($("<option>").attr('value', r.hostname).text(r.display_name));
});
}
$('#helplink_bgpc').click(function(event) {
$('#helplink_bgpc').click(function (event) {
$('#help_bgp_community').addClass("is-active");
});
$('#helplink_bgpa').click(function(event) {
$('#helplink_bgpa').click(function (event) {
$('#help_bgp_aspath').addClass("is-active");
});
// Submit Form Action
$('#lgForm').on('submit', function() {
submitForm();
$('#lgForm').on('submit', function () {
submitForm();
});
function submitForm() {
clearErrors();
var type = $('#type option:selected').val();
var type_title = $('#type option:selected').text();
var network = $('#network option:selected').val();
var location = $('#location option:selected').val();
var location_name = $('#location option:selected').text();
var target = $('#target').val();
clearErrors();
var type = $('#type option:selected').val();
var type_title = $('#type option:selected').text();
var network = $('#network option:selected').val();
var location = $('#location option:selected').val();
var location_name = $('#location option:selected').text();
var target = $('#target').val();
var tags = [
'<div class="field is-grouped is-grouped-multiline">',
'<div class="control">',
'<div class="tags has-addons">',
'<span class="tag lg-tag-loc-title">AS',
network,
'</span>',
'<span class="tag lg-tag-loc">',
location_name,
'</span>',
'</div>',
'</div>',
var tags = [
'<div class="field is-grouped is-grouped-multiline">',
'<div class="control">',
'<div class="tags has-addons">',
'<span class="tag lg-tag-type-title">',
type_title,
'</span>',
'<span class="tag lg-tag-type">',
target,
'</span>',
'</div>',
'<span class="tag lg-tag-loc-title">AS',
network,
'</span>',
'<span class="tag lg-tag-loc">',
location_name,
'</span>',
'</div>',
'</div>'
].join('');
'</div>',
'<div class="control">',
'<div class="tags has-addons">',
'<span class="tag lg-tag-type-title">',
type_title,
'</span>',
'<span class="tag lg-tag-type">',
target,
'</span>',
'</div>',
'</div>',
'</div>'
].join('');
$('#output').text("");
$('#queryInfo').text("");
$('#queryInfo').html(tags);
$('#output').text("");
$('#queryInfo').text("");
$('#queryInfo').html(tags);
$.ajax(
{
url: '/lg',
type: 'POST',
data: JSON.stringify(
{
location: location,
type: type,
target: target
}
),
contentType: "application/json; charset=utf-8",
context: document.body,
readyState: prepResults(),
statusCode: {
200: function(response, code) {
response_html = [
'<br>',
'<div class="content">',
'<p class="query-output" id="output">',
response,
'</p>',
'</div>',
];
progress.hide();
$('#output').html(response_html);
},
401: function(response, code) {
response_html = [
'<br>',
'<div class="notification is-danger">',
response.responseText,
'</div>',
].join('');
clearPage();
target_error.show();
target_input.addClass('is-danger');
target_error.html(response_html);
},
405: function(response, code) {
response_html = [
'<br>',
'<div class="notification is-warning">',
response.responseText,
'</div>',
].join('');
clearPage();
target_error.show();
target_input.addClass('is-warning');
target_error.html(response_html);
},
415: function(response, code) {
response_html = [
'<br>',
'<div class="notification is-danger">',
response.responseText,
'</div>',
].join('');
clearPage();
target_error.show();
target_input.addClass('is-danger');
target_error.html(response_html);
},
429: function(response, code) {
clearPage();
$("#ratelimit").addClass("is-active");
},
504: function(response, code) {
response_html = [
'<br>',
'<div class="notification is-danger">',
response.responseText,
'</div>',
].join('');
clearPage();
target_error.show();
target_error.html(response_html);
}
}
}
);
$.ajax(
{
url: '/lg',
type: 'POST',
data: JSON.stringify(
{
location: location,
type: type,
target: target
}
),
contentType: "application/json; charset=utf-8",
context: document.body,
readyState: prepResults(),
statusCode: {
200: function (response, code) {
response_html = [
'<br>',
'<div class="content">',
'<p class="query-output" id="output">',
response,
'</p>',
'</div>',
];
progress.hide();
$('#output').html(response_html);
},
401: function (response, code) {
response_html = [
'<br>',
'<div class="notification is-danger">',
response.responseText,
'</div>',
].join('');
clearPage();
target_error.show();
target_input.addClass('is-danger');
target_error.html(response_html);
},
405: function (response, code) {
response_html = [
'<br>',
'<div class="notification is-warning">',
response.responseText,
'</div>',
].join('');
clearPage();
target_error.show();
target_input.addClass('is-warning');
target_error.html(response_html);
},
415: function (response, code) {
response_html = [
'<br>',
'<div class="notification is-danger">',
response.responseText,
'</div>',
].join('');
clearPage();
target_error.show();
target_input.addClass('is-danger');
target_error.html(response_html);
},
429: function (response, code) {
clearPage();
$("#ratelimit").addClass("is-active");
},
504: function (response, code) {
response_html = [
'<br>',
'<div class="notification is-danger">',
response.responseText,
'</div>',
].join('');
clearPage();
target_error.show();
target_error.html(response_html);
}
}
}
);
}

hyperglass/web.py Normal file
View File

@@ -0,0 +1,50 @@
"""
Hyperglass web app initiator. Launches Sanic with appropriate number of
workers per their documentation (equal to number of CPU cores).
"""
# Override web server listen host & port if necessary:
host = "localhost"
port = 8001
try:
import multiprocessing
from hyperglass import render
from hyperglass import hyperglass
from hyperglass.configuration import params
except ImportError as import_error:
raise RuntimeError(import_error)
if params.general.debug:
debug = True
access_log = False
elif not params.general.debug:
debug = False
access_log = True
# Override the number of web server workers if necessary:
workers = multiprocessing.cpu_count()
def start():
"""
Compiles configured Sass variables to CSS, then starts Sanic web
server.
"""
try:
render.css()
except Exception as render_error:
raise RuntimeError(render_error)
try:
hyperglass.app.run(
host=host,
port=port,
debug=params.general.debug,
workers=workers,
access_log=access_log,
)
except Exception as hyperglass_error:
raise RuntimeError(hyperglass_error)
app = start()

View File

@@ -1,17 +0,0 @@
"""
https://github.com/checktheroads/hyperglass
Gunicorn WSGI Target
"""
# Standard Library Imports
import os
# Project Imports
import hyperglass.hyperglass
application = hyperglass.hyperglass.app
hyperglass_root = os.path.dirname(hyperglass.__file__)
static = os.path.join(hyperglass_root, "static")
if __name__ == "__main__":
application.run(static_folder=static)