1
0
mirror of https://github.com/checktheroads/hyperglass synced 2024-05-11 05:55:08 +00:00
Files
checktheroads-hyperglass/hyperglass/hyperglass.py

485 lines
16 KiB
Python
Raw Normal View History

"""Hyperglass Front End"""
# Standard Library Imports
2019-09-09 00:18:01 -07:00
import operator
import time
from pathlib import Path
2019-06-10 12:22:38 -07:00
# Third Party Imports
import aredis
2019-09-25 22:40:02 -07:00
from logzero import logger as log
from prometheus_client import CollectorRegistry
from prometheus_client import Counter
from prometheus_client import generate_latest
from prometheus_client import multiprocess
2019-07-29 22:13:11 -07:00
from prometheus_client import CONTENT_TYPE_LATEST
from sanic import Sanic
from sanic import response
from sanic.exceptions import NotFound
from sanic.exceptions import ServerError
2019-08-31 23:50:02 -07:00
from sanic.exceptions import InvalidUsage
2019-09-04 01:29:49 -07:00
from sanic.exceptions import ServiceUnavailable
from sanic_limiter import Limiter
from sanic_limiter import RateLimitExceeded
from sanic_limiter import get_remote_address
2019-05-07 23:21:41 -07:00
# Project Imports
from hyperglass.render import render_html
from hyperglass.command.execute import Execute
from hyperglass.configuration import devices
2019-09-30 07:51:17 -07:00
from hyperglass.configuration import vrfs
from hyperglass.configuration import logzero_config # noqa: F401
2019-10-04 17:17:08 -07:00
from hyperglass.configuration import stack # NOQA: F401
from hyperglass.configuration import params
from hyperglass.constants import Supported
2019-08-31 23:50:02 -07:00
from hyperglass.exceptions import (
HyperglassError,
AuthError,
ScrapeError,
RestError,
InputInvalid,
InputNotAllowed,
2019-09-03 00:44:17 -07:00
DeviceTimeout,
2019-08-31 23:50:02 -07:00
)
2019-05-07 23:21:41 -07:00
2019-09-25 22:40:02 -07:00
log.debug(f"Configuration Parameters:\n {params.dict()}")
2019-06-15 12:42:28 -07:00
# Redis Config
# Connection parameters shared by both Redis clients (result cache and
# rate-limiter storage); each client selects its own db index below.
redis_config = {
    "host": params.general.redis_host,
    "port": params.general.redis_port,
    # Return str instead of bytes from Redis responses.
    "decode_responses": True,
}
# Static File Definitions
static_dir = Path(__file__).parent / "static" / "ui"
# Main Sanic app definition
log.debug(f"Static Files: {static_dir}")
app = Sanic(__name__)
# Serve the compiled frontend assets under /ui.
app.static("/ui", str(static_dir))
log.debug(app.config)
# Redis Cache Config — stores cached query results keyed by query dict.
r_cache = aredis.StrictRedis(db=params.features.cache.redis_id, **redis_config)
# Sanic-Limiter Config
query_rate = params.features.rate_limit.query.rate
query_period = params.features.rate_limit.query.period
site_rate = params.features.rate_limit.site.rate
site_period = params.features.rate_limit.site.period
#
# sanic-limiter rate strings, e.g. "5 per minute".
rate_limit_query = f"{query_rate} per {query_period}"
rate_limit_site = f"{site_rate} per {site_period}"
log.debug(f"Query rate limit: {rate_limit_query}")
log.debug(f"Site rate limit: {rate_limit_site}")
# Redis Config for Sanic-Limiter storage
r_limiter_db = params.features.rate_limit.redis_id
r_limiter_url = "redis://{host}:{port}/{db}".format(
    host=params.general.redis_host,
    port=params.general.redis_port,
    db=params.features.rate_limit.redis_id,
)
r_limiter = aredis.StrictRedis(db=params.features.rate_limit.redis_id, **redis_config)
# Adds Sanic config variable for Sanic-Limiter
app.config.update(RATELIMIT_STORAGE_URL=r_limiter_url)
# Initializes Sanic-Limiter; the site-wide limit applies to every route
# unless a route is exempted or overrides it.
limiter = Limiter(app, key_func=get_remote_address, global_limits=[rate_limit_site])
# Prometheus Config
# NOTE: label cardinality/order matters — every .labels() call must pass
# exactly these labels in this order.
count_data = Counter(
    "count_data", "Query Counter", ["source", "query_type", "loc_id", "target", "vrf"]
)
count_errors = Counter(
    "count_errors",
    "Error Counter",
    ["reason", "source", "query_type", "loc_id", "target"],
)
count_ratelimit = Counter(
    "count_ratelimit", "Rate Limit Counter", ["message", "source"]
)
count_notfound = Counter(
    "count_notfound", "404 Not Found Counter", ["message", "path", "source"]
)
@app.route("/metrics")
@limiter.exempt
async def metrics(request):
    """Prometheus metrics"""
    # Collect from all worker processes, then serialize to the
    # Prometheus text exposition format.
    registry = CollectorRegistry()
    multiprocess.MultiProcessCollector(registry)
    payload = generate_latest(registry)
    metric_headers = {
        "Content-Type": CONTENT_TYPE_LATEST,
        "Content-Length": str(len(payload)),
    }
    return response.text(payload, headers=metric_headers)
2019-06-10 12:22:38 -07:00
2019-08-31 23:50:02 -07:00
@app.exception(InvalidUsage)
async def handle_frontend_errors(request, exception):
    """Handles user-facing feedback related to frontend/input errors"""
    source_ip = get_remote_address(request)
    err = exception.args[0]
    err_alert = err["alert"]
    log.info(err)
    # Record the failed query against the error counter.
    request_json = request.json
    count_errors.labels(
        "Front End Error",
        source_ip,
        request_json.get("query_type"),
        request_json.get("location"),
        request_json.get("target"),
    ).inc()
    log.error(f'Error: {err["message"]}, Source: {source_ip}')
    payload = {
        "output": err["message"],
        "alert": err_alert,
        "keywords": err["keywords"],
    }
    return response.json(payload, status=400)
2019-09-04 01:29:49 -07:00
@app.exception(ServiceUnavailable)
async def handle_backend_errors(request, exception):
    """Handles user-facing feedback related to backend errors"""
    source_ip = get_remote_address(request)
    err = exception.args[0]
    err_alert = err["alert"]
    log.info(err)
    # Record the failed query against the error counter.
    request_json = request.json
    count_errors.labels(
        "Back End Error",
        source_ip,
        request_json.get("query_type"),
        request_json.get("location"),
        request_json.get("target"),
    ).inc()
    log.error(f'Error: {err["message"]}, Source: {source_ip}')
    payload = {
        "output": err["message"],
        "alert": err_alert,
        "keywords": err["keywords"],
    }
    return response.json(payload, status=503)
2019-08-31 23:50:02 -07:00
@app.exception(NotFound)
async def handle_404(request, exception):
    """Renders full error page for invalid URI"""
    requested_path = request.path
    source_ip = get_remote_address(request)
    rendered = render_html("404", uri=requested_path)
    count_notfound.labels(exception, requested_path, source_ip).inc()
    log.error(f"Error: {exception}, Path: {requested_path}, Source: {source_ip}")
    return response.html(rendered, status=404)
2019-05-12 19:22:17 -07:00
@app.exception(RateLimitExceeded)
async def handle_429(request, exception):
    """Renders full error page for too many site queries"""
    source_ip = get_remote_address(request)
    rendered = render_html("ratelimit-site")
    count_ratelimit.labels(exception, source_ip).inc()
    log.error(f"Error: {exception}, Source: {source_ip}")
    return response.html(rendered, status=429)
2019-05-07 23:21:41 -07:00
@app.exception(ServerError)
async def handle_500(request, exception):
    """General Error Page"""
    client_addr = get_remote_address(request)
    # BUG FIX: count_errors is declared with exactly 5 labels
    # (reason, source, query_type, loc_id, target), but the original call
    # passed 6 values, making prometheus_client raise ValueError inside
    # the error handler and masking the real 500 page.
    count_errors.labels(500, client_addr, None, None, None).inc()
    log.error(f"Error: {exception}, Source: {client_addr}")
    html = render_html("500")
    return response.html(html, status=500)
2019-05-07 23:21:41 -07:00
async def clear_cache():
    """Flush the Redis database used for cached query results.

    Returns:
        str: confirmation message on success.

    Raises:
        HyperglassError: if the flush fails for any reason.
    """
    try:
        await r_cache.flushdb()
        return "Successfully cleared cache"
    except Exception as error_exception:
        log.error(f"Error clearing cache: {error_exception}")
        # Chain the original exception so the traceback keeps the root cause.
        raise HyperglassError(
            f"Error clearing cache: {error_exception}"
        ) from error_exception
2019-05-07 23:21:41 -07:00
@app.route("/", methods=["GET"])
@limiter.limit(rate_limit_site, error_message="Site")
async def site(request):
    """Main front-end web application"""
    rendered = render_html("form", primary_asn=params.general.primary_asn)
    return response.html(rendered)
2019-05-07 23:21:41 -07:00
@app.route("/test", methods=["GET"])
async def test_route(request):
    """Test route for various tests"""
    # Deliberately returns the rendered 500 page for testing purposes.
    return response.html(render_html("500"), status=500)
2019-05-07 23:21:41 -07:00
2019-09-09 12:18:26 -07:00
async def validate_input(query_data):  # noqa: C901
    """
    Validates and normalizes raw query input from the frontend/API.

    Deletes any globally unsupported query parameters, then validates
    each field:
        - query_target: non-empty string
        - query_location: non-empty string naming a defined device
        - query_type: non-empty string naming a supported, enabled query
        - query_vrf (optional): string matching a configured VRF display
          name; replaced with the configured VRF key name, or "default"
          when absent

    Returns:
        dict: the filtered query data with query_vrf normalized.

    Raises:
        InvalidUsage: on any validation failure (rendered as HTTP 400).
    """

    def _invalid(message, keywords):
        """Raise InvalidUsage with the standard frontend error payload."""
        raise InvalidUsage(
            {"message": message, "alert": "warning", "keywords": keywords}
        )

    # Delete any globally unsupported parameters
    supported_query_data = {
        k: v for k, v in query_data.items() if k in Supported.query_parameters
    }
    # Unpack query data
    query_location = supported_query_data.get("query_location", "")
    query_type = supported_query_data.get("query_type", "")
    query_target = supported_query_data.get("query_target", "")
    query_vrf = supported_query_data.get("query_vrf", "")

    # Verify that query_target is a non-empty string
    if not query_target:
        log.debug("No input specified")
        _invalid(
            params.messages.no_input.format(field=params.branding.text.query_target),
            [params.branding.text.query_target],
        )
    if not isinstance(query_target, str):
        log.debug("Target is not a string")
        _invalid(
            params.messages.invalid_field.format(
                input=query_target, field=params.branding.text.query_target
            ),
            [params.branding.text.query_target, query_target],
        )

    # Verify that query_location is a non-empty string naming a defined device
    if not query_location:
        log.debug("No selection specified")
        _invalid(
            params.messages.no_input.format(field=params.branding.text.query_location),
            [params.branding.text.query_location],
        )
    if not isinstance(query_location, str):
        log.debug("Query Location is not a string")
        _invalid(
            params.messages.invalid_field.format(
                input=query_location, field=params.branding.text.query_location
            ),
            [params.branding.text.query_location, query_location],
        )
    if query_location not in devices.hostnames:
        _invalid(
            params.messages.invalid_field.format(
                input=query_location, field=params.branding.text.query_location
            ),
            [params.branding.text.query_location, query_location],
        )

    # Verify that query_type is a non-empty string
    if not query_type:
        log.debug("No query specified")
        _invalid(
            params.messages.no_input.format(field=params.branding.text.query_type),
            # BUG FIX: originally reported query_location's field name here.
            [params.branding.text.query_type],
        )
    if not isinstance(query_type, str):
        log.debug("Query Type is not a string")
        _invalid(
            params.messages.invalid_field.format(
                input=query_type, field=params.branding.text.query_type
            ),
            [params.branding.text.query_type, query_type],
        )
    # Verify that query_type is actually supported and enabled
    if not Supported.is_supported_query(query_type):
        log.debug("Query not supported")
        _invalid(
            params.messages.invalid_field.format(
                input=query_type, field=params.branding.text.query_type
            ),
            # BUG FIX: originally reported query_location's field name here.
            [params.branding.text.query_type, query_type],
        )
    if not operator.attrgetter(f"{query_type}.enable")(params.features):
        _invalid(
            params.messages.invalid_field.format(
                input=query_type, field=params.branding.text.query_type
            ),
            # BUG FIX: originally reported query_location's field name here.
            [params.branding.text.query_type, query_type],
        )

    # Verify that query_vrf, if present, is a string
    if query_vrf and not isinstance(query_vrf, str):
        _invalid(
            params.messages.invalid_field.format(
                input=query_vrf, field=params.branding.text.query_vrf
            ),
            [params.branding.text.query_vrf, query_vrf],
        )
    if query_vrf:
        # Map configured display names to VRF key names. BUG FIX: the
        # original accepted any *substring* match via any(vrf in query_vrf)
        # but then looked the key up by exact equality, so a partial match
        # passed validation and crashed with IndexError; require an exact
        # display-name match instead.
        display_name_map = {
            v["display_name"]: k for k, v in vrfs.vrfs.items()
        }
        if query_vrf not in display_name_map:
            display_device = getattr(devices, query_location)
            raise InvalidUsage(
                {
                    "message": params.messages.vrf_not_associated.format(
                        vrf_name=query_vrf, device_name=display_device.display_name
                    ),
                    "alert": "warning",
                    "keywords": [query_vrf, query_location],
                }
            )
        # Replace the UI/API display name with the configured VRF key name.
        supported_query_data["query_vrf"] = display_name_map[query_vrf]
    else:
        supported_query_data["query_vrf"] = "default"
    log.debug(f"Validated Query: {supported_query_data}")
    return supported_query_data
2019-09-09 12:18:26 -07:00
@app.route("/query", methods=["POST"])
@limiter.limit(
    rate_limit_query,
    error_message={
        "output": params.features.rate_limit.query.message,
        "alert": "danger",
        "keywords": [],
    },
)
async def hyperglass_main(request):
    """
    Main backend application initiator. Ingests Ajax POST data from
    form submit, passes it to the backend application to perform the
    filtering/lookups.
    """
    # Get JSON data from Ajax POST
    raw_query_data = request.json
    log.debug(f"Unvalidated input: {raw_query_data}")
    # Perform basic input validation; raises InvalidUsage (HTTP 400) on
    # bad input, handled by handle_frontend_errors.
    query_data = await validate_input(raw_query_data)
    # Get client IP address for Prometheus logging & rate limiting
    client_addr = get_remote_address(request)
    # Increment Prometheus counter (labels: source, query_type, loc_id,
    # target, vrf — must match the count_data declaration order).
    count_data.labels(
        client_addr,
        query_data.get("query_type"),
        query_data.get("query_location"),
        query_data.get("query_target"),
        query_data.get("query_vrf"),
    ).inc()
    log.debug(f"Client Address: {client_addr}")
    # Stringify the form response containing serialized JSON for the
    # request, use as key for k/v cache store so each command output
    # value is unique
    cache_key = str(query_data)
    # Define cache entry expiry time (seconds)
    cache_timeout = params.features.cache.timeout
    log.debug(f"Cache Timeout: {cache_timeout}")
    # Check if cached entry exists; only run the device query on a miss.
    if not await r_cache.get(cache_key):
        log.debug(f"Sending query {cache_key} to execute module...")
        # Pass request to execution module, timing the round trip for
        # debug logging.
        try:
            starttime = time.time()
            cache_value = await Execute(query_data).response()
            endtime = time.time()
            elapsedtime = round(endtime - starttime, 4)
            log.debug(f"Query {cache_key} took {elapsedtime} seconds to run.")
        # Translate backend exceptions into Sanic HTTP exceptions so the
        # registered handlers produce the user-facing JSON payloads.
        except (InputInvalid, InputNotAllowed) as frontend_error:
            raise InvalidUsage(frontend_error.__dict__())
        except (AuthError, RestError, ScrapeError, DeviceTimeout) as backend_error:
            raise ServiceUnavailable(backend_error.__dict__())
        # An empty result is treated as an internal error (HTTP 500).
        if not cache_value:
            raise ServerError(
                {"message": params.messages.general, "alert": "danger", "keywords": []}
            )
        # Create a cache entry with an expiry
        await r_cache.set(cache_key, str(cache_value))
        await r_cache.expire(cache_key, cache_timeout)
        log.debug(f"Added cache entry for query: {cache_key}")
    # If it does, return the cached entry (the miss branch above has just
    # populated it, so this read always succeeds).
    cache_response = await r_cache.get(cache_key)
    response_output = cache_response
    log.debug(f"Cache match for: {cache_key}, returning cached entry")
    log.debug(f"Cache Output: {response_output}")
    return response.json({"output": response_output}, status=200)