# Mirror of https://github.com/checktheroads/hyperglass
# Synced 2024-05-11 05:55:08 +00:00
# File: checktheroads-hyperglass/hyperglass/hyperglass.py (520 lines, 17 KiB, Python)
"""Hyperglass Front End."""
# Standard Library Imports
import asyncio
# commit timestamp (scrape artifact): 2019-09-09 00:18:01 -07:00
import operator
import os
import tempfile
import time
from pathlib import Path
# commit timestamp (scrape artifact): 2019-06-10 12:22:38 -07:00
# Third Party Imports
import aredis
from prometheus_client import CONTENT_TYPE_LATEST
from prometheus_client import CollectorRegistry
from prometheus_client import Counter
from prometheus_client import generate_latest
from prometheus_client import multiprocess
from sanic import Sanic
from sanic import response
from sanic.exceptions import InvalidUsage
from sanic.exceptions import NotFound
from sanic.exceptions import ServerError
# commit timestamp (scrape artifact): 2019-09-04 01:29:49 -07:00
from sanic.exceptions import ServiceUnavailable
from sanic_limiter import Limiter
from sanic_limiter import RateLimitExceeded
from sanic_limiter import get_remote_address
# commit timestamp (scrape artifact): 2019-05-07 23:21:41 -07:00
# Project Imports
from hyperglass.command.execute import Execute
from hyperglass.configuration import devices
from hyperglass.configuration import params
from hyperglass.constants import Supported
from hyperglass.exceptions import AuthError
from hyperglass.exceptions import DeviceTimeout
from hyperglass.exceptions import HyperglassError
from hyperglass.exceptions import InputInvalid
from hyperglass.exceptions import InputNotAllowed
from hyperglass.exceptions import ResponseEmpty
from hyperglass.exceptions import RestError
from hyperglass.exceptions import ScrapeError
from hyperglass.render import render_html
from hyperglass.util import check_python
from hyperglass.util import cpu_count
from hyperglass.util import log
# Verify that the running interpreter meets hyperglass's minimum Python version.
try:
    python_version = check_python()
    log.info(f"Python {python_version} detected.")
except RuntimeError as runtime_error:
    raise HyperglassError(str(runtime_error), alert="danger") from None

log.debug(f"Configuration Parameters: {params.dict(by_alias=True)}")

# Prometheus multiprocess mode requires a writable directory shared by all
# worker processes; point it at a throwaway temp dir for this run.
tempdir = tempfile.TemporaryDirectory(prefix="hyperglass_")
os.environ["prometheus_multiproc_dir"] = tempdir.name
# Static file location for the web UI assets.
static_dir = Path(__file__).parent / "static" / "ui"
log.debug(f"Static Files: {static_dir}")

# Main Sanic application; serves the UI assets under /ui.
app = Sanic(__name__)
app.static("/ui", str(static_dir))
log.debug(app.config)

# Sanic web server parameters, driven entirely by the hyperglass config.
APP_PARAMS = dict(
    host=params.general.listen_address,
    port=params.general.listen_port,
    debug=params.general.debug,
    workers=cpu_count(),
    access_log=params.general.debug,
    auto_reload=params.general.debug,
)

# Shared Redis connection parameters (DB number is supplied per connection).
redis_config = dict(
    host=params.general.redis_host,
    port=params.general.redis_port,
    decode_responses=True,
)
# Sanic-Limiter configuration: limits are expressed as "<count> per <period>".
query_rate = params.features.rate_limit.query.rate
query_period = params.features.rate_limit.query.period
site_rate = params.features.rate_limit.site.rate
site_period = params.features.rate_limit.site.period

rate_limit_query = f"{query_rate} per {query_period}"
rate_limit_site = f"{site_rate} per {site_period}"

log.debug(f"Query rate limit: {rate_limit_query}")
log.debug(f"Site rate limit: {rate_limit_site}")

# Redis storage URL for Sanic-Limiter state (separate DB from the cache).
r_limiter_db = params.features.rate_limit.redis_id
r_limiter_url = "redis://{host}:{port}/{db}".format(
    host=params.general.redis_host,
    port=params.general.redis_port,
    db=r_limiter_db,  # fix: reuse the local; it was assigned but never used
)

# Redis connections: one for cached query output, one for rate-limit state.
r_cache = aredis.StrictRedis(db=params.features.cache.redis_id, **redis_config)
r_limiter = aredis.StrictRedis(db=params.features.rate_limit.redis_id, **redis_config)
async def check_redis():
    """Verify that both Redis connections respond before the server starts.

    Raises:
        HyperglassError: Raised if Redis is not running.

    Returns:
        {bool} -- True if Redis is running.
    """
    try:
        await r_cache.echo("hyperglass test")
        await r_limiter.echo("hyperglass test")
    except Exception:
        # Collapse any connection failure into a single user-facing error.
        raise HyperglassError(
            f"Redis isn't running at: {redis_config['host']}:{redis_config['port']}",
            alert="danger",
        ) from None
    return True


# Fail fast at import time if Redis is unreachable.
asyncio.run(check_redis())
# Expose the limiter storage URL to Sanic-Limiter through Sanic's config.
app.config.update(RATELIMIT_STORAGE_URL=r_limiter_url)

# Sanic-Limiter keyed on the client address, with a site-wide global limit.
limiter = Limiter(app, key_func=get_remote_address, global_limits=[rate_limit_site])

# Prometheus counters for queries, errors, rate-limit hits, and 404s.
count_data = Counter(
    "count_data", "Query Counter", ["source", "query_type", "loc_id", "target", "vrf"]
)
count_errors = Counter(
    "count_errors",
    "Error Counter",
    ["reason", "source", "query_type", "loc_id", "target"],
)
count_ratelimit = Counter(
    "count_ratelimit", "Rate Limit Counter", ["message", "source"]
)
count_notfound = Counter(
    "count_notfound", "404 Not Found Counter", ["message", "path", "source"]
)
@app.route("/metrics")
@limiter.exempt
async def metrics(request):
    """Serve Prometheus metrics."""
    # Aggregate metrics across all worker processes (multiprocess mode).
    registry = CollectorRegistry()
    multiprocess.MultiProcessCollector(registry)
    latest = generate_latest(registry)
    metric_headers = {
        "Content-Type": CONTENT_TYPE_LATEST,
        "Content-Length": str(len(latest)),
    }
    # NOTE(review): generate_latest() returns bytes; confirm this Sanic
    # version's response.text() accepts bytes without raising.
    return response.text(latest, headers=metric_headers)
@app.exception(InvalidUsage)
async def handle_frontend_errors(request, exception):
    """Handle user-facing feedback related to frontend/input errors."""
    error = exception.args[0]
    log.info(error)
    client_addr = get_remote_address(request)
    query = request.json
    count_errors.labels(
        "Front End Error",
        client_addr,
        query.get("query_type"),
        query.get("location"),
        query.get("target"),
    ).inc()
    log.error(f'Error: {error["message"]}, Source: {client_addr}')
    payload = {
        "output": error["message"],
        "alert": error["alert"],
        "keywords": error["keywords"],
    }
    return response.json(payload, status=400)
@app.exception(ServiceUnavailable)
async def handle_backend_errors(request, exception):
    """Handle user-facing feedback related to backend errors."""
    error = exception.args[0]
    log.info(error)
    client_addr = get_remote_address(request)
    query = request.json
    count_errors.labels(
        "Back End Error",
        client_addr,
        query.get("query_type"),
        query.get("location"),
        query.get("target"),
    ).inc()
    log.error(f'Error: {error["message"]}, Source: {client_addr}')
    payload = {
        "output": error["message"],
        "alert": error["alert"],
        "keywords": error["keywords"],
    }
    return response.json(payload, status=503)
@app.exception(NotFound)
async def handle_404(request, exception):
    """Render the full error page for an invalid URI."""
    requested_path = request.path
    client_addr = get_remote_address(request)
    count_notfound.labels(exception, requested_path, client_addr).inc()
    log.error(f"Error: {exception}, Path: {requested_path}, Source: {client_addr}")
    return response.html(render_html("404", uri=requested_path), status=404)
@app.exception(RateLimitExceeded)
async def handle_429(request, exception):
    """Render the full error page when too many site queries were made."""
    client_addr = get_remote_address(request)
    count_ratelimit.labels(exception, client_addr).inc()
    log.error(f"Error: {exception}, Source: {client_addr}")
    return response.html(render_html("ratelimit-site"), status=429)
@app.exception(ServerError)
async def handle_500(request, exception):
    """Render the general error page and count the error.

    Bug fix: count_errors declares exactly five labels
    (reason, source, query_type, loc_id, target), but this handler passed
    six positional values, which made prometheus_client raise ValueError
    inside the error handler itself. The query details are unknown at this
    point, so the three query labels are recorded as None.
    """
    client_addr = get_remote_address(request)
    count_errors.labels(exception, client_addr, None, None, None).inc()
    log.error(f"Error: {exception}, Source: {client_addr}")
    html = render_html("500")
    return response.html(html, status=500)
async def clear_cache():
    """Clear the Redis cache."""
    try:
        await r_cache.flushdb()
    except Exception as error_exception:
        # Surface the failure to the caller after logging it.
        log.error(f"Error clearing cache: {error_exception}")
        raise HyperglassError(f"Error clearing cache: {error_exception}")
    return "Successfully cleared cache"
@app.route("/", methods=["GET"])
@limiter.limit(rate_limit_site, error_message="Site")
async def site(request):
    """Serve the main application front end."""
    rendered = render_html("form", primary_asn=params.general.primary_asn)
    return response.html(rendered)
def _missing_field_error(field):
    """Build an InvalidUsage payload for a required field that was left empty."""
    return {
        "message": params.messages.no_input.format(field=field),
        "alert": "warning",
        "keywords": [field],
    }


def _invalid_field_error(value, field):
    """Build an InvalidUsage payload for a malformed or unknown field value."""
    return {
        "message": params.messages.invalid_field.format(input=value, field=field),
        "alert": "warning",
        "keywords": [field, value],
    }


async def validate_input(query_data):  # noqa: C901
    """Validate and normalize raw query parameters.

    Deletes any globally unsupported query parameters, then performs
    validation per input type:
    - query_target: non-empty string
    - query_location: non-empty string naming a configured device
    - query_type: non-empty string naming a supported, enabled query
    - query_vrf: optional string naming a VRF defined for the device

    Fixes over the previous revision:
    - The device lookup (``getattr(devices, query_location)``) now happens
      only after query_location is fully validated; previously an empty or
      unknown location raised AttributeError (an HTTP 500) before the
      friendly InvalidUsage response could be produced.
    - query_type error payloads now reference the query_type branding text
      in their keywords; they previously reused the query_location text.

    Returns:
        dict -- The filtered query data, with query_vrf normalized to the
        configured VRF key name (or "default" when omitted).

    Raises:
        InvalidUsage: If any parameter fails validation.
    """
    # Delete any globally unsupported parameters.
    supported_query_data = {
        k: v for k, v in query_data.items() if k in Supported.query_parameters
    }

    # Unpack query data.
    query_location = supported_query_data.get("query_location", "")
    query_type = supported_query_data.get("query_type", "")
    query_target = supported_query_data.get("query_target", "")
    query_vrf = supported_query_data.get("query_vrf", "")

    # Verify that query_target is a non-empty string.
    if not query_target:
        log.debug("No input specified")
        raise InvalidUsage(_missing_field_error(params.branding.text.query_target))
    if not isinstance(query_target, str):
        log.debug("Target is not a string")
        raise InvalidUsage(
            _invalid_field_error(query_target, params.branding.text.query_target)
        )

    # Verify that query_location is a non-empty string naming a known device.
    if not query_location:
        log.debug("No selection specified")
        raise InvalidUsage(_missing_field_error(params.branding.text.query_location))
    if not isinstance(query_location, str):
        log.debug("Query Location is not a string")
        raise InvalidUsage(
            _invalid_field_error(query_location, params.branding.text.query_location)
        )
    if query_location not in devices.hostnames:
        raise InvalidUsage(
            _invalid_field_error(query_location, params.branding.text.query_location)
        )

    # Safe only now that query_location is known to be a configured hostname.
    device = getattr(devices, query_location)

    # Verify that query_type is a non-empty string.
    if not query_type:
        log.debug("No query specified")
        raise InvalidUsage(_missing_field_error(params.branding.text.query_type))
    if not isinstance(query_type, str):
        log.debug("Query Type is not a string")
        raise InvalidUsage(
            _invalid_field_error(query_type, params.branding.text.query_type)
        )

    # Verify that query_type is supported and enabled in the configuration.
    if not Supported.is_supported_query(query_type):
        log.debug("Query not supported")
        raise InvalidUsage(
            _invalid_field_error(query_type, params.branding.text.query_type)
        )
    query_is_enabled = operator.attrgetter(f"{query_type}.enable")(params.features)
    if not query_is_enabled:
        raise InvalidUsage(
            _invalid_field_error(query_type, params.branding.text.query_type)
        )

    if query_vrf:
        # Verify that query_vrf is a string.
        if not isinstance(query_vrf, str):
            raise InvalidUsage(
                _invalid_field_error(query_vrf, params.branding.text.query_vrf)
            )
        # Verify that the requested VRF is defined. NOTE(review): preserves
        # the original substring-style membership test against display names.
        if not any(vrf in query_vrf for vrf in devices.display_vrfs):
            raise InvalidUsage(
                {
                    "message": params.messages.vrf_not_associated.format(
                        vrf_name=query_vrf, device_name=device.display_name
                    ),
                    "alert": "warning",
                    "keywords": [query_vrf, query_location],
                }
            )
        # If the VRF display name from the UI/API matches a configured
        # display name, set query_vrf to the configured VRF key name.
        for vrf in device.vrfs:
            if vrf.display_name == query_vrf:
                supported_query_data["query_vrf"] = vrf.name
    else:
        supported_query_data["query_vrf"] = "default"

    log.debug(f"Validated Query: {supported_query_data}")
    return supported_query_data
@app.route("/query", methods=["POST"])
@limiter.limit(
    rate_limit_query,
    error_message={
        "output": params.features.rate_limit.query.message,
        "alert": "danger",
        "keywords": [],
    },
)
async def hyperglass_main(request):
    """Process XHR POST data.

    Ingests XHR POST data from form submit, passes it to the backend
    application to perform the filtering/lookups.
    """
    raw_query_data = request.json
    log.debug(f"Unvalidated input: {raw_query_data}")

    # Perform basic input validation.
    query_data = await validate_input(raw_query_data)

    # Client IP address, used for Prometheus logging & rate limiting.
    client_addr = get_remote_address(request)
    count_data.labels(
        client_addr,
        query_data.get("query_type"),
        query_data.get("query_location"),
        query_data.get("query_target"),
        query_data.get("query_vrf"),
    ).inc()
    log.debug(f"Client Address: {client_addr}")

    # The stringified validated query doubles as the Redis cache key, so each
    # unique type/location/target/vrf combination caches independently.
    cache_key = str(query_data)
    cache_timeout = params.features.cache.timeout
    log.debug(f"Cache Timeout: {cache_timeout}")

    # Only run the query if no cached entry exists for it.
    if not await r_cache.get(cache_key):
        log.debug(f"Sending query {cache_key} to execute module...")
        try:
            starttime = time.time()
            cache_value = await Execute(query_data).response()
            elapsedtime = round(time.time() - starttime, 4)
            log.debug(f"Query {cache_key} took {elapsedtime} seconds to run.")
        except (InputInvalid, InputNotAllowed, ResponseEmpty) as frontend_error:
            raise InvalidUsage(frontend_error.__dict__())
        except (AuthError, RestError, ScrapeError, DeviceTimeout) as backend_error:
            raise ServiceUnavailable(backend_error.__dict__())

        if cache_value is None:
            raise ServerError(
                {"message": params.messages.general, "alert": "danger", "keywords": []}
            )

        # Create the cache entry with the configured expiry.
        await r_cache.set(cache_key, str(cache_value))
        await r_cache.expire(cache_key, cache_timeout)
        log.debug(f"Added cache entry for query: {cache_key}")

    # Serve the (possibly just-created) cached entry.
    response_output = await r_cache.get(cache_key)
    log.debug(f"Cache match for: {cache_key}, returning cached entry")
    log.debug(f"Cache Output: {response_output}")
    return response.json({"output": response_output}, status=200)