# Mirror of https://github.com/checktheroads/hyperglass (synced 2024-05-11).
"""API Routes."""
# Standard Library Imports
import time
# Third Party Imports
import aredis
from fastapi import HTTPException
from fastapi.openapi.docs import get_redoc_html
from fastapi.openapi.docs import get_swagger_ui_html
from starlette.requests import Request
# Project Imports
from hyperglass.configuration import REDIS_CONFIG
from hyperglass.configuration import params
from hyperglass.exceptions import HyperglassError
from hyperglass.execution.execute import Execute
from hyperglass.models.query import Query
from hyperglass.util import log
# Module-level async Redis client used as the query-result cache. The
# database number comes from hyperglass params; connection details
# (host/port/etc.) come from REDIS_CONFIG.
Cache = aredis.StrictRedis(db=params.cache.database, **REDIS_CONFIG)
async def query(query_data: Query, request: Request):
    """Ingest request data and pass it to the backend application to perform the query.

    Arguments:
        query_data {Query} -- Validated query parameters.
        request {Request} -- Incoming HTTP request (unused here, required by the route).

    Returns:
        {dict} -- Payload with the (possibly cached) command output.

    Raises:
        HyperglassError: If the execution backend produced no output.
    """
    # Use hashed query_data string as key for the k/v cache store so
    # each command output value is unique.
    cache_key = query_data.digest()

    # Define cache entry expiry time
    cache_timeout = params.cache.timeout
    log.debug(f"Cache Timeout: {cache_timeout}")

    # Check for an existing cache entry. exists() avoids transferring the
    # full cached payload just to test for presence, and — unlike
    # truthiness-testing the result of get() — does not misread a cached
    # empty-string output as a miss (which would re-run the query).
    if not await Cache.exists(cache_key):
        log.debug(f"Created new cache key {cache_key} entry for query {query_data}")
        log.debug("Beginning query execution...")

        # Pass request to execution module; time it for debug logging.
        starttime = time.time()
        cache_value = await Execute(query_data).response()
        endtime = time.time()
        elapsedtime = round(endtime - starttime, 4)
        log.debug(f"Query {cache_key} took {elapsedtime} seconds to run.")

        if cache_value is None:
            raise HyperglassError(message=params.messages.general, alert="danger")

        # Create a cache entry and set its expiry.
        await Cache.set(cache_key, str(cache_value))
        await Cache.expire(cache_key, cache_timeout)
        log.debug(f"Added cache entry for query: {cache_key}")

    # Return the cached entry (pre-existing or just created).
    cache_response = await Cache.get(cache_key)
    log.debug(f"Cache match for: {cache_key}, returning cached entry")
    log.debug(f"Cache Output: {cache_response}")

    return {"output": cache_response, "level": "success", "keywords": []}
async def docs():
    """Serve custom API documentation (Swagger UI or Redoc) when enabled."""
    # Guard clause: documentation disabled in configuration -> 404.
    if not params.docs.enable:
        raise HTTPException(detail="Not found", status_code=404)

    # Pick the renderer that matches the configured documentation mode.
    renderers = {"swagger": get_swagger_ui_html, "redoc": get_redoc_html}
    render = renderers[params.docs.mode]

    return render(
        openapi_url=params.docs.openapi_url, title=params.site_title + " - API Docs"
    )
# Route handlers exported for registration with the web application.
endpoints = [query, docs]