Mirror of https://github.com/checktheroads/hyperglass (synced 2024-05-11 05:55:08 +00:00)
Fix unused imports, break out cache retrieval from try block
Completed the switch from dict → tuple for output & status by stringifying the output tuple before caching it, then using ast.literal_eval to de-stringify it on retrieval.
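For reference, the round trip described above works roughly like this (a minimal sketch, not hyperglass's actual module: the connection parameters are placeholders, and it assumes the Redis client is created with decode_responses=True so get() returns a str that ast.literal_eval can parse):

from ast import literal_eval

import redis

# Placeholder connection settings; hyperglass reads its Redis config elsewhere.
r_cache = redis.Redis(host="localhost", port=6379, db=0, decode_responses=True)

cache_key = "example-query"
cache_value = ("<looking glass output>", 200)  # (output, status) tuple instead of a dict

# Store: stringify the tuple so it fits in a plain Redis string key, then expire it.
r_cache.set(cache_key, str(cache_value))
r_cache.expire(cache_key, 120)

# Retrieve: literal_eval safely rebuilds the tuple from its repr.
response_output, response_status = literal_eval(r_cache.get(cache_key))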
@@ -3,6 +3,7 @@ Main Hyperglass Front End
 """
 # Standard Imports
 import json
+from ast import literal_eval

 # Module Imports
 import redis
@@ -192,37 +193,35 @@ def hyperglass_main():
     cache_timeout = params.features.cache.timeout
     logger.debug(f"Cache Timeout: {cache_timeout}")
     # Check if cached entry exists
-    if not r_cache.hgetall(cache_key):
+    if not r_cache.get(cache_key):
         try:
             logger.debug(f"Sending query {cache_key} to execute module...")
             cache_value = Execute(lg_data).response()
-            value_output = cache_value["output"]
-            value_code = cache_value["status"]
             logger.debug(
-                f"Validated response...\nStatus Code: {value_code}\nOutput:\n{value_output}"
+                f"Validated response...\nStatus Code: {cache_value[1]}\nOutput:\n{cache_value[0]}"
             )
             # If it doesn't, create a cache entry
-            r_cache.hmset(cache_key, cache_value)
+            r_cache.set(cache_key, str(cache_value))
             r_cache.expire(cache_key, cache_timeout)
             logger.debug(f"Added cache entry for query: {cache_key}")
-            response = r_cache.hgetall(cache_key)
-            logger.debug(f"Status code: {value_code}")
-            # If error, increment Prometheus metrics
-            if value_code in [405, 415, 504]:
-                count_errors.labels(
-                    response["status"],
-                    code.get_reason(response["status"]),
-                    client_addr,
-                    lg_data["type"],
-                    lg_data["location"],
-                    lg_data["target"],
-                ).inc()
-            return Response(*response)
         except:
             logger.error(f"Unable to add output to cache: {cache_key}")
             raise HyperglassError(f"Error with cache key {cache_key}")
     # If it does, return the cached entry
-    else:
-        logger.debug(f"Cache match for: {cache_key}, returning cached entry")
-        response = r_cache.hgetall(cache_key)
-        return Response(*response)
+    logger.debug(f"Cache match for: {cache_key}, returning cached entry")
+    cache_response = r_cache.get(cache_key)
+    response = literal_eval(cache_response)
+    response_output, response_status = response
+    logger.debug(f"Cache Output: {response_output}")
+    logger.debug(f"Cache Status Code: {response_status}")
+    # If error, increment Prometheus metrics
+    if response_status in [405, 415, 504]:
+        count_errors.labels(
+            response_status,
+            code.get_reason(response_status),
+            client_addr,
+            lg_data["type"],
+            lg_data["location"],
+            lg_data["target"],
+        ).inc()
+    return Response(*response)