Mirror of https://github.com/checktheroads/hyperglass (synced 2024-05-11 05:55:08 +00:00)
shorten logger function name
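This commit renames the logzero logger import from "logger" to "log" throughout the codebase, shortening every logging call site (logger.debug(...) becomes log.debug(...)) without changing behavior. A minimal sketch of the pattern (the function and message below are illustrative, not taken from this commit):

    # Bind logzero's shared default logger to a shorter local name.
    from logzero import logger as log

    def ping(target):
        # Same logger object as before; only the name at the call site changes.
        log.debug(f"Constructing ping query for {target}")

Because "as log" merely rebinds the name, any levels, handlers, and formatting configured through logzero apply to log exactly as they did to logger.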
@@ -9,7 +9,7 @@ import json
 import operator

 # Third Party Imports
-from logzero import logger
+from logzero import logger as log

 # Project Imports
 from hyperglass.configuration import vrfs
@@ -66,7 +66,7 @@ class Construct:
     def ping(self):
         """Constructs ping query parameters from pre-validated input"""

-        logger.debug(
+        log.debug(
             f"Constructing ping query for {self.query_target} via {self.transport}"
         )

@@ -99,14 +99,14 @@ class Construct:
             )
             query.append(vrf_query)

-        logger.debug(f"Constructed query: {query}")
+        log.debug(f"Constructed query: {query}")
         return query

     def traceroute(self):
         """
         Constructs traceroute query parameters from pre-validated input.
         """
-        logger.debug(
+        log.debug(
             (
                 f"Constructing traceroute query for {self.query_target} "
                 f"via {self.transport}"
@@ -133,7 +133,7 @@ class Construct:
             target=self.query_target, source=source, vrf=self.query_vrf
         )

-        logger.debug(f"Constructed query: {query}")
+        log.debug(f"Constructed query: {query}")

         return [query]

@@ -141,7 +141,7 @@ class Construct:
         """
         Constructs bgp_route query parameters from pre-validated input.
         """
-        logger.debug(
+        log.debug(
             f"Constructing bgp_route query for {self.query_target} via {self.transport}"
         )

@@ -165,7 +165,7 @@ class Construct:
             target=self.query_target, source=source, afi=afi, vrf=self.query_vrf
         )

-        logger.debug(f"Constructed query: {query}")
+        log.debug(f"Constructed query: {query}")

         return [query]

@@ -174,7 +174,7 @@ class Construct:
         Constructs bgp_community query parameters from pre-validated
         input.
         """
-        logger.debug(
+        log.debug(
             (
                 f"Constructing bgp_community query for {self.query_target} "
                 f"via {self.transport}"
@@ -183,7 +183,7 @@ class Construct:

         query = None
         afi = self.query_afi(self.query_target, self.query_vrf)
-        logger.debug(afi)
+        log.debug(afi)
         source = self.get_src(self.device, afi)

         if self.transport == "rest":
@@ -202,7 +202,7 @@ class Construct:
             target=self.query_target, source=source, vrf=self.query_vrf
         )

-        logger.debug(f"Constructed query: {query}")
+        log.debug(f"Constructed query: {query}")

         return query

@@ -210,7 +210,7 @@ class Construct:
         """
         Constructs bgp_aspath query parameters from pre-validated input.
         """
-        logger.debug(
+        log.debug(
             (
                 f"Constructing bgp_aspath query for {self.query_target} "
                 f"via {self.transport}"
@@ -237,6 +237,6 @@ class Construct:
             target=self.query_target, source=source, vrf=self.query_vrf
         )

-        logger.debug(f"Constructed query: {query}")
+        log.debug(f"Constructed query: {query}")

         return query

@@ -10,7 +10,7 @@ import re
 # Third Party Imports
 import httpx
 import sshtunnel
-from logzero import logger
+from logzero import logger as log
 from netmiko import ConnectHandler
 from netmiko import NetMikoAuthenticationException
 from netmiko import NetmikoAuthError
@@ -64,9 +64,7 @@ class Connect:
         """
         device_proxy = getattr(proxies, self.device_config.proxy)

-        logger.debug(
-            f"Connecting to {self.device_config.proxy} via sshtunnel library..."
-        )
+        log.debug(f"Connecting to {self.device_config.proxy} via sshtunnel library...")
         try:
             tunnel = sshtunnel.open_tunnel(
                 device_proxy.address.compressed,
@@ -82,7 +80,7 @@ class Connect:
                 logger=logger,
             )
         except sshtunnel.BaseSSHTunnelForwarderError as scrape_proxy_error:
-            logger.error(
+            log.error(
                 f"Error connecting to device {self.device_config.location} via "
                 f"proxy {self.device_config.proxy}"
             )
@@ -93,7 +91,7 @@ class Connect:
                 error=scrape_proxy_error,
             )
         with tunnel:
-            logger.debug(f"Established tunnel with {self.device_config.proxy}")
+            log.debug(f"Established tunnel with {self.device_config.proxy}")
             scrape_host = {
                 "host": "localhost",
                 "port": tunnel.local_bind_port,
@@ -103,9 +101,9 @@ class Connect:
                 "global_delay_factor": 0.2,
                 "timeout": params.general.request_timeout - 1,
             }
-            logger.debug(f"SSH proxy local binding: localhost:{tunnel.local_bind_port}")
+            log.debug(f"SSH proxy local binding: localhost:{tunnel.local_bind_port}")
             try:
-                logger.debug(
+                log.debug(
                     f"Connecting to {self.device_config.location} "
                     "via Netmiko library..."
                 )
@@ -114,12 +112,12 @@ class Connect:
                 for query in self.query:
                     raw = nm_connect_direct.send_command(query)
                     responses.append(raw)
-                    logger.debug(f'Raw response for command "{query}":\n{raw}')
+                    log.debug(f'Raw response for command "{query}":\n{raw}')
                 response = "\n".join(responses)
-                logger.debug(f"Response type:\n{type(response)}")
+                log.debug(f"Response type:\n{type(response)}")

             except (NetMikoTimeoutException, NetmikoTimeoutError) as scrape_error:
-                logger.error(
+                log.error(
                     f"Timeout connecting to device {self.device_config.location}: "
                     f"{scrape_error}"
                 )
@@ -130,7 +128,7 @@ class Connect:
                     error=params.messages.request_timeout,
                 )
             except (NetMikoAuthenticationException, NetmikoAuthError) as auth_error:
-                logger.error(
+                log.error(
                     f"Error authenticating to device {self.device_config.location}: "
                     f"{auth_error}"
                 )
@@ -141,7 +139,7 @@ class Connect:
                     error=params.messages.authentication_error,
                 ) from None
             except sshtunnel.BaseSSHTunnelForwarderError as scrape_error:
-                logger.error(
+                log.error(
                     f"Error connecting to device proxy {self.device_config.proxy}: "
                     f"{scrape_error}"
                 )
@@ -152,14 +150,14 @@ class Connect:
                     error=params.messages.general,
                 )
             if response is None:
-                logger.error(f"No response from device {self.device_config.location}")
+                log.error(f"No response from device {self.device_config.location}")
                 raise ScrapeError(
                     params.messages.connection_error,
                     device_name=self.device_config.display_name,
                     proxy=None,
                     error=params.messages.noresponse_error,
                 )
-            logger.debug(f"Output for query: {self.query}:\n{response}")
+            log.debug(f"Output for query: {self.query}:\n{response}")
             return response

     async def scrape_direct(self):
@@ -168,7 +166,7 @@ class Connect:
         command output.
         """

-        logger.debug(f"Connecting directly to {self.device_config.location}...")
+        log.debug(f"Connecting directly to {self.device_config.location}...")

         scrape_host = {
             "host": self.device_config.address.compressed,
@@ -181,17 +179,15 @@ class Connect:
         }

         try:
-            logger.debug(f"Device Parameters: {scrape_host}")
-            logger.debug(
+            log.debug(f"Device Parameters: {scrape_host}")
+            log.debug(
                 f"Connecting to {self.device_config.location} via Netmiko library"
             )
             nm_connect_direct = ConnectHandler(**scrape_host)
             response = nm_connect_direct.send_command(self.query)
         except (NetMikoTimeoutException, NetmikoTimeoutError) as scrape_error:
-            logger.error(
-                f"{params.general.request_timeout - 1} second timeout expired."
-            )
-            logger.error(scrape_error)
+            log.error(f"{params.general.request_timeout - 1} second timeout expired.")
+            log.error(scrape_error)
             raise DeviceTimeout(
                 params.messages.connection_error,
                 device_name=self.device_config.display_name,
@@ -199,10 +195,8 @@ class Connect:
                 error=params.messages.request_timeout,
             )
         except (NetMikoAuthenticationException, NetmikoAuthError) as auth_error:
-            logger.error(
-                f"Error authenticating to device {self.device_config.location}"
-            )
-            logger.error(auth_error)
+            log.error(f"Error authenticating to device {self.device_config.location}")
+            log.error(auth_error)

             raise AuthError(
                 params.messages.connection_error,
@@ -211,19 +205,19 @@ class Connect:
                 error=params.messages.authentication_error,
             )
         if not response:
-            logger.error(f"No response from device {self.device_config.location}")
+            log.error(f"No response from device {self.device_config.location}")
             raise ScrapeError(
                 params.messages.connection_error,
                 device_name=self.device_config.display_name,
                 proxy=None,
                 error=params.messages.noresponse_error,
             )
-        logger.debug(f"Output for query: {self.query}:\n{response}")
+        log.debug(f"Output for query: {self.query}:\n{response}")
         return response

     async def rest(self):
         """Sends HTTP POST to router running a hyperglass API agent"""
-        logger.debug(f"Query parameters: {self.query}")
+        log.debug(f"Query parameters: {self.query}")

         uri = Supported.map_rest(self.device_config.nos)
         headers = {
@@ -238,8 +232,8 @@ class Connect:
             uri=uri,
         )

-        logger.debug(f"HTTP Headers: {headers}")
-        logger.debug(f"URL endpoint: {endpoint}")
+        log.debug(f"HTTP Headers: {headers}")
+        log.debug(f"URL endpoint: {endpoint}")

         try:
             http_client = httpx.AsyncClient()
@@ -248,8 +242,8 @@ class Connect:
             )
             response = raw_response.text

-            logger.debug(f"HTTP status code: {raw_response.status_code}")
-            logger.debug(f"Output for query {self.query}:\n{response}")
+            log.debug(f"HTTP status code: {raw_response.status_code}")
+            log.debug(f"Output for query {self.query}:\n{response}")
         except (
             httpx.exceptions.ConnectTimeout,
             httpx.exceptions.CookieConflict,
@@ -270,7 +264,7 @@ class Connect:
             rest_msg = " ".join(
                 re.findall(r"[A-Z][^A-Z]*", rest_error.__class__.__name__)
             )
-            logger.error(
+            log.error(
                 f"Error connecting to device {self.device_config.location}: {rest_msg}"
             )
             raise RestError(
@@ -286,7 +280,7 @@ class Connect:
             )

         if raw_response.status_code != 200:
-            logger.error(f"Response code is {raw_response.status_code}")
+            log.error(f"Response code is {raw_response.status_code}")
             raise RestError(
                 params.messages.connection_error,
                 device_name=self.device_config.display_name,
@@ -294,14 +288,14 @@ class Connect:
             )

         if not response:
-            logger.error(f"No response from device {self.device_config.location}")
+            log.error(f"No response from device {self.device_config.location}")
             raise RestError(
                 params.messages.connection_error,
                 device_name=self.device_config.display_name,
                 error=params.messages.noresponse_error,
             )

-        logger.debug(f"Output for query: {self.query}:\n{response}")
+        log.debug(f"Output for query: {self.query}:\n{response}")
         return response


@@ -325,14 +319,14 @@ class Execute:
         """
         device_config = getattr(devices, self.query_location)

-        logger.debug(f"Received query for {self.query_data}")
-        logger.debug(f"Matched device config: {device_config}")
+        log.debug(f"Received query for {self.query_data}")
+        log.debug(f"Matched device config: {device_config}")

         # Run query parameters through validity checks
         validation = Validate(device_config, self.query_type, self.query_target)
         valid_input = validation.validate_query()
         if valid_input:
-            logger.debug(f"Validation passed for query: {self.query_data}")
+            log.debug(f"Validation passed for query: {self.query_data}")
             pass

         connect = None

@@ -8,7 +8,7 @@ import ipaddress
 import re

 # Third Party Imports
-from logzero import logger
+from logzero import logger as log

 # Project Imports
 from hyperglass.configuration import logzero_config  # noqa: F401
@@ -62,10 +62,10 @@ class IPType:
         ip_version = ipaddress.ip_network(target).version
         state = False
         if ip_version == 4 and re.match(self.ipv4_host, target):
-            logger.debug(f"{target} is an IPv{ip_version} host.")
+            log.debug(f"{target} is an IPv{ip_version} host.")
             state = True
         if ip_version == 6 and re.match(self.ipv6_host, target):
-            logger.debug(f"{target} is an IPv{ip_version} host.")
+            log.debug(f"{target} is an IPv{ip_version} host.")
             state = True
         return state

@@ -89,7 +89,7 @@ def ip_validate(target):
         _exception.details = {}
         raise _exception
     except (ipaddress.AddressValueError, ValueError) as ip_error:
-        logger.debug(f"IP {target} is invalid")
+        log.debug(f"IP {target} is invalid")
         _exception = ValueError(ip_error)
         _exception.details = {}
         raise _exception
@@ -101,7 +101,7 @@ def ip_blacklist(target):
     Check blacklist list for prefixes/IPs, return boolean based on list
     membership.
     """
-    logger.debug(f"Blacklist Enabled: {params.features.blacklist.enable}")
+    log.debug(f"Blacklist Enabled: {params.features.blacklist.enable}")
     target = ipaddress.ip_network(target)
     if params.features.blacklist.enable:
         target_ver = target.version
@@ -111,7 +111,7 @@ def ip_blacklist(target):
             for net in user_blacklist
             if ipaddress.ip_network(net).version == target_ver
         ]
-        logger.debug(
+        log.debug(
             f"IPv{target_ver} Blacklist Networks: {[str(net) for net in networks]}"
         )
         for net in networks:
@@ -120,7 +120,7 @@ def ip_blacklist(target):
                 blacklist_net.network_address <= target.network_address
                 and blacklist_net.broadcast_address >= target.broadcast_address
             ):
-                logger.debug(f"Blacklist Match Found for {target} in {str(net)}")
+                log.debug(f"Blacklist Match Found for {target} in {str(net)}")
                 _exception = ValueError(params.messages.blacklist)
                 _exception.details = {"blacklisted_net": str(net)}
                 raise _exception
@@ -150,14 +150,14 @@ def ip_attributes(target):
 def ip_type_check(query_type, target, device):
     """Checks multiple IP address related validation parameters"""
     prefix_attr = ip_attributes(target)
-    logger.debug(f"IP Attributes:\n{prefix_attr}")
+    log.debug(f"IP Attributes:\n{prefix_attr}")

     # If enable_max_prefix feature enabled, require that BGP Route
     # queries be smaller than configured size limit.
     if query_type == "bgp_route" and params.features.max_prefix.enable:
         max_length = getattr(params.features.max_prefix, prefix_attr["afi"])
         if prefix_attr["length"] > max_length:
-            logger.debug("Failed max prefix length check")
+            log.debug("Failed max prefix length check")
             _exception = ValueError(params.messages.max_prefix)
             _exception.details = {"max_length": max_length}
             raise _exception
@@ -170,7 +170,7 @@ def ip_type_check(query_type, target, device):
         and device.nos in params.general.requires_ipv6_cidr
         and IPType().is_host(target)
     ):
-        logger.debug("Failed requires IPv6 CIDR check")
+        log.debug("Failed requires IPv6 CIDR check")
         _exception = ValueError(params.messages.requires_ipv6_cidr)
         _exception.details = {"device_name": device.display_name}
         raise _exception
@@ -178,7 +178,7 @@ def ip_type_check(query_type, target, device):
     # If query type is ping or traceroute, and query target is in CIDR
     # format, return an error.
     if query_type in ("ping", "traceroute") and IPType().is_cidr(target):
-        logger.debug("Failed CIDR format for ping/traceroute check")
+        log.debug("Failed CIDR format for ping/traceroute check")
         _exception = ValueError(params.messages.directed_cidr)
         _exception.details = {"query_type": getattr(params.branding.text, query_type)}
         raise _exception
@@ -200,7 +200,7 @@ class Validate:

     def validate_ip(self):
         """Validates IPv4/IPv6 Input"""
-        logger.debug(f"Validating {self.query_type} query for target {self.target}...")
+        log.debug(f"Validating {self.query_type} query for target {self.target}...")

         # Perform basic validation of an IP address, return error if
         # not a valid IP.
@@ -237,7 +237,7 @@ class Validate:

     def validate_dual(self):
         """Validates Dual-Stack Input"""
-        logger.debug(f"Validating {self.query_type} query for target {self.target}...")
+        log.debug(f"Validating {self.query_type} query for target {self.target}...")

         if self.query_type == "bgp_community":
             # Validate input communities against configured or default regex

@@ -9,7 +9,7 @@ from pathlib import Path
 # Third Party Imports
 import logzero
 import yaml
-from logzero import logger
+from logzero import logger as log
 from pydantic import ValidationError

 # Project Imports
@@ -34,16 +34,16 @@ try:
         user_config = yaml.safe_load(config_yaml)
 except FileNotFoundError as no_config_error:
     user_config = None
-    logger.error(f"{no_config_error} - Default configuration will be used")
+    log.error(f"{no_config_error} - Default configuration will be used")

 # Import commands file
 try:
     with open(working_dir.joinpath("commands.yaml")) as commands_yaml:
         user_commands = yaml.safe_load(commands_yaml)
-        logger.info(f"Found commands: {user_commands}")
+        log.info(f"Found commands: {user_commands}")
 except FileNotFoundError:
     user_commands = None
-    logger.info(
+    log.info(
         (
             f'No commands found in {working_dir.joinpath("commands.yaml")}. '
             "Defaults will be used."
@@ -57,7 +57,7 @@ try:
     with open(working_dir.joinpath("devices.yaml")) as devices_yaml:
         user_devices = yaml.safe_load(devices_yaml)
 except FileNotFoundError as no_devices_error:
-    logger.error(no_devices_error)
+    log.error(no_devices_error)
     raise ConfigMissing(
         missing_item=str(working_dir.joinpath("devices.yaml"))
     ) from None

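The configuration module imports logzero itself alongside the aliased logger, and other modules import logzero_config for its side effects, because level and format setup in logzero goes through module-level functions. A hedged sketch of what such a setup step typically looks like (the level value is illustrative, not from this commit):

    import logging

    import logzero
    from logzero import logger as log

    # Module-level configuration applies to logzero's shared default
    # logger, so every log.* call in the package picks it up.
    logzero.loglevel(logging.DEBUG)
    log.debug("logzero configured")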
@@ -7,7 +7,7 @@ from pathlib import Path

 # Third Party Imports
 import aredis
-from logzero import logger
+from logzero import logger as log
 from prometheus_client import CollectorRegistry
 from prometheus_client import Counter
 from prometheus_client import generate_latest
@@ -40,7 +40,7 @@ from hyperglass.exceptions import (
     DeviceTimeout,
 )

-logger.debug(f"Configuration Parameters:\n {params.dict()}")
+log.debug(f"Configuration Parameters:\n {params.dict()}")

 # Redis Config
 redis_config = {
@@ -53,12 +53,12 @@ redis_config = {
 static_dir = Path(__file__).parent / "static" / "ui"

 # Main Sanic app definition
-logger.debug(f"Static Files: {static_dir}")
+log.debug(f"Static Files: {static_dir}")

 app = Sanic(__name__)
 app.static("/ui", str(static_dir))

-logger.debug(app.config)
+log.debug(app.config)

 # Redis Cache Config
 r_cache = aredis.StrictRedis(db=params.features.cache.redis_id, **redis_config)
@@ -71,8 +71,8 @@ site_period = params.features.rate_limit.site.period
 #
 rate_limit_query = f"{query_rate} per {query_period}"
 rate_limit_site = f"{site_rate} per {site_period}"
-logger.debug(f"Query rate limit: {rate_limit_query}")
-logger.debug(f"Site rate limit: {rate_limit_site}")
+log.debug(f"Query rate limit: {rate_limit_query}")
+log.debug(f"Site rate limit: {rate_limit_site}")

 # Redis Config for Sanic-Limiter storage
 r_limiter_db = params.features.rate_limit.redis_id
@@ -131,7 +131,7 @@ async def handle_frontend_errors(request, exception):
     client_addr = get_remote_address(request)
     error = exception.args[0]
     alert = error["alert"]
-    logger.info(error)
+    log.info(error)
     count_errors.labels(
         "Front End Error",
         client_addr,
@@ -139,7 +139,7 @@ async def handle_frontend_errors(request, exception):
         request.json.get("location"),
         request.json.get("target"),
     ).inc()
-    logger.error(f'Error: {error["message"]}, Source: {client_addr}')
+    log.error(f'Error: {error["message"]}, Source: {client_addr}')
     return response.json(
         {"output": error["message"], "alert": alert, "keywords": error["keywords"]},
         status=400,
@@ -152,7 +152,7 @@ async def handle_backend_errors(request, exception):
     client_addr = get_remote_address(request)
     error = exception.args[0]
     alert = error["alert"]
-    logger.info(error)
+    log.info(error)
     count_errors.labels(
         "Back End Error",
         client_addr,
@@ -160,7 +160,7 @@ async def handle_backend_errors(request, exception):
         request.json.get("location"),
         request.json.get("target"),
     ).inc()
-    logger.error(f'Error: {error["message"]}, Source: {client_addr}')
+    log.error(f'Error: {error["message"]}, Source: {client_addr}')
     return response.json(
         {"output": error["message"], "alert": alert, "keywords": error["keywords"]},
         status=503,
@@ -174,7 +174,7 @@ async def handle_404(request, exception):
     html = render_html("404", uri=path)
     client_addr = get_remote_address(request)
     count_notfound.labels(exception, path, client_addr).inc()
-    logger.error(f"Error: {exception}, Path: {path}, Source: {client_addr}")
+    log.error(f"Error: {exception}, Path: {path}, Source: {client_addr}")
     return response.html(html, status=404)


@@ -184,7 +184,7 @@ async def handle_429(request, exception):
     html = render_html("ratelimit-site")
     client_addr = get_remote_address(request)
     count_ratelimit.labels(exception, client_addr).inc()
-    logger.error(f"Error: {exception}, Source: {client_addr}")
+    log.error(f"Error: {exception}, Source: {client_addr}")
     return response.html(html, status=429)


@@ -193,7 +193,7 @@ async def handle_500(request, exception):
     """General Error Page"""
     client_addr = get_remote_address(request)
     count_errors.labels(500, exception, client_addr, None, None, None).inc()
-    logger.error(f"Error: {exception}, Source: {client_addr}")
+    log.error(f"Error: {exception}, Source: {client_addr}")
     html = render_html("500")
     return response.html(html, status=500)

@@ -204,7 +204,7 @@ async def clear_cache():
         await r_cache.flushdb()
         return "Successfully cleared cache"
     except Exception as error_exception:
-        logger.error(f"Error clearing cache: {error_exception}")
+        log.error(f"Error clearing cache: {error_exception}")
         raise HyperglassError(f"Error clearing cache: {error_exception}")


@@ -254,7 +254,7 @@ async def validate_input(query_data):  # noqa: C901

     # Verify that query_target is not empty
     if not query_target:
-        logger.debug("No input specified")
+        log.debug("No input specified")
         raise InvalidUsage(
             {
                 "message": params.messages.no_input.format(
@@ -266,7 +266,7 @@ async def validate_input(query_data):  # noqa: C901
         )
     # Verify that query_target is a string
     if not isinstance(query_target, str):
-        logger.debug("Target is not a string")
+        log.debug("Target is not a string")
         raise InvalidUsage(
             {
                 "message": params.messages.invalid_field.format(
@@ -278,7 +278,7 @@ async def validate_input(query_data):  # noqa: C901
         )
     # Verify that query_location is not empty
     if not query_location:
-        logger.debug("No selection specified")
+        log.debug("No selection specified")
         raise InvalidUsage(
             {
                 "message": params.messages.no_input.format(
@@ -290,7 +290,7 @@ async def validate_input(query_data):  # noqa: C901
         )
     # Verify that query_location is a string
     if not isinstance(query_location, str):
-        logger.debug("Query Location is not a string")
+        log.debug("Query Location is not a string")
         raise InvalidUsage(
             {
                 "message": params.messages.invalid_field.format(
@@ -313,7 +313,7 @@ async def validate_input(query_data):  # noqa: C901
         )
     # Verify that query_type is not empty
     if not query_type:
-        logger.debug("No query specified")
+        log.debug("No query specified")
         raise InvalidUsage(
             {
                 "message": params.messages.no_input.format(
@@ -324,7 +324,7 @@ async def validate_input(query_data):  # noqa: C901
             }
         )
     if not isinstance(query_type, str):
-        logger.debug("Query Type is not a string")
+        log.debug("Query Type is not a string")
         raise InvalidUsage(
             {
                 "message": params.messages.invalid_field.format(
@@ -337,7 +337,7 @@ async def validate_input(query_data):  # noqa: C901
     # Verify that query_type is actually supported
     query_is_supported = Supported.is_supported_query(query_type)
     if not query_is_supported:
-        logger.debug("Query not supported")
+        log.debug("Query not supported")
         raise InvalidUsage(
             {
                 "message": params.messages.invalid_field.format(
@@ -402,7 +402,7 @@ async def hyperglass_main(request):
     """
     # Get JSON data from Ajax POST
     raw_query_data = request.json
-    logger.debug(f"Unvalidated input: {raw_query_data}")
+    log.debug(f"Unvalidated input: {raw_query_data}")

     # Perform basic input validation
     query_data = await validate_input(raw_query_data)
@@ -419,7 +419,7 @@ async def hyperglass_main(request):
         query_data.get("query_vrf"),
     ).inc()

-    logger.debug(f"Client Address: {client_addr}")
+    log.debug(f"Client Address: {client_addr}")

     # Stringify the form response containing serialized JSON for the
     # request, use as key for k/v cache store so each command output
@@ -428,11 +428,11 @@ async def hyperglass_main(request):

     # Define cache entry expiry time
     cache_timeout = params.features.cache.timeout
-    logger.debug(f"Cache Timeout: {cache_timeout}")
+    log.debug(f"Cache Timeout: {cache_timeout}")

     # Check if cached entry exists
     if not await r_cache.get(cache_key):
-        logger.debug(f"Sending query {cache_key} to execute module...")
+        log.debug(f"Sending query {cache_key} to execute module...")

         # Pass request to execution module
         try:
@@ -443,7 +443,7 @@ async def hyperglass_main(request):
             endtime = time.time()
             elapsedtime = round(endtime - starttime, 4)

-            logger.debug(f"Query {cache_key} took {elapsedtime} seconds to run.")
+            log.debug(f"Query {cache_key} took {elapsedtime} seconds to run.")

         except (InputInvalid, InputNotAllowed) as frontend_error:
             raise InvalidUsage(frontend_error.__dict__())
@@ -459,14 +459,14 @@ async def hyperglass_main(request):
         await r_cache.set(cache_key, str(cache_value))
         await r_cache.expire(cache_key, cache_timeout)

-        logger.debug(f"Added cache entry for query: {cache_key}")
+        log.debug(f"Added cache entry for query: {cache_key}")

     # If it does, return the cached entry
     cache_response = await r_cache.get(cache_key)

     response_output = cache_response

-    logger.debug(f"Cache match for: {cache_key}, returning cached entry")
-    logger.debug(f"Cache Output: {response_output}")
+    log.debug(f"Cache match for: {cache_key}, returning cached entry")
+    log.debug(f"Cache Output: {response_output}")

     return response.json({"output": response_output}, status=200)

@@ -7,7 +7,7 @@ from pathlib import Path
 # Third Party Imports
 import jinja2
 import yaml
-from logzero import logger
+from logzero import logger as log
 from markdown2 import Markdown

 # Project Imports
@@ -198,7 +198,7 @@ def render_html(template_name, **kwargs):
     details_dict.update({details_name: details_data})
     info_list = ["bgp_route", "bgp_aspath", "bgp_community", "ping", "traceroute"]
     rendered_help = generate_markdown("help")
-    logger.debug(rendered_help)
+    log.debug(rendered_help)
     try:
         template_file = f"templates/{template_name}.html.j2"
         template = env.get_template(template_file)
@@ -210,7 +210,5 @@ def render_html(template_name, **kwargs):
             **kwargs,
         )
     except jinja2.TemplateNotFound as template_error:
-        logger.error(
-            f"Error rendering Jinja2 template {Path(template_file).resolve()}."
-        )
+        log.error(f"Error rendering Jinja2 template {Path(template_file).resolve()}.")
         raise HyperglassError(template_error)

@@ -8,7 +8,7 @@ from pathlib import Path

 # Third Party Imports
 import jinja2
-from logzero import logger
+from logzero import logger as log

 # Project Imports
 from hyperglass.configuration import logzero_config  # noqa: F401
@@ -38,7 +38,7 @@ def render_frontend_config():
             json.dumps({"config": frontend_params, "networks": frontend_networks})
         )
     except jinja2.exceptions as frontend_error:
-        logger.error(f"Error rendering front end config: {frontend_error}")
+        log.error(f"Error rendering front end config: {frontend_error}")
         raise HyperglassError(frontend_error)


@@ -65,11 +65,11 @@ def get_fonts():
     stdout, stderr = proc.communicate()
     if proc.returncode != 0:
         output_error = stderr.decode("utf-8")
-        logger.error(output_error)
+        log.error(output_error)
         raise HyperglassError(f"Error downloading font from URL {font_url}")
     else:
         proc.kill()
-        logger.debug(f"Downloaded font from URL {font_url}")
+        log.debug(f"Downloaded font from URL {font_url}")


 def render_theme():
@@ -81,7 +81,7 @@ def render_theme():
         with rendered_theme_file.open(mode="w") as theme_file:
             theme_file.write(rendered_theme)
     except jinja2.exceptions as theme_error:
-        logger.error(f"Error rendering theme: {theme_error}")
+        log.error(f"Error rendering theme: {theme_error}")
         raise HyperglassError(theme_error)


@@ -101,12 +101,12 @@ def build_assets():
     output_out = json.loads(stdout.decode("utf-8").split("\n")[0])
     if proc.returncode != 0:
         output_error = json.loads(stderr.decode("utf-8").strip("\n"))
-        logger.error(output_error["data"])
+        log.error(output_error["data"])
         raise HyperglassError(
             f'Error building web assets with script {output_out["data"]}:'
             f'{output_error["data"]}'
         )
-    logger.debug(f'Built web assets with script {output_out["data"]}')
+    log.debug(f'Built web assets with script {output_out["data"]}')


 def render_assets():
@@ -115,26 +115,26 @@ def render_assets():
     web assets
     """
     try:
-        logger.debug("Rendering front end config...")
+        log.debug("Rendering front end config...")
         render_frontend_config()
-        logger.debug("Rendered front end config")
+        log.debug("Rendered front end config")
     except HyperglassError as frontend_error:
         raise HyperglassError(frontend_error)
     try:
-        logger.debug("Downloading theme fonts...")
+        log.debug("Downloading theme fonts...")
         get_fonts()
-        logger.debug("Downloaded theme fonts")
+        log.debug("Downloaded theme fonts")
     except HyperglassError as theme_error:
         raise HyperglassError(theme_error)
     try:
-        logger.debug("Rendering theme elements...")
+        log.debug("Rendering theme elements...")
         render_theme()
-        logger.debug("Rendered theme elements")
+        log.debug("Rendered theme elements")
     except HyperglassError as theme_error:
         raise HyperglassError(theme_error)
     try:
-        logger.debug("Building web assets...")
+        log.debug("Building web assets...")
         build_assets()
-        logger.debug("Built web assets")
+        log.debug("Built web assets")
     except HyperglassError as assets_error:
         raise HyperglassError(assets_error)

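The rename is behavior-preserving: both import forms bind the same shared logger instance, which is why the diff only needs to touch import lines and call sites. A two-line check (illustrative):

    from logzero import logger
    from logzero import logger as log

    # Both names refer to the same object; aliasing changes nothing else.
    assert log is logger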