Mirror of https://github.com/checktheroads/hyperglass
WIP add VRF feature
@@ -22,21 +22,21 @@ class Construct:
     input parameters.
     """

-    def __init__(self, device, transport):
+    def __init__(self, device, query_data, transport):
         self.device = device
+        self.query_data = query_data
         self.transport = transport
+        self.query_target = self.query_data["target"]
+        self.query_vrf = self.query_data["vrf"]

-    def get_src(self, ver):
+    @staticmethod
+    def get_src(device, afi):
         """
         Returns source IP based on IP version of query destination.
         """
-        src = None
-        if ver == 4:
-            src = self.device.src_addr_ipv4.exploded
-        if ver == 6:
-            src = self.device.src_addr_ipv6.exploded
-        logger.debug(f"IPv{ver} Source: {src}")
-        return src
+        src_afi = f"src_addr_{afi}"
+        src = getattr(device, src_afi)
+        return src.exploded

     @staticmethod
     def device_commands(nos, afi, query_type):
@@ -49,123 +49,184 @@ class Construct:
         cmd_path = f"{nos}.{afi}.{query_type}"
         return operator.attrgetter(cmd_path)(commands)

-    def ping(self, target):
+    @staticmethod
+    def query_afi(query_target, query_vrf):
+        """
+        Constructs AFI string. If query_vrf is specified, AFI prefix is
+        "vpnv", if not, AFI prefix is "ipv"
+        """
+        ip_version = ipaddress.ip_network(query_target).version
+        if query_vrf:
+            afi = f"vpnv{ip_version}"
+        else:
+            afi = f"ipv{ip_version}"
+        return afi

+    def ping(self):
         """Constructs ping query parameters from pre-validated input"""
-        query_type = "ping"

         logger.debug(
-            f"Constructing {query_type} query for {target} via {self.transport}..."
+            f"Constructing ping query for {self.query_target} via {self.transport}"
         )
-        query = None
-        ip_version = ipaddress.ip_network(target).version
-        afi = f"ipv{ip_version}"
-        source = self.get_src(ip_version)

+        query = []
+        afi = self.query_afi(self.query_target, self.query_vrf)
+        source = self.get_src(self.device, afi)

         if self.transport == "rest":
             query = json.dumps(
                 {
-                    "query_type": query_type,
+                    "query_type": "ping",
                     "afi": afi,
+                    "vrf": self.query_vrf,
                     "source": source,
-                    "target": target,
+                    "target": self.query_target,
                 }
             )
         elif self.transport == "scrape":
-            conf_command = self.device_commands(self.device.commands, afi, query_type)
-            query = conf_command.format(target=target, source=source)
+            cmd = self.device_commands(self.device.commands, afi, "ping")
+            query = cmd.format(
+                target=self.query_target, source=source, vrf=self.query_vrf
+            )

         logger.debug(f"Constructed query: {query}")

         return query

-    def traceroute(self, target):
+    def traceroute(self):
         """
         Constructs traceroute query parameters from pre-validated input.
         """
-        query_type = "traceroute"
         logger.debug(
-            f"Constructing {query_type} query for {target} via {self.transport}..."
+            (
+                f"Constructing traceroute query for {self.query_target} "
+                f"via {self.transport}"
+            )
         )

         query = None
-        ip_version = ipaddress.ip_network(target).version
-        afi = f"ipv{ip_version}"
-        source = self.get_src(ip_version)
+        afi = self.query_afi(self.query_target, self.query_vrf)
+        source = self.get_src(self.device, afi)

         if self.transport == "rest":
             query = json.dumps(
                 {
-                    "query_type": query_type,
+                    "query_type": "traceroute",
                     "afi": afi,
+                    "vrf": self.query_vrf,
                     "source": source,
-                    "target": target,
+                    "target": self.query_target,
                 }
             )

         elif self.transport == "scrape":
-            conf_command = self.device_commands(self.device.commands, afi, query_type)
-            query = conf_command.format(target=target, source=source)
+            cmd = self.device_commands(self.device.commands, afi, "traceroute")
+            query = cmd.format(
+                target=self.query_target, source=source, vrf=self.query_vrf
+            )

         logger.debug(f"Constructed query: {query}")

         return query

-    def bgp_route(self, target):
+    def bgp_route(self):
         """
         Constructs bgp_route query parameters from pre-validated input.
         """
-        query_type = "bgp_route"
         logger.debug(
-            f"Constructing {query_type} query for {target} via {self.transport}..."
+            f"Constructing bgp_route query for {self.query_target} via {self.transport}"
        )

         query = None
-        ip_version = ipaddress.ip_network(target).version
-        afi = f"ipv{ip_version}"
+        afi = self.query_afi(self.query_target, self.query_vrf)
+        source = self.get_src(self.device, afi)

         if self.transport == "rest":
-            query = json.dumps({"query_type": query_type, "afi": afi, "target": target})
+            query = json.dumps(
+                {
+                    "query_type": "bgp_route",
+                    "afi": afi,
+                    "vrf": self.query_vrf,
+                    "source": source,
+                    "target": self.query_target,
+                }
+            )
         elif self.transport == "scrape":
-            conf_command = self.device_commands(self.device.commands, afi, query_type)
-            query = conf_command.format(target=target)
+            cmd = self.device_commands(self.device.commands, afi, "bgp_route")
+            query = cmd.format(
+                target=self.query_target, source=source, vrf=self.query_vrf
+            )

         logger.debug(f"Constructed query: {query}")

         return query

-    def bgp_community(self, target):
+    def bgp_community(self):
         """
         Constructs bgp_community query parameters from pre-validated
         input.
         """
-        query_type = "bgp_community"
         logger.debug(
-            f"Constructing {query_type} query for {target} via {self.transport}..."
+            (
+                f"Constructing bgp_community query for {self.query_target} "
+                f"via {self.transport}"
+            )
         )
-        afi = "dual"

         query = None
+        afi = self.query_afi(self.query_target, self.query_vrf)
+        source = self.get_src(self.device, afi)

         if self.transport == "rest":
-            query = json.dumps({"query_type": query_type, "afi": afi, "target": target})
+            query = json.dumps(
+                {
+                    "query_type": "bgp_community",
+                    "afi": afi,
+                    "vrf": self.query_vrf,
+                    "source": source,
+                    "target": self.query_target,
+                }
+            )
         elif self.transport == "scrape":
-            conf_command = self.device_commands(self.device.commands, afi, query_type)
-            afis = []
-            for afi in self.device.afis:
-                split_afi = afi.split("v")
-                afis.append(
-                    "".join([split_afi[0].upper(), "v", split_afi[1], " Unicast|"])
-                )
-            query = conf_command.format(target=target, afis="".join(afis))
+            cmd = self.device_commands(self.device.commands, afi, "bgp_community")
+            query = cmd.format(
+                target=self.query_target, source=source, vrf=self.query_vrf
+            )

         logger.debug(f"Constructed query: {query}")

         return query

-    def bgp_aspath(self, target):
+    def bgp_aspath(self):
         """
         Constructs bgp_aspath query parameters from pre-validated input.
         """
-        query_type = "bgp_aspath"
         logger.debug(
-            f"Constructing {query_type} query for {target} via {self.transport}..."
+            (
+                f"Constructing bgp_aspath query for {self.query_target} "
+                f"via {self.transport}"
+            )
         )
-        afi = "dual"

         query = None
+        afi = self.query_afi(self.query_target, self.query_vrf)
+        source = self.get_src(self.device, afi)

         if self.transport == "rest":
-            query = json.dumps({"query_type": query_type, "afi": afi, "target": target})
+            query = json.dumps(
+                {
+                    "query_type": "bgp_aspath",
+                    "afi": afi,
+                    "vrf": self.query_vrf,
+                    "source": source,
+                    "target": self.query_target,
+                }
+            )
         elif self.transport == "scrape":
-            conf_command = self.device_commands(self.device.commands, afi, query_type)
-            afis = []
-            for afi in self.device.afis:
-                split_afi = afi.split("v")
-                afis.append(
-                    "".join([split_afi[0].upper(), "v", split_afi[1], " Unicast|"])
-                )
-            query = conf_command.format(target=target, afis="".join(afis))
+            cmd = self.device_commands(self.device.commands, afi, "bgp_aspath")
+            query = cmd.format(
+                target=self.query_target, source=source, vrf=self.query_vrf
+            )

         logger.debug(f"Constructed query: {query}")

         return query
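The query_afi() helper added above is the pivot for VRF support: a "vpnv4"/"vpnv6" AFI steers lookups toward the new VRF-aware command templates, while "ipv4"/"ipv6" keeps the existing global-table behaviour. A minimal standalone sketch of that selection rule (target and VRF values are hypothetical):

    import ipaddress

    def query_afi(query_target, query_vrf):
        # "vpnv" prefix when a VRF is supplied, otherwise "ipv",
        # mirroring Construct.query_afi() in this commit.
        ip_version = ipaddress.ip_network(query_target).version
        if query_vrf:
            return f"vpnv{ip_version}"
        return f"ipv{ip_version}"

    print(query_afi("192.0.2.0/24", None))           # -> ipv4
    print(query_afi("2001:db8::/32", "CUSTOMER_A"))   # -> vpnv6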
@@ -41,13 +41,16 @@ class Connect:
     rest() connects to devices via HTTP for RESTful API communication
     """

-    def __init__(self, device_config, query_type, target, transport):
+    def __init__(self, device_config, query_data, transport):
         self.device_config = device_config
-        self.query_type = query_type
-        self.target = target
+        self.query_data = query_data
+        self.query_type = self.query_data["query_type"]
+        self.query_target = self.query_data["target"]
         self.transport = transport
         self.cred = getattr(credentials, device_config.credential)
-        self.query = getattr(Construct(device_config, transport), query_type)(target)
+        self.query = getattr(Construct(device_config, transport), self.query_type)(
+            self.query_data
+        )

     async def scrape_proxied(self):
         """
@@ -102,7 +105,14 @@ class Connect:
                 "via Netmiko library..."
             )
             nm_connect_direct = ConnectHandler(**scrape_host)
-            response = nm_connect_direct.send_command(self.query)
+            responses = []
+            for query in self.query:
+                raw = nm_connect_direct.send_command(query)
+                responses.append(raw)
+                logger.debug(f'Raw response for command "{query}":\n{raw}')
+            response = "\n".join(responses)
+            logger.debug(f"Response type:\n{type(response)}")

         except (NetMikoTimeoutException, NetmikoTimeoutError) as scrape_error:
             logger.error(
                 f"Timeout connecting to device {self.device_config.location}: "
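The scrape path above now treats self.query as an iterable of commands, sending each one over the same Netmiko session and joining the raw output. A rough standalone sketch of that pattern (all host values and the command itself are hypothetical):

    from netmiko import ConnectHandler

    scrape_host = {
        "device_type": "cisco_ios",   # hypothetical device parameters
        "host": "192.0.2.10",
        "username": "lguser",
        "password": "example-password",
    }
    commands = [
        "ping vrf CUSTOMER_A 192.0.2.1 repeat 5 source 198.51.100.1",  # hypothetical
    ]

    connection = ConnectHandler(**scrape_host)
    responses = [connection.send_command(command) for command in commands]
    response = "\n".join(responses)
    print(response)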
@@ -136,7 +146,7 @@ class Connect:
                     proxy=self.device_config.proxy,
                     error=params.messages.general,
                 )
-        if not response:
+        if response is None:
             logger.error(f"No response from device {self.device_config.location}")
             raise ScrapeError(
                 params.messages.connection_error,
@@ -226,12 +236,6 @@ class Connect:
         logger.debug(f"HTTP Headers: {headers}")
         logger.debug(f"URL endpoint: {endpoint}")

-        rest_exception = lambda msg: RestError(
-            params.messages.connection_error,
-            device_name=self.device_config.display_name,
-            error=msg,
-        )

         try:
             http_client = httpx.AsyncClient()
             raw_response = await http_client.post(
@@ -264,17 +268,33 @@ class Connect:
             logger.error(
                 f"Error connecting to device {self.device_config.location}: {rest_msg}"
             )
-            raise rest_exception(rest_msg)
+            raise RestError(
+                params.messages.connection_error,
+                device_name=self.device_config.display_name,
+                error=rest_msg,
+            )
         except OSError:
-            raise rest_exception("System error")
+            raise RestError(
+                params.messages.connection_error,
+                device_name=self.device_config.display_name,
+                error="System error",
+            )

         if raw_response.status_code != 200:
             logger.error(f"Response code is {raw_response.status_code}")
-            raise rest_exception(params.messages.general)
+            raise RestError(
+                params.messages.connection_error,
+                device_name=self.device_config.display_name,
+                error=params.messages.general,
+            )

         if not response:
             logger.error(f"No response from device {self.device_config.location}")
-            raise rest_exception(params.messages.noresponse_error)
+            raise RestError(
+                params.messages.connection_error,
+                device_name=self.device_config.display_name,
+                error=params.messages.noresponse_error,
+            )

         logger.debug(f"Output for query: {self.query}:\n{response}")
         return response
@@ -289,9 +309,9 @@ class Execute:

     def __init__(self, lg_data):
         self.query_data = lg_data
-        self.query_location = self.query_data["location"]
+        self.query_location = self.query_data["query_location"]
         self.query_type = self.query_data["query_type"]
-        self.query_target = self.query_data["target"]
+        self.query_target = self.query_data["query_target"]

     async def response(self):
         """
@@ -314,7 +334,7 @@ class Execute:
         output = params.messages.general

         transport = Supported.map_transport(device_config.nos)
-        connect = Connect(device_config, self.query_type, self.query_target, transport)
+        connect = Connect(device_config, self.query_data, transport)

         if Supported.is_rest(device_config.nos):
             output = await connect.rest()
@@ -109,6 +109,7 @@ class Networks:
                         "location": router_params["location"],
                         "hostname": router,
                         "display_name": router_params["display_name"],
+                        "vrfs": router_params["vrfs"],
                     }
                 )
             elif net_display not in locations_dict:
@@ -117,6 +118,7 @@ class Networks:
                         "location": router_params["location"],
                         "hostname": router,
                         "display_name": router_params["display_name"],
+                        "vrfs": router_params["vrfs"],
                     }
                 ]
         if not locations_dict:
@@ -142,8 +144,43 @@ class Networks:
            for (netname, display_name) in locations_dict.items()
         ]

+    def frontend_networks(self):
+        frontend_dict = {}
+        for (router, router_params) in self.routers.items():
+            for (netname, net_params) in self.networks.items():
+                if router_params["network"] == netname:
+                    net_display = net_params["display_name"]
+                    if net_display in frontend_dict:
+                        frontend_dict[net_display].update(
+                            {
+                                router: {
+                                    "location": router_params["location"],
+                                    "display_name": router_params["display_name"],
+                                    "vrfs": router_params["vrfs"],
+                                }
+                            }
+                        )
+                    elif net_display not in frontend_dict:
+                        frontend_dict[net_display] = {
+                            router: {
+                                "location": router_params["location"],
+                                "display_name": router_params["display_name"],
+                                "vrfs": router_params["vrfs"],
+                            }
+                        }
+        if not frontend_dict:
+            raise ConfigError(error_msg="Unable to build network to device mapping")
+        return frontend_dict


 net = Networks()
 networks = net.networks_verbose()
 logger.debug(networks)
 display_networks = net.networks_display()
+frontend_networks = net.frontend_networks()

+frontend_fields = {
+    "general": {"debug", "request_timeout"},
+    "branding": {"text"},
+    "messages": ...,
+}
+frontend_params = params.dict(include=frontend_fields)
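frontend_networks() and frontend_params above feed the frontend.json file written by render_frontend_config() further down. Roughly, the networks mapping groups devices under their network display name and now carries each device's VRF list; a sketch of the shape, with hypothetical network and device names:

    frontend_networks = {
        "Example Network": {            # network display_name (hypothetical)
            "rtr01": {                  # router key (hypothetical)
                "location": "pop1",
                "display_name": "Router 01",
                "vrfs": ["CUSTOMER_A", "CUSTOMER_B"],
            }
        }
    }
    # frontend_params is params.dict(include=frontend_fields), i.e. selected
    # fields from the "general", "branding", and "messages" config sections.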
@@ -53,6 +53,7 @@ class Router(BaseSettings):
     nos: str
     commands: Union[str, None] = None
     afis: List[str] = ["ipv4", "ipv6"]
+    vrfs: List[str] = []
     proxy: Union[str, None] = None

     @validator("nos")
@@ -307,6 +308,7 @@ class Branding(BaseSettings):
         bgp_aspath: str = "BGP AS Path"
         ping: str = "Ping"
         traceroute: str = "Traceroute"
+        vrf: str = "VRF"

     class Error404(BaseSettings):
         """Class model for 404 Error Page"""
@@ -369,11 +371,18 @@ class Messages(BaseSettings):
     connection_error: str = "Error connecting to {device_name}: {error}"
     authentication_error: str = "Authentication error occurred."
     noresponse_error: str = "No response."
+    vrf_not_associated: str = "{vrf} is not associated with {device_name}."
+    no_matching_vrfs: str = "No VRFs Match"


 class Features(BaseSettings):
     """Class model for params.features"""

+    class Vrf(BaseSettings):
+        """Class model for params.features.vrf"""
+
+        enable: bool = False
+
     class BgpRoute(BaseSettings):
         """Class model for params.features.bgp_route"""

@@ -493,6 +502,7 @@ class Features(BaseSettings):
     cache: Cache = Cache()
     max_prefix: MaxPrefix = MaxPrefix()
     rate_limit: RateLimit = RateLimit()
+    vrf: Vrf = Vrf()


 class Params(BaseSettings):
@@ -552,23 +562,78 @@ class Commands(BaseSettings):
             setattr(Commands, nos, NosModel(**cmds))
         return obj

+    # class CiscoIOS(BaseSettings):
+    #     """Class model for default cisco_ios commands"""

+    #     class Dual(BaseSettings):
+    #         """Default commands for dual afi commands"""

+    #         bgp_community: str = (
+    #             "show bgp all community {target} | section {afis}Network"
+    #         )

+    #         bgp_aspath: str = (
+    #             'show bgp all quote-regexp "{target}" | section {afis}Network'
+    #         )

+    #     class IPv4(BaseSettings):
+    #         """Default commands for ipv4 commands"""

+    #         bgp_route: str = "show bgp ipv4 unicast {target} | exclude pathid:|Epoch"
+    #         ping: str = "ping {target} repeat 5 source {source} | exclude Type escape"
+    #         traceroute: str = (
+    #             "traceroute {target} timeout 1 probe 2 source {source} "
+    #             "| exclude Type escape"
+    #         )

+    #     class IPv6(BaseSettings):
+    #         """Default commands for ipv6 commands"""

+    #         bgp_route: str = "show bgp ipv6 unicast {target} | exclude pathid:|Epoch"
+    #         ping: str = (
+    #             "ping ipv6 {target} repeat 5 source {source} | exclude Type escape"
+    #         )
+    #         traceroute: str = (
+    #             "traceroute ipv6 {target} timeout 1 probe 2 source {source} "
+    #             "| exclude Type escape"
+    #         )

+    #     dual: Dual = Dual()
+    #     ipv4: IPv4 = IPv4()
+    #     ipv6: IPv6 = IPv6()
     class CiscoIOS(BaseSettings):
         """Class model for default cisco_ios commands"""

-        class Dual(BaseSettings):
+        class VPNv4(BaseSettings):
             """Default commands for dual afi commands"""

-            bgp_community: str = (
-                "show bgp all community {target} | section {afis}Network"
+            bgp_community: str = "show bgp {afi} unicast vrf {vrf} community {target}"
+            bgp_aspath: str = 'show bgp {afi} unicast vrf {vrf} quote-regexp "{target}"'
+            bgp_route: str = "show bgp {afi} unicast vrf {vrf} {target}"
+            ping: str = "ping vrf {vrf} {target} repeat 5 source {source}"
+            traceroute: str = (
+                "traceroute vrf {vrf} {target} timeout 1 probe 2 source {source} "
+                "| exclude Type escape"
             )
-            bgp_aspath: str = (
-                'show bgp all quote-regexp "{target}" | section {afis}Network'

+        class VPNv6(BaseSettings):
+            """Default commands for dual afi commands"""

+            bgp_community: str = "show bgp {afi} unicast vrf {vrf} community {target}"
+            bgp_aspath: str = 'show bgp {afi} unicast vrf {vrf} quote-regexp "{target}"'
+            bgp_route: str = "show bgp {afi} unicast vrf {vrf} {target}"
+            ping: str = "ping vrf {vrf} {target} repeat 5 source {source}"
+            traceroute: str = (
+                "traceroute vrf {vrf} {target} timeout 1 probe 2 source {source} "
+                "| exclude Type escape"
             )

         class IPv4(BaseSettings):
             """Default commands for ipv4 commands"""

-            bgp_route: str = "show bgp ipv4 unicast {target} | exclude pathid:|Epoch"
+            bgp_community: str = "show bgp {afi} unicast community {target}"
+            bgp_aspath: str = 'show bgp {afi} unicast quote-regexp "{target}"'
+            bgp_route: str = "show bgp {afi} unicast {target} | exclude pathid:|Epoch"
             ping: str = "ping {target} repeat 5 source {source} | exclude Type escape"
             traceroute: str = (
                 "traceroute {target} timeout 1 probe 2 source {source} "
@@ -578,16 +643,19 @@ class Commands(BaseSettings):
         class IPv6(BaseSettings):
             """Default commands for ipv6 commands"""

-            bgp_route: str = "show bgp ipv6 unicast {target} | exclude pathid:|Epoch"
+            bgp_community: str = "show bgp {afi} unicast community {target}"
+            bgp_aspath: str = 'show bgp {afi} unicast quote-regexp "{target}"'
+            bgp_route: str = "show bgp {afi} unicast {target} | exclude pathid:|Epoch"
             ping: str = (
-                "ping ipv6 {target} repeat 5 source {source} | exclude Type escape"
+                "ping {afi} {target} repeat 5 source {source} | exclude Type escape"
             )
             traceroute: str = (
                 "traceroute ipv6 {target} timeout 1 probe 2 source {source} "
                 "| exclude Type escape"
             )

-        dual: Dual = Dual()
+        vpnv4: VPNv4 = VPNv4()
+        vpnv6: VPNv6 = VPNv6()
         ipv4: IPv4 = IPv4()
         ipv6: IPv6 = IPv6()
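These vpnv4/vpnv6 templates are what Construct's cmd.format(target=..., source=..., vrf=...) call fills in. A quick illustration with hypothetical VRF name and addresses:

    ping_vpnv4 = "ping vrf {vrf} {target} repeat 5 source {source}"

    print(ping_vpnv4.format(vrf="CUSTOMER_A", target="192.0.2.1", source="198.51.100.1"))
    # ping vrf CUSTOMER_A 192.0.2.1 repeat 5 source 198.51.100.1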
@@ -1,6 +1,7 @@
 """Hyperglass Front End"""

 # Standard Library Imports
+import operator
 import time
 from ast import literal_eval
 from pathlib import Path
@@ -91,7 +92,7 @@ limiter = Limiter(app, key_func=get_remote_address, global_limits=[rate_limit_si

 # Prometheus Config
 count_data = Counter(
-    "count_data", "Query Counter", ["source", "query_type", "loc_id", "target"]
+    "count_data", "Query Counter", ["source", "query_type", "loc_id", "target", "vrf"]
 )

 count_errors = Counter(
@@ -241,9 +242,10 @@ async def hyperglass_main(request):
     lg_data = request.json
     logger.debug(f"Unvalidated input: {lg_data}")

-    query_location = lg_data.get("location")
+    query_location = lg_data.get("query_location")
     query_type = lg_data.get("query_type")
-    query_target = lg_data.get("target")
+    query_target = lg_data.get("query_target")
+    query_vrf = lg_data.get("query_vrf", None)

     # Return error if no target is specified
     if not query_target:
@@ -284,6 +286,21 @@ async def hyperglass_main(request):
             }
         )

+    device_selector = getattr(devices, query_location)
+    device_vrfs = device_selector.vrfs
+    device_display_name = device_selector.display_name
+    if query_vrf and query_vrf not in device_vrfs:
+        logger.debug(f"VRF {query_vrf} not associated with {query_location}")
+        raise InvalidUsage(
+            {
+                "message": params.messages.vrf_not_associated.format(
+                    vrf=query_vrf, device_name=device_display_name
+                ),
+                "alert": "warning",
+                "keywords": [query_vrf, device_display_name],
+            }
+        )
+
     # Get client IP address for Prometheus logging & rate limiting
     client_addr = get_remote_address(request)

@@ -291,8 +308,9 @@ async def hyperglass_main(request):
     count_data.labels(
         client_addr,
         lg_data.get("query_type"),
-        lg_data.get("location"),
-        lg_data.get("target"),
+        lg_data.get("query_location"),
+        lg_data.get("query_target"),
+        lg_data.get("query_vrf", None),
     ).inc()

     logger.debug(f"Client Address: {client_addr}")
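With the renamed keys, the body hyperglass_main() now receives from the front end looks roughly like the sketch below (values hypothetical; the form posts it as JSON to /query and the handler reads it with lg_data.get(...)):

    import json

    request_body = json.dumps(
        {
            "query_location": "rtr01",        # hypothetical device key
            "query_type": "bgp_route",
            "query_target": "192.0.2.0/24",
            "query_vrf": "CUSTOMER_A",        # hypothetical VRF; null when unused
            "response_format": "html",
        }
    )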
@@ -28,7 +28,7 @@
 </div>
 <div class="col-md col-sm-12">
   <select class="form-control form-control-lg hg-select custom-select-lg" id="query_type"
-    title="{{ branding.text.query_type }}" data-live-search="true" required>
+    title="{{ branding.text.query_type }}" required>
     {% if features.bgp_route.enable %}
     <option id="bgp_route" value="bgp_route" data-display-name="{{ branding.text.bgp_route }}">
       {{ branding.text.bgp_route }}</option>
@@ -52,12 +52,12 @@
       </select>
     </div>
   </div>
-  <div class="form-row mb-4">
+  <div class="form-row mb-4" id="hg-row-2">
+    <div id="hg-container-vrf"></div>
     <div class="col" id="hg-target-container">
       <div class="input-group input-group-lg">
         <input class="form-control" type="text" placeholder="{{ branding.text.query_target }}"
-          aria-label="{{ branding.text.query_target }}" aria-describedby="query_target" id="query_target"
-          required>
+          aria-label="{{ branding.text.query_target }}" aria-describedby="query_target" id="query_target" required>
         <div class="input-group-append" id="hg-target-append">
           <button class="btn btn-primary" id="hg-submit-button" type="submit">
             <div id="hg-submit-icon">
@@ -13,6 +13,8 @@ from logzero import logger
 # Project Imports
 from hyperglass.configuration import logzero_config  # noqa: F401
 from hyperglass.configuration import params
+from hyperglass.configuration import frontend_params
+from hyperglass.configuration import frontend_networks
 from hyperglass.exceptions import HyperglassError

 # Module Directories
@@ -32,7 +34,9 @@ def render_frontend_config():
     rendered_frontend_file = hyperglass_root.joinpath("static/frontend.json")
     try:
         with rendered_frontend_file.open(mode="w") as frontend_file:
-            frontend_file.write(params.json())
+            frontend_file.write(
+                json.dumps({"config": frontend_params, "networks": frontend_networks})
+            )
     except jinja2.exceptions as frontend_error:
         logger.error(f"Error rendering front end config: {frontend_error}")
         raise HyperglassError(frontend_error)
@@ -9,14 +9,19 @@ const animsition = require('animsition');
 const ClipboardJS = require('clipboard');
 const frontEndConfig = require('./frontend.json');

-const cfgGeneral = frontEndConfig.general;
-const inputMessages = frontEndConfig.messages;
+const cfgGeneral = frontEndConfig.config.general;
+const cfgBranding = frontEndConfig.config.branding;
+const cfgNetworks = frontEndConfig.networks;
+const inputMessages = frontEndConfig.config.messages;
 const pageContainer = $('#hg-page-container');
 const formContainer = $('#hg-form');
 const titleColumn = $('#hg-title-col');
+const rowTwo = $('#hg-row-2');
+const vrfContainer = $('#hg-container-vrf');
 const queryLocation = $('#location');
 const queryType = $('#query_type');
 const queryTarget = $('#query_target');
+const queryVrf = $('#query_vrf');
 const queryTargetAppend = $('#hg-target-append');
 const submitIcon = $('#hg-submit-icon');
 const resultsContainer = $('#hg-results');
@@ -28,7 +33,14 @@ const footerTermsBtn = $('#hg-footer-terms-btn');
 const footerCreditBtn = $('#hg-footer-credit-btn');
 const footerPopoverTemplate = '<div class="popover mw-sm-75 mw-md-50 mw-lg-25" role="tooltip"><div class="arrow"></div><h3 class="popover-header"></h3><div class="popover-body"></div></div>';

 let bsBlurState = false;
+const supportedBtn = qt => `<button class="btn btn-secondary hg-info-btn" id="hg-info-btn-${qt}" data-hg-type="${qt}" type="button"><div id="hg-info-icon-${qt}"><i class="remixicon-information-line"></i></div></button>`;

+const vrfSelect = title => `
+  <select class="form-control form-control-lg hg-select" id="query_vrf" title="${title}" disabled>
+  </select>
+`;

+const vrfOption = txt => `<option value="${txt}">${txt}</option>`;

 class InputInvalid extends Error {
   constructor(validationMsg, invalidField, fieldContainer) {
@@ -69,7 +81,7 @@ const resetResults = () => {

 const reloadPage = () => {
   queryLocation.selectpicker('deselectAll');
-  queryLocation.selectpicker('val', '');
+  queryLocation.selectpicker('val', []);
   queryType.selectpicker('val', '');
   queryTarget.val('');
   resultsAccordion.empty();
@@ -85,9 +97,6 @@ queryLocation.selectpicker({
   style: '',
   styleBase: 'form-control',
   tickIcon: 'remixicon-check-line',
-}).nextAll('.dropdown-menu.show').on('focus', '.bs-searchbox input', (e) => {
-  $(e.currentTarget).blur();
-  bsBlurState = true;
 }).on('hidden.bs.select', (e) => {
   $(e.currentTarget).nextAll('.dropdown-menu.show').find('input').blur();
 });
@@ -159,8 +168,6 @@ $(document).ready(() => {
   }
 });

-const supportedBtn = qt => `<button class="btn btn-secondary hg-info-btn" id="hg-info-btn-${qt}" data-hg-type="${qt}" type="button"><div id="hg-info-icon-${qt}"><i class="remixicon-information-line"></i></div></button>`;

 queryType.on('changed.bs.select', () => {
   const queryTypeId = queryType.val();
   const queryTypeBtn = $('.hg-info-btn');
@@ -172,12 +179,78 @@ queryType.on('changed.bs.select', () => {
   }
 });

+function findIntersection(firstSet, ...sets) {
+  const count = sets.length;
+  const result = new Set(firstSet);
+  firstSet.forEach((item) => {
+    let i = count;
+    let allHave = true;
+    while (i--) {
+      allHave = sets[i].has(item);
+      if (!allHave) { break; }
+    }
+    if (!allHave) {
+      result.delete(item);
+    }
+  });
+  return result;
+}

+queryLocation.on('changed.bs.select', (e, clickedIndex, isSelected, previousValue) => {
+  const net = $(e.currentTarget);
+  vrfContainer.empty().removeClass('col');
+  const queryLocationIds = net.val();
+  if (Array.isArray(queryLocationIds) && (queryLocationIds.length)) {
+    const queryLocationNet = net[0][clickedIndex].dataset.netname;
+    const selectedVrfs = () => {
+      const allVrfs = [];
+      $.each(queryLocationIds, (i, loc) => {
+        const locVrfs = cfgNetworks[queryLocationNet][loc].vrfs;
+        allVrfs.push(new Set(locVrfs));
+      });
+      return allVrfs;
+    };
+    const intersectingVrfs = Array.from(findIntersection(...selectedVrfs()));
+    console.log(intersectingVrfs);
+    // Add the VRF select element
+    if (vrfContainer.find('#query_vrf').length === 0) {
+      vrfContainer.addClass('col').html(vrfSelect(cfgBranding.text.vrf));
+    }
+    // Build the select options for each VRF in array
+    const vrfHtmlList = [];
+    $.each(intersectingVrfs, (i, vrf) => {
+      vrfHtmlList.push(vrfOption(vrf));
+    });
+    // Add the options to the VRF select element, enable it, initialize Bootstrap Select
+    vrfContainer.find('#query_vrf').html(vrfHtmlList.join('')).removeAttr('disabled').selectpicker({
+      iconBase: '',
+      liveSearch: false,
+      style: '',
+      styleBase: 'form-control',
+    });
+    if (intersectingVrfs.length === 0) {
+      vrfContainer.find('#query_vrf').selectpicker('destroy');
+      vrfContainer.find('#query_vrf').prop('title', inputMessages.no_matching_vrfs).prop('disabled', true);
+      vrfContainer.find('#query_vrf').selectpicker({
+        iconBase: '',
+        liveSearch: false,
+        style: '',
+        styleBase: 'form-control',
+      });
+    }
+  }
+});

 queryTargetAppend.on('click', '.hg-info-btn', () => {
   const queryTypeId = $('.hg-info-btn').data('hg-type');
   $(`#hg-info-${queryTypeId}`).modal('show');
 });

-const queryApp = (queryType, queryTypeName, locationList, queryTarget) => {
+$('#hg-row-2').find('#query_vrf').on('hidden.bs.select', (e) => {
+  $(e.currentTarget).nextAll('.form-control.dropdown-toggle').blur();
+});

+const queryApp = (queryType, queryTypeName, locationList, queryTarget, queryVrf) => {
   const resultsTitle = `${queryTypeName} Query for ${queryTarget}`;

   $('#hg-results-title').html(resultsTitle);
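The handler above only offers VRFs that every selected location carries; the rule itself is a plain set intersection. Sketched in Python for clarity, with hypothetical locations and VRF lists:

    location_vrfs = {
        "rtr01": {"default", "CUSTOMER_A"},      # hypothetical
        "rtr02": {"CUSTOMER_A", "CUSTOMER_B"},   # hypothetical
    }
    selected = ["rtr01", "rtr02"]

    # Keep only VRFs present on every selected location
    intersecting_vrfs = set.intersection(*(location_vrfs[loc] for loc in selected))
    print(sorted(intersecting_vrfs))  # ['CUSTOMER_A']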
@@ -255,9 +328,10 @@ const queryApp = (queryType, queryTypeName, locationList, queryTarget) => {
     url: '/query',
     method: 'POST',
     data: JSON.stringify({
-      location: loc,
+      query_location: loc,
       query_type: queryType,
-      target: queryTarget,
+      query_target: queryTarget,
+      query_vrf: queryVrf,
       response_format: 'html',
     }),
     contentType: 'application/json; charset=utf-8',
@@ -317,6 +391,7 @@ $('#lgForm').on('submit', (e) => {
   const queryType = $('#query_type').val();
   const queryLocation = $('#location').val();
   const queryTarget = $('#query_target').val();
+  const queryVrf = $('#query_vrf').val() || null;

   try {
     // message, thing to circle in red, place to put error text
@@ -340,7 +415,7 @@ $('#lgForm').on('submit', (e) => {
     return false;
   }
   const queryTypeTitle = $(`#${queryType}`).data('display-name');
-  queryApp(queryType, queryTypeTitle, queryLocation, queryTarget);
+  queryApp(queryType, queryTypeTitle, queryLocation, queryTarget, queryVrf);
   $('#hg-form').animsition('out', $('#hg-results'), '#');
   $('#hg-form').hide();
   swapSpacing('results');