mirror of https://github.com/checktheroads/hyperglass
synced 2024-05-11 05:55:08 +00:00

replace flake8 with ruff for linting, refactor issues
33 .flake8
@ -1,33 +0,0 @@
[flake8]
max-line-length=100
count=True
show-source=False
statistics=True
exclude=.git, __pycache__, hyperglass/ui, hyperglass/plugins/external, hyperglass/api/examples/*.py, hyperglass/compat/_sshtunnel.py
filename=*.py
per-file-ignores=
    hyperglass/main.py:E402
    # Disable classmethod warning for validator decorators
    hyperglass/models/*.py:N805,E0213,R0903,E501,C0301
    hyperglass/models/api/*.py:N805,E0213,R0903,E501,C0301
    hyperglass/models/commands/*.py:N805,E0213,R0903,E501,C0301
    hyperglass/parsing/models/*.py:N805,E0213,R0903
    hyperglass/defaults/*/*.py:E501
    hyperglass/configuration/models/*.py:N805,E0213,R0903,E501,C0301
    # Disable unused import warning for modules
    hyperglass/*/__init__.py:F401
    hyperglass/models/*/__init__.py:F401
    # Disable assertion and docstring checks on tests.
    hyperglass/**/test_*.py:S101,D103,D100,D104
    hyperglass/**/tests/*.py:S101,D103,D100,D104
    hyperglass/**/tests/__init__.py:D103,D100,D104
    hyperglass/state/hooks.py:F811
    # Ignore whitespace in docstrings
    hyperglass/cli/static.py:W293
    # Ignore docstring standards
    hyperglass/cli/main.py:D400,D403
ignore=W503,R504,D202,S403,S301,S404,E731,D402,IF100,B008
select=B, BLK, C, D, E, F, I, II, N, P, PIE, S, R, W
disable-noqa=False
hang-closing=False
max-complexity=10
1 .gitignore vendored
@ -5,7 +5,6 @@ TODO*
test.py
.DS_Store
.idea
.vscode
old_*.py
*.rdb
#
@ -1,10 +1,15 @@
repos:
  - repo: https://github.com/pre-commit/pre-commit-hooks
    rev: v2.3.0
  - repo: https://github.com/pycqa/isort
    rev: 5.11.2
    hooks:
      - id: flake8
        stages:
          - commit
      - id: isort
        args: ['--profile', 'black', '--filter-files', '--check']
  - repo: https://github.com/charliermarsh/ruff-pre-commit
    rev: v0.0.192
    hooks:
      - id: ruff
        # Respect `exclude` and `extend-exclude` settings.
        args: ['--force-exclude']
  - repo: local
    hooks:
      - id: typescript
7 .vscode/settings.json vendored Normal file
@ -0,0 +1,7 @@
{
  "yaml.format.singleQuote": true,
  "eslint.workingDirectories": ["./hyperglass/ui"],
  "python.linting.mypyEnabled": false,
  "python.linting.enabled": false,
  "prettier.configPath": "./hyperglass/ui/.prettierrc"
}
@ -150,8 +150,7 @@ async def docs(params: "Params" = Depends(get_params)):
        return docs_func(
            openapi_url=params.docs.openapi_url, title=params.site_title + " - API Docs"
        )
    else:
        raise HTTPException(detail="Not found", status_code=404)
    raise HTTPException(detail="Not found", status_code=404)


async def router(id: str, devices: "Devices" = Depends(get_devices)):
@ -269,9 +269,9 @@ def _plugins(
    if len(matching) == 0:
        echo.error(f"No plugins matching {search!r}")
        raise typer.Exit(1)
    else:
        echo._console.print(Columns(matching))
        raise typer.Exit(0)

    echo._console.print(Columns(matching))
    raise typer.Exit(0)

    echo._console.print(Columns(all_plugins))
@ -36,8 +36,8 @@ def load_dsl(path: Path, *, empty_allowed: bool) -> LoadedConfig:

            loader = yaml.safe_load

        except ImportError:
            raise ConfigLoaderMissing(path)
        except ImportError as err:
            raise ConfigLoaderMissing(path) from err
    elif path.suffix == ".toml":
        try:
            # Third Party
@ -45,8 +45,8 @@ def load_dsl(path: Path, *, empty_allowed: bool) -> LoadedConfig:

            loader = toml.load

        except ImportError:
            raise ConfigLoaderMissing(path)
        except ImportError as err:
            raise ConfigLoaderMissing(path) from err

    elif path.suffix == ".json":
        # Standard Library
@ -112,10 +112,10 @@ def load_config(name: str, *, required: bool) -> LoadedConfig:
    if path is None and required is False:
        return {}

    elif path.suffix == ".py":
    if path.suffix == ".py":
        return load_python(path, empty_allowed=not required)

    elif path.suffix.replace(".", "") in CONFIG_EXTENSIONS:
    if path.suffix.replace(".", "") in CONFIG_EXTENSIONS:
        return load_dsl(path, empty_allowed=not required)

    raise ConfigError(
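Most of the exception-handling hunks in this commit are the same ruff fix (bugbear's B904 rule): bind the caught exception and re-raise with `from`. A minimal standalone sketch, not taken from the repository, of what that preserves:

class ConfigLoaderMissing(Exception):
    """Stand-in for hyperglass's exception of the same name."""

def get_loader():
    try:
        import not_a_real_module  # hypothetical optional dependency; forces ImportError
    except ImportError as err:
        # With `from err`, the ImportError is chained as __cause__ instead of
        # being reported as a second, unrelated error during handling.
        raise ConfigLoaderMissing("loader dependency is not installed") from err

try:
    get_loader()
except ConfigLoaderMissing as exc:
    print(repr(exc.__cause__))  # ModuleNotFoundError("No module named 'not_a_real_module'")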
@ -156,7 +156,7 @@ class Formatter:
        if self.platform in ("bird", "bird_ssh"):
            if self.query_type == "bgp_aspath":
                return self._with_formatter(self._bird_bgp_aspath)
            elif self.query_type == "bgp_community":
            if self.query_type == "bgp_community":
                return self._with_formatter(self._bird_bgp_community)
        return self._with_formatter(self._default)
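The other change repeated throughout the diff is flattening `elif`/`else` branches that follow a `return`, which the flake8-return ("RET") rules now selected by ruff flag as superfluous. A standalone before/after sketch with illustrative names:

def body_kwargs_before(body_format: str, data: dict) -> dict:
    # Flagged: every branch returns, so the elif/else add nothing.
    if body_format == "json":
        return {"json": data}
    elif body_format == "text":
        return {"data": data}
    else:
        return {}

def body_kwargs_after(body_format: str, data: dict) -> dict:
    # The refactored shape: plain `if` guards with a final fallback return.
    if body_format == "json":
        return {"json": data}
    if body_format == "text":
        return {"data": data}
    return {}

assert body_kwargs_before("json", {}) == body_kwargs_after("json", {})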
@ -43,7 +43,7 @@ class HttpClient(Connection):
                self.config._attribute_map.query_location: self.query_data.query_location,
                self.config._attribute_map.query_type: self.query_data.query_type,
            }
        elif isinstance(self.config.query, t.Dict):
        if isinstance(self.config.query, t.Dict):
            return {
                key: value.format(
                    **{
@ -65,13 +65,13 @@ class HttpClient(Connection):
        if self.config.body_format == "json":
            return {"json": data}

        elif self.config.body_format == "yaml":
        if self.config.body_format == "yaml":
            # Third Party
            import yaml

            return {"content": yaml.dump(data), "headers": {"content-type": "text/yaml"}}

        elif self.config.body_format == "xml":
        if self.config.body_format == "xml":
            # Third Party
            import xmltodict  # type: ignore

@ -79,7 +79,7 @@ class HttpClient(Connection):
                "content": xmltodict.unparse({"query": data}),
                "headers": {"content-type": "application/xml"},
            }
        elif self.config.body_format == "text":
        if self.config.body_format == "text":
            return {"data": data}

        return {}
@ -108,10 +108,10 @@ class HttpClient(Connection):
            responses += (data,)

        except (httpx.TimeoutException) as error:
            raise DeviceTimeout(error=error, device=self.device)
            raise DeviceTimeout(error=error, device=self.device) from error

        except (httpx.HTTPStatusError) as error:
            if error.response.status_code == 401:
                raise AuthError(error=error, device=self.device)
            raise RestError(error=error, device=self.device)
                raise AuthError(error=error, device=self.device) from error
            raise RestError(error=error, device=self.device) from error
        return responses
@ -54,6 +54,8 @@ class SSHConnection(Connection):
            log.error(
                f"Error connecting to device {self.device.name} via " f"proxy {proxy.name}"
            )
            raise ScrapeError(error=scrape_proxy_error, device=self.device)
            raise ScrapeError(
                error=scrape_proxy_error, device=self.device
            ) from scrape_proxy_error

        return opener

@ -102,10 +102,10 @@ class NetmikoConnection(SSHConnection):
            nm_connect_direct.disconnect()

        except NetMikoTimeoutException as scrape_error:
            raise DeviceTimeout(error=scrape_error, device=self.device)
            raise DeviceTimeout(error=scrape_error, device=self.device) from scrape_error

        except NetMikoAuthenticationException as auth_error:
            raise AuthError(error=auth_error, device=self.device)
            raise AuthError(error=auth_error, device=self.device) from auth_error

        if not responses:
            raise ResponseEmpty(query=self.query_data)
2 hyperglass/external/__init__.py vendored
@ -7,9 +7,11 @@ from .generic import BaseExternal
from .msteams import MSTeams
from .bgptools import network_info, network_info_sync
from .webhooks import Webhook
from .http_client import HTTPClient

__all__ = (
    "BaseExternal",
    "HTTPClient",
    "MSTeams",
    "network_info_sync",
    "network_info",
4 hyperglass/external/_base.py vendored
@ -243,8 +243,8 @@ class BaseExternal:
        if not isinstance(timeout, int):
            try:
                timeout = int(timeout)
            except TypeError:
                raise self._exception(f"Timeout must be an int, got: {str(timeout)}")
            except TypeError as err:
                raise self._exception(f"Timeout must be an int, got: {str(timeout)}") from err
        request["timeout"] = timeout

        log.debug("Constructed request parameters {}", request)
2 hyperglass/external/bgptools.py vendored
@ -148,7 +148,7 @@ async def network_info(*targets: str) -> TargetData:

    # Try to use cached data for each of the items in the list of
    # resources.
    for target in (t for t in query_targets if t in cached):
    for target in (target for target in query_targets if target in cached):
        # Reassign the cached network info to the matching resource.
        query_data[target] = cached[target]
        log.debug("Using cached network info for {}", target)
234 hyperglass/external/http_client.py vendored Normal file
@ -0,0 +1,234 @@
"""HTTP Client for plugin use."""

# Standard Library
import typing as t

# Project
from hyperglass.models.fields import JsonValue, Primitives

# Local
from ._base import BaseExternal


class HTTPClient(BaseExternal, name="HTTPClient"):
    """Wrapper around a standard HTTP Client."""

    def __init__(self: "HTTPClient", base_url: str, timeout: int = 10) -> None:
        """Create an HTTPClient instance."""
        super().__init__(base_url=base_url, timeout=timeout, parse=False)

    async def aget(
        self: "HTTPClient",
        endpoint: str,
        headers: t.Dict[str, str] = None,
        params: t.Dict[str, JsonValue[Primitives]] = None,
        data: t.Optional[t.Any] = None,
        timeout: t.Optional[int] = None,
    ) -> t.Any:
        """Perform an async HTTP GET request."""
        return await self._arequest(
            method="GET",
            endpoint=endpoint,
            headers=headers,
            params=params,
            data=data,
            timeout=timeout,
        )

    async def apost(
        self: "HTTPClient",
        endpoint: str,
        headers: t.Dict[str, str] = None,
        params: t.Dict[str, JsonValue[Primitives]] = None,
        data: t.Optional[t.Any] = None,
        timeout: t.Optional[int] = None,
    ) -> t.Any:
        """Perform an async HTTP POST request."""
        return await self._arequest(
            method="POST",
            endpoint=endpoint,
            headers=headers,
            params=params,
            data=data,
            timeout=timeout,
        )

    async def aput(
        self: "HTTPClient",
        endpoint: str,
        headers: t.Dict[str, str] = None,
        params: t.Dict[str, JsonValue[Primitives]] = None,
        data: t.Optional[t.Any] = None,
        timeout: t.Optional[int] = None,
    ) -> t.Any:
        """Perform an async HTTP PUT request."""
        return await self._arequest(
            method="PUT",
            endpoint=endpoint,
            headers=headers,
            params=params,
            data=data,
            timeout=timeout,
        )

    async def adelete(
        self: "HTTPClient",
        endpoint: str,
        headers: t.Dict[str, str] = None,
        params: t.Dict[str, JsonValue[Primitives]] = None,
        data: t.Optional[t.Any] = None,
        timeout: t.Optional[int] = None,
    ) -> t.Any:
        """Perform an async HTTP DELETE request."""
        return await self._arequest(
            method="DELETE",
            endpoint=endpoint,
            headers=headers,
            params=params,
            data=data,
            timeout=timeout,
        )

    async def apatch(
        self: "HTTPClient",
        endpoint: str,
        headers: t.Dict[str, str] = None,
        params: t.Dict[str, JsonValue[Primitives]] = None,
        data: t.Optional[t.Any] = None,
        timeout: t.Optional[int] = None,
    ) -> t.Any:
        """Perform an async HTTP PATCH request."""
        return await self._arequest(
            method="PATCH",
            endpoint=endpoint,
            headers=headers,
            params=params,
            data=data,
            timeout=timeout,
        )

    async def ahead(
        self: "HTTPClient",
        endpoint: str,
        headers: t.Dict[str, str] = None,
        params: t.Dict[str, JsonValue[Primitives]] = None,
        data: t.Optional[t.Any] = None,
        timeout: t.Optional[int] = None,
    ) -> t.Any:
        """Perform an async HTTP HEAD request."""
        return await self._arequest(
            method="HEAD",
            endpoint=endpoint,
            headers=headers,
            params=params,
            data=data,
            timeout=timeout,
        )

    def get(
        self: "HTTPClient",
        endpoint: str,
        headers: t.Dict[str, str] = None,
        params: t.Dict[str, JsonValue[Primitives]] = None,
        data: t.Optional[t.Any] = None,
        timeout: t.Optional[int] = None,
    ) -> t.Any:
        """Perform an HTTP GET request."""
        return self._request(
            method="GET",
            endpoint=endpoint,
            headers=headers,
            params=params,
            data=data,
            timeout=timeout,
        )

    def post(
        self: "HTTPClient",
        endpoint: str,
        headers: t.Dict[str, str] = None,
        params: t.Dict[str, JsonValue[Primitives]] = None,
        data: t.Optional[t.Any] = None,
        timeout: t.Optional[int] = None,
    ) -> t.Any:
        """Perform an HTTP POST request."""
        return self._request(
            method="POST",
            endpoint=endpoint,
            headers=headers,
            params=params,
            data=data,
            timeout=timeout,
        )

    def put(
        self: "HTTPClient",
        endpoint: str,
        headers: t.Dict[str, str] = None,
        params: t.Dict[str, JsonValue[Primitives]] = None,
        data: t.Optional[t.Any] = None,
        timeout: t.Optional[int] = None,
    ) -> t.Any:
        """Perform an HTTP PUT request."""
        return self._request(
            method="PUT",
            endpoint=endpoint,
            headers=headers,
            params=params,
            data=data,
            timeout=timeout,
        )

    def delete(
        self: "HTTPClient",
        endpoint: str,
        headers: t.Dict[str, str] = None,
        params: t.Dict[str, JsonValue[Primitives]] = None,
        data: t.Optional[t.Any] = None,
        timeout: t.Optional[int] = None,
    ) -> t.Any:
        """Perform an HTTP DELETE request."""
        return self._request(
            method="DELETE",
            endpoint=endpoint,
            headers=headers,
            params=params,
            data=data,
            timeout=timeout,
        )

    def patch(
        self: "HTTPClient",
        endpoint: str,
        headers: t.Dict[str, str] = None,
        params: t.Dict[str, JsonValue[Primitives]] = None,
        data: t.Optional[t.Any] = None,
        timeout: t.Optional[int] = None,
    ) -> t.Any:
        """Perform an HTTP PATCH request."""
        return self._request(
            method="PATCH",
            endpoint=endpoint,
            headers=headers,
            params=params,
            data=data,
            timeout=timeout,
        )

    def head(
        self: "HTTPClient",
        endpoint: str,
        headers: t.Dict[str, str] = None,
        params: t.Dict[str, JsonValue[Primitives]] = None,
        data: t.Optional[t.Any] = None,
        timeout: t.Optional[int] = None,
    ) -> t.Any:
        """Perform an HTTP HEAD request."""
        return self._request(
            method="HEAD",
            endpoint=endpoint,
            headers=headers,
            params=params,
            data=data,
            timeout=timeout,
        )
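A hedged usage sketch for the new wrapper; the base URL, endpoint, and response shape are invented for illustration, and each method simply forwards to BaseExternal's `_request`/`_arequest` with the corresponding HTTP verb:

import asyncio

from hyperglass.external import HTTPClient

async def main() -> None:
    client = HTTPClient(base_url="https://example.com", timeout=10)
    # Async methods are prefixed with `a`; the sync twins drop the prefix.
    status = await client.aget("/api/status", params={"verbose": True})
    print(status)

asyncio.run(main())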
4 hyperglass/external/webhooks.py vendored
@ -31,8 +31,8 @@ class Webhook(BaseExternal):
        try:
            provider_class = PROVIDER_MAP[config.provider]
            return provider_class(config)
        except KeyError:
        except KeyError as err:
            raise UnsupportedError(
                message="{p} is not yet supported as a webhook target.",
                p=config.provider.title(),
            )
            ) from err
@ -56,10 +56,14 @@ class Query(BaseModel):

        self.directive = query_directives[0]

        self._input_plugin_manager = InputPluginManager()

        self.query_target = self.transform_query_target()

        try:
            self.validate_query_target()
        except InputValidationError as err:
            raise InputInvalid(**err.kwargs)
            raise InputInvalid(**err.kwargs) from err

    def __repr__(self) -> str:
        """Represent only the query fields."""
@ -80,14 +84,17 @@ class Query(BaseModel):
        ).hexdigest()

    def validate_query_target(self) -> None:
        """Validate a query target after all fields/relationships havebeen initialized."""
        """Validate a query target after all fields/relationships have been initialized."""
        # Run config/rule-based validations.
        self.directive.validate_target(self.query_target)
        # Run plugin-based validations.
        manager = InputPluginManager()
        manager.execute(query=self)
        self._input_plugin_manager.validate(query=self)
        log.debug("Validation passed for query {!r}", self)

    def transform_query_target(self) -> QueryTarget:
        """Transform a query target based on defined plugins."""
        return self._input_plugin_manager.transform(query=self)

    def dict(self) -> t.Dict[str, t.Union[t.List[str], str]]:
        """Include only public fields."""
        return super().dict(include={"query_location", "query_target", "query_type"})
@ -216,7 +216,7 @@ class Device(HyperglassModelWithId, extra="allow"):
                p=values["platform"],
            )
            return value
        elif value is None and values["platform"] in SUPPORTED_STRUCTURED_OUTPUT:
        if value is None and values["platform"] in SUPPORTED_STRUCTURED_OUTPUT:
            value = True
        else:
            value = False
@ -79,7 +79,7 @@ class BGPRoute(HyperglassModel):
            # If router validation is enabled, return the value as-is.
            return value

        elif structured.rpki.mode == "external":
        if structured.rpki.mode == "external":
            # If external validation is enabled, validate the prefix
            # & asn with Cloudflare's RPKI API.
            as_path = values["as_path"]
@ -88,9 +88,8 @@ class BGPRoute(HyperglassModel):
                # If the AS_PATH length is 0, i.e. for an internal route,
                # return RPKI Unknown state.
                return 3
            else:
                # Get last ASN in path
                asn = as_path[-1]
            # Get last ASN in path
            asn = as_path[-1]

            try:
                net = ip_network(values["prefix"])
@ -100,8 +99,8 @@ class BGPRoute(HyperglassModel):
            # Only do external RPKI lookups for global prefixes.
            if net.is_global:
                return rpki_state(prefix=values["prefix"], asn=asn)
            else:
                return value

            return value


class BGPRouteTable(HyperglassModel):
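The flattened RPKI branch above is easier to see in isolation. A self-contained sketch of the control flow (with `rpki_state` stubbed out; in hyperglass it queries an external validator):

from ipaddress import ip_network

def rpki_state(prefix: str, asn: int) -> int:
    """Stub for hyperglass's external RPKI lookup; pretend the route is valid."""
    return 1

def rpki_for_route(prefix: str, as_path: list, current: int) -> int:
    if len(as_path) == 0:
        # Internal route with an empty AS_PATH: RPKI state "unknown" (3).
        return 3
    asn = as_path[-1]  # the origin ASN is the last hop in the path
    if ip_network(prefix).is_global:
        return rpki_state(prefix=prefix, asn=asn)
    return current

print(rpki_for_route("1.1.1.0/24", [65000, 13335], current=3))  # 1 (globally routable)
print(rpki_for_route("10.0.0.0/8", [65000], current=3))         # 3 (private, no lookup)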
@ -137,7 +137,7 @@ class RuleWithIP(Rule):
            valid_target = ip_network(target)

        except ValueError as err:
            raise InputValidationError(error=str(err), target=target)
            raise InputValidationError(error=str(err), target=target) from err

        is_member = self.membership(valid_target, self.condition)
        in_range = self.in_range(valid_target)
@ -146,7 +146,7 @@ class RuleWithIP(Rule):
            self._passed = True
            return True

        elif is_member and not in_range:
        if is_member and not in_range:
            self._passed = False
            raise InputValidationError(
                error="Prefix-length is not within range {ge}-{le}",
@ -155,7 +155,7 @@ class RuleWithIP(Rule):
                le=self.le,
            )

        elif is_member and self.action == "deny":
        if is_member and self.action == "deny":
            self._passed = False
            raise InputValidationError(
                error="Member of denied network '{network}'",
@ -204,7 +204,7 @@ class RuleWithPattern(Rule):

            if is_match and self.action == "permit":
                return True
            elif is_match and self.action == "deny":
            if is_match and self.action == "deny":
                return InputValidationError(target=value, error="Denied")
            return False

@ -213,13 +213,13 @@ class RuleWithPattern(Rule):
            if isinstance(result, BaseException):
                self._passed = False
                raise result
            elif result is False:
            if result is False:
                self._passed = False
                return result
            self._passed = True
            return True

        elif isinstance(target, t.List) and not multiple:
        if isinstance(target, t.List) and not multiple:
            raise InputValidationError("Target must be a single value")

        result = validate_single_value(target)
@ -277,7 +277,7 @@ class Directive(HyperglassUniqueModel, unique_by=("id", "table_output")):

        if self.field.is_select:
            return "select"
        elif self.field.is_text or self.field.is_ip:
        if self.field.is_text or self.field.is_ip:
            return "text"
        return None
@ -64,7 +64,7 @@ class Action(str):

        if value in cls.permits:
            return cls("permit")
        elif value in cls.denies:
        if value in cls.denies:
            return cls("deny")

        raise ValueError(
@ -118,7 +118,9 @@ class HyperglassUniqueModel(HyperglassModel):

    def __hash__(self: "HyperglassUniqueModel") -> int:
        """Create a hashed representation of this model's name."""
        fields = dict(zip(self._unique_fields, (getattr(self, f) for f in self._unique_fields)))
        fields = dict(
            zip(self._unique_fields, (getattr(self, f) for f in self._unique_fields), strict=True)
        )
        return hash(json.dumps(fields))
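The `strict=True` added here is aligned with bugbear's zip-without-explicit-strict check (B905), covered by the newly selected "B" ruleset: a silent truncation on mismatched lengths becomes a loud error. A quick sketch (note `strict=` requires Python 3.10+):

fields = ["id", "table_output"]
values = ("bgp_route",)  # one value missing

print(dict(zip(fields, values)))  # {'id': 'bgp_route'} -- the mismatch is hidden

try:
    dict(zip(fields, values, strict=True))
except ValueError as err:
    print(err)  # zip() argument 2 is shorter than argument 1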
@ -79,7 +79,7 @@ class JuniperRouteTableEntry(JuniperBase):
            if "selected_next_hop" in hop:
                selected_next_hop = hop.get("to", "")
                break
            elif hop.get("to") is not None:
            if hop.get("to") is not None:
                selected_next_hop = hop["to"]
                break

@ -114,8 +114,8 @@ class JuniperRouteTableEntry(JuniperBase):
        if not isinstance(value, dict):
            try:
                value = int(value)
            except ValueError:
                raise ValueError(f"Age field is in an unexpected format. Got: {value}")
            except ValueError as err:
                raise ValueError(f"Age field is in an unexpected format. Got: {value}") from err
        else:
            value = value.get("@junos:seconds", 0)
        return int(value)
@ -84,15 +84,15 @@ class HyperglassSettings(BaseSettings):
        if value is None:
            if values["debug"] is False:
                return ip_address("::1")
            elif values["debug"] is True:
            if values["debug"] is True:
                return ip_address("::")

        if isinstance(value, str):
            if value != "localhost":
                try:
                    return ip_address(value)
                except ValueError:
                    raise ValueError(str(value))
                except ValueError as err:
                    raise ValueError(str(value)) from err

            elif value == "localhost":
                return ip_address("::1")
@ -39,19 +39,19 @@ def parse_arista(output: Sequence[str]) -> Dict:  # noqa: C901

    except json.JSONDecodeError as err:
        log.critical("Error decoding JSON: {}", str(err))
        raise ParsingError("Error parsing response data")
        raise ParsingError("Error parsing response data") from err

    except KeyError as err:
        log.critical("'{}' was not found in the response", str(err))
        raise ParsingError("Error parsing response data")
        raise ParsingError("Error parsing response data") from err

    except IndexError as err:
        log.critical(str(err))
        raise ParsingError("Error parsing response data")
        raise ParsingError("Error parsing response data") from err

    except ValidationError as err:
        log.critical(str(err))
        raise ParsingError(err.errors())
        raise ParsingError(err.errors()) from err

    log.debug("Serialzed: {}", data)
    log.debug("Serialized: {}", data)
    return data
@ -94,10 +94,10 @@ def parse_juniper(output: Sequence) -> Dict:  # noqa: C901

    except KeyError as err:
        log.critical("{} was not found in the response", str(err))
        raise ParsingError("Error parsing response data")
        raise ParsingError("Error parsing response data") from err

    except ValidationError as err:
        log.critical(str(err))
        raise ParsingError(err.errors())
        raise ParsingError(err.errors()) from err

    return data
@ -46,7 +46,7 @@ def parse_mikrotik(output: str):

    # Remove any lines marked for removal and re-join with a single
    # newline character.
    lines = [l for i, l in enumerate(lines) if i not in remove_lines]
    lines = [line for idx, line in enumerate(lines) if idx not in remove_lines]
    output = "\n".join(lines)

    return output
@ -2,7 +2,7 @@

# Local
from .main import register_plugin, init_builtin_plugins
from ._input import InputPlugin, InputPluginReturn
from ._input import InputPlugin, InputPluginValidationReturn
from ._output import OutputType, OutputPlugin
from ._manager import InputPluginManager, OutputPluginManager

@ -10,7 +10,7 @@ __all__ = (
    "init_builtin_plugins",
    "InputPlugin",
    "InputPluginManager",
    "InputPluginReturn",
    "InputPluginValidationReturn",
    "OutputPlugin",
    "OutputPluginManager",
    "OutputType",
@ -11,12 +11,15 @@ from pydantic import PrivateAttr
from hyperglass.state.hooks import use_state

# Local
from .._input import InputPlugin, InputPluginReturn
from .._input import InputPlugin

if t.TYPE_CHECKING:
    # Project
    from hyperglass.models.api.query import Query

    # Local
    from .._input import InputPluginValidationReturn

_32BIT = 0xFFFFFFFF
_16BIT = 0xFFFF
EXTENDED_TYPES = ("target", "origin")
@ -63,10 +66,10 @@ def validate_new_format(value: str) -> bool:
    if all((check_decimal(one, _16BIT), check_decimal(two, _16BIT))):
        # Handle standard format, e.g. `65000:1`
        return True
    elif all((check_decimal(one, _16BIT), check_decimal(two, _32BIT))):
    if all((check_decimal(one, _16BIT), check_decimal(two, _32BIT))):
        # Handle extended format, e.g. `65000:4294967295`
        return True
    elif all((check_string(one), check_decimal(two, _16BIT))):
    if all((check_string(one), check_decimal(two, _16BIT))):
        # Handle IP address format, e.g. `192.0.2.1:65000`
        return True

@ -92,7 +95,7 @@ class ValidateBGPCommunity(InputPlugin):

    __hyperglass_builtin__: bool = PrivateAttr(True)

    def validate(self, query: "Query") -> InputPluginReturn:
    def validate(self, query: "Query") -> "InputPluginValidationReturn":
        """Ensure an input query target is a valid BGP community."""

        params = use_state("params")
@ -48,19 +48,19 @@ def parse_arista(output: t.Sequence[str]) -> "OutputDataModel":

    except json.JSONDecodeError as err:
        log.critical("Error decoding JSON: {}", str(err))
        raise ParsingError("Error parsing response data")
        raise ParsingError("Error parsing response data") from err

    except KeyError as err:
        log.critical("'{}' was not found in the response", str(err))
        raise ParsingError("Error parsing response data")
        raise ParsingError("Error parsing response data") from err

    except IndexError as err:
        log.critical(str(err))
        raise ParsingError("Error parsing response data")
        raise ParsingError("Error parsing response data") from err

    except ValidationError as err:
        log.critical(str(err))
        raise ParsingError(err.errors())
        raise ParsingError(err.errors()) from err

    return result

@ -108,10 +108,10 @@ def parse_juniper(output: Sequence[str]) -> "OutputDataModel":  # noqa: C901
        raise ParsingError("Error parsing response data") from err

    except KeyError as err:
        raise ParsingError("{key} was not found in the response", key=str(err))
        raise ParsingError("{key} was not found in the response", key=str(err)) from err

    except ValidationError as err:
        raise ParsingError(err)
        raise ParsingError(err) from err

    return result

@ -8,9 +8,11 @@ from ._base import DirectivePlugin, HyperglassPlugin

if t.TYPE_CHECKING:
    # Project
    from hyperglass.models.api.query import Query
    from hyperglass.models.api.query import Query, QueryTarget

InputPluginReturn = t.Union[None, bool]

InputPluginValidationReturn = t.Union[None, bool]
InputPluginTransformReturn = t.Union[t.Sequence["QueryTarget"], "QueryTarget"]


class InputPlugin(HyperglassPlugin, DirectivePlugin):
@ -19,6 +21,10 @@ class InputPlugin(HyperglassPlugin, DirectivePlugin):
    _type = "input"
    failure_reason: t.Optional[str] = None

    def validate(self, query: "Query") -> InputPluginReturn:
    def validate(self, query: "Query") -> InputPluginValidationReturn:
        """Validate input from hyperglass UI/API."""
        return None

    def transform(self, query: "Query") -> InputPluginTransformReturn:
        """Transform query target prior to running commands."""
        return query.query_target
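With the base class split into `validate` and `transform` hooks, a custom input plugin only overrides what it needs. A hypothetical subclass sketch (the import path matches the package's `__all__`; registration and directive wiring are omitted):

from hyperglass.plugins import InputPlugin

class StripWhitespace(InputPlugin):
    """Normalize the query target before rule validation runs."""

    def validate(self, query):
        # Return None to defer the pass/fail decision to other plugins.
        return None

    def transform(self, query):
        target = query.query_target
        return target.strip() if isinstance(target, str) else target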
@ -11,7 +11,7 @@ from hyperglass.exceptions.private import PluginError, InputValidationError

# Local
from ._base import PluginType, HyperglassPlugin
from ._input import InputPlugin, InputPluginReturn
from ._input import InputPlugin, InputPluginTransformReturn, InputPluginValidationReturn
from ._output import OutputType, OutputPlugin

if t.TYPE_CHECKING:
@ -117,7 +117,7 @@ class PluginManager(t.Generic[PluginT]):
            log.success("Registered {} plugin {!r}", self._type, instance.name)
            return
        except TypeError:
            raise PluginError(
            raise PluginError(  # noqa: B904
                "Plugin '{p}' has not defined a required method. "
                "Please consult the hyperglass documentation.",
                p=repr(plugin),
@ -128,24 +128,27 @@ class PluginManager(t.Generic[PluginT]):
class InputPluginManager(PluginManager[InputPlugin], type="input"):
    """Manage Input Validation Plugins."""

    def execute(self: "InputPluginManager", *, query: "Query") -> InputPluginReturn:
    def _gather_plugins(
        self: "InputPluginManager", query: "Query"
    ) -> t.Generator[InputPlugin, None, None]:
        for plugin in self.plugins(builtins=True):
            if plugin.directives and query.directive.id in plugin.directives:
                yield plugin
            if plugin.ref in query.directive.plugins:
                yield plugin
            if plugin.common is True:
                yield plugin

    def validate(self: "InputPluginManager", query: "Query") -> InputPluginValidationReturn:
        """Execute all input validation plugins.

        If any plugin returns `False`, execution is halted.
        """
        result = None
        builtins = (
            plugin
            for plugin in self.plugins(builtins=True)
            if plugin.directives and query.directive.id in plugin.directives
        )
        directives = (plugin for plugin in self.plugins() if plugin.ref in query.directive.plugins)
        common = (plugin for plugin in self.plugins() if plugin.common is True)

        for plugin in (*directives, *builtins, *common):
        for plugin in self._gather_plugins(query):
            result = plugin.validate(query)
            result_test = "valid" if result is True else "invalid" if result is False else "none"
            log.debug("Input Plugin {!r} result={!r}", plugin.name, result_test)
            log.debug("Input Plugin Validation {!r} result={!r}", plugin.name, result_test)
            if result is False:
                raise InputValidationError(
                    error="No matched validation rules", target=query.query_target
@ -154,6 +157,14 @@ class InputPluginManager(PluginManager[InputPlugin], type="input"):
                return result
        return result

    def transform(self: "InputPluginManager", *, query: "Query") -> InputPluginTransformReturn:
        """Execute all input transformation plugins."""
        result = query.query_target
        for plugin in self._gather_plugins(query):
            result = plugin.transform(query=query)
            log.debug("Input Plugin Transform {!r} result={!r}", plugin.name, result)
        return result


class OutputPluginManager(PluginManager[OutputPlugin], type="output"):
    """Manage Output Processing Plugins."""
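The manager refactor replaces three ad-hoc generator expressions with one `_gather_plugins` generator shared by `validate` and `transform`. A simplified, self-contained sketch of that selection logic (class and field names are reduced stand-ins):

import typing as t

class Plugin:
    def __init__(self, name: str, directives: t.Sequence[str] = (), common: bool = False) -> None:
        self.name, self.directives, self.common = name, tuple(directives), common

def gather_plugins(plugins: t.Sequence[Plugin], directive_id: str) -> t.Iterator[Plugin]:
    # Lazily yield every plugin matching the query's directive, plus any
    # plugin marked common -- the same shape as _gather_plugins above.
    for plugin in plugins:
        if plugin.directives and directive_id in plugin.directives:
            yield plugin
        if plugin.common:
            yield plugin

registry = [
    Plugin("validate-bgp-community", directives=("bgp_community",)),
    Plugin("audit-log", common=True),
]
print([p.name for p in gather_plugins(registry, "bgp_community")])
# ['validate-bgp-community', 'audit-log']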
@ -24,14 +24,14 @@ async def move_files(src: Path, dst: Path, files: t.Iterable[Path]) -> t.Tuple[s
    if not isinstance(src, Path):
        try:
            src = Path(src)
        except TypeError:
            raise error("{p} is not a valid path", p=src)
        except TypeError as err:
            raise error("{p} is not a valid path", p=src) from err

    if not isinstance(dst, Path):
        try:
            dst = Path(dst)
        except TypeError:
            raise error("{p} is not a valid path", p=dst)
        except TypeError as err:
            raise error("{p} is not a valid path", p=dst) from err

    if not isinstance(files, (t.List, t.Tuple, t.Generator)):
        raise error(
@ -57,7 +57,7 @@ async def move_files(src: Path, dst: Path, files: t.Iterable[Path]) -> t.Tuple[s
                shutil.copyfile(file, dst_file)
                migrated += (str(dst_file),)
            except Exception as e:
                raise error("Failed to migrate {f}: {e}", f=dst_file, e=e)
                raise error("Failed to migrate {f}: {e}", f=dst_file, e=e) from e

    return migrated

@ -60,8 +60,8 @@ async def read_package_json() -> t.Dict[str, t.Any]:
        with package_json_file.open("r") as file:
            package_json = json.load(file)

    except Exception as e:
        raise RuntimeError(f"Error reading package.json: {str(e)}")
    except Exception as err:
        raise RuntimeError(f"Error reading package.json: {str(err)}") from err

    log.debug("package.json:\n{p}", p=package_json)

@ -98,8 +98,8 @@ async def node_initial(timeout: int = 180, dev_mode: bool = False) -> str:
        await proc.wait()
        all_messages += (messages,)

    except Exception as e:
        raise RuntimeError(str(e))
    except Exception as err:
        raise RuntimeError(str(err)) from err

    return "\n".join(all_messages)

@ -107,7 +107,7 @@ async def node_initial(timeout: int = 180, dev_mode: bool = False) -> str:
async def build_ui(app_path: Path):
    """Execute `next build` & `next export` from UI directory.

    Raises:
    ### Raises
        RuntimeError: Raised if exit code is not 0.
        RuntimeError: Raised when any other error occurs.
    """
@ -139,12 +139,12 @@ async def build_ui(app_path: Path):
        await proc.wait()
        all_messages.append(messages)

    except asyncio.TimeoutError:
        raise RuntimeError(f"{timeout} second timeout exceeded while building UI")
    except asyncio.TimeoutError as err:
        raise RuntimeError(f"{timeout} second timeout exceeded while building UI") from err

    except Exception as err:
        log.error(err)
        raise RuntimeError(str(err))
        raise RuntimeError(str(err)) from err

    return "\n".join(all_messages)

@ -128,9 +128,9 @@ def deep_convert_keys(_dict: t.Type[DeepConvert], predicate: t.Callable[[str], s
    def get_value(value: t.Any):
        if isinstance(value, t.Dict):
            return {predicate(k): get_value(v) for k, v in value.items()}
        elif isinstance(value, t.List):
        if isinstance(value, t.List):
            return [get_value(v) for v in value]
        elif isinstance(value, t.Tuple):
        if isinstance(value, t.Tuple):
            return tuple(get_value(v) for v in value)
        return value

@ -35,12 +35,12 @@ def get_driver(_type: str, driver: t.Optional[str]) -> str:
        # If no driver is set, use the driver map with netmiko as
        # fallback.
        return DRIVER_MAP.get(_type, "netmiko")
    elif driver in ALL_DRIVERS:
    if driver in ALL_DRIVERS:
        # If a driver is set and it is valid, allow it.
        return driver
    else:
        # Otherwise, fail validation.
        raise ValueError("{} is not a supported driver.".format(driver))

    # Otherwise, fail validation.
    raise ValueError("{} is not a supported driver.".format(driver))


def resolve_hostname(
286 poetry.lock generated
@ -34,17 +34,6 @@ python-versions = ">=3.7"
[package.extras]
tests = ["mypy (>=0.800)", "pytest", "pytest-asyncio"]

[[package]]
name = "aspy-yaml"
version = "1.3.0"
description = "A few extensions to pyyaml."
category = "dev"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"

[package.dependencies]
pyyaml = "*"

[[package]]
name = "attrs"
version = "22.1.0"
@ -296,157 +285,6 @@ mccabe = ">=0.7.0,<0.8.0"
pycodestyle = ">=2.10.0,<2.11.0"
pyflakes = ">=3.0.0,<3.1.0"

[[package]]
name = "flake8-bandit"
version = "4.1.1"
description = "Automated security testing with bandit and flake8."
category = "dev"
optional = false
python-versions = ">=3.6"

[package.dependencies]
bandit = ">=1.7.3"
flake8 = ">=5.0.0"

[[package]]
name = "flake8-black"
version = "0.3.5"
description = "flake8 plugin to call black as a code style validator"
category = "dev"
optional = false
python-versions = ">=3.7"

[package.dependencies]
black = ">=22.1.0"
flake8 = ">=3"
tomli = "*"

[package.extras]
develop = ["build", "twine"]

[[package]]
name = "flake8-breakpoint"
version = "1.1.0"
description = "Flake8 plugin that check forgotten breakpoints"
category = "dev"
optional = false
python-versions = ">=3.6,<4.0"

[package.dependencies]
flake8-plugin-utils = ">=1.0,<2.0"

[[package]]
name = "flake8-bugbear"
version = "22.12.6"
description = "A plugin for flake8 finding likely bugs and design problems in your program. Contains warnings that don't belong in pyflakes and pycodestyle."
category = "dev"
optional = false
python-versions = ">=3.7"

[package.dependencies]
attrs = ">=19.2.0"
flake8 = ">=3.0.0"

[package.extras]
dev = ["coverage", "hypothesis", "hypothesmith (>=0.2)", "pre-commit", "tox"]

[[package]]
name = "flake8-builtins"
version = "2.0.1"
description = "Check for python builtins being used as variables or parameters."
category = "dev"
optional = false
python-versions = ">=3.7"

[package.dependencies]
flake8 = "*"

[package.extras]
test = ["pytest"]

[[package]]
name = "flake8-comprehensions"
version = "3.10.1"
description = "A flake8 plugin to help you write better list/set/dict comprehensions."
category = "dev"
optional = false
python-versions = ">=3.7"

[package.dependencies]
flake8 = ">=3.0,<3.2.0 || >3.2.0"

[[package]]
name = "flake8-deprecated"
version = "2.0.1"
description = "Warns about deprecated method calls."
category = "dev"
optional = false
python-versions = ">=3.7"

[package.dependencies]
flake8 = "*"

[package.extras]
test = ["pytest"]

[[package]]
name = "flake8-docstrings"
version = "1.6.0"
description = "Extension for flake8 which uses pydocstyle to check docstrings"
category = "dev"
optional = false
python-versions = "*"

[package.dependencies]
flake8 = ">=3"
pydocstyle = ">=2.1"

[[package]]
name = "flake8-isort"
version = "5.0.3"
description = "flake8 plugin that integrates isort ."
category = "dev"
optional = false
python-versions = ">=3.7"

[package.dependencies]
flake8 = "*"
isort = ">=4.3.5,<6"

[package.extras]
test = ["pytest"]

[[package]]
name = "flake8-plugin-utils"
version = "1.3.2"
description = "The package provides base classes and utils for flake8 plugin writing"
category = "dev"
optional = false
python-versions = ">=3.6,<4.0"

[[package]]
name = "flake8-polyfill"
version = "1.0.2"
description = "Polyfill package for Flake8 plugins"
category = "dev"
optional = false
python-versions = "*"

[package.dependencies]
flake8 = "*"

[[package]]
name = "flake8-print"
version = "5.0.0"
description = "print statement checker plugin for flake8"
category = "dev"
optional = false
python-versions = ">=3.7"

[package.dependencies]
flake8 = ">=3.0"
pycodestyle = "*"

[[package]]
name = "future"
version = "0.18.2"
@ -773,21 +611,19 @@ testing = ["pytest", "pytest-benchmark"]

[[package]]
name = "pre-commit"
version = "1.21.0"
version = "2.20.0"
description = "A framework for managing and maintaining multi-language pre-commit hooks."
category = "dev"
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7"
python-versions = ">=3.7"

[package.dependencies]
"aspy.yaml" = "*"
cfgv = ">=2.0.0"
identify = ">=1.0.0"
nodeenv = ">=0.11.1"
pyyaml = "*"
six = "*"
pyyaml = ">=5.1"
toml = "*"
virtualenv = ">=15.2"
virtualenv = ">=20.0.8"

[[package]]
name = "psutil"
@ -840,20 +676,6 @@ typing-extensions = ">=3.7.4.3"
dotenv = ["python-dotenv (>=0.10.4)"]
email = ["email-validator (>=1.0.3)"]

[[package]]
name = "pydocstyle"
version = "6.1.1"
description = "Python docstring style checker"
category = "dev"
optional = false
python-versions = ">=3.6"

[package.dependencies]
snowballstemmer = "*"

[package.extras]
toml = ["toml"]

[[package]]
name = "pyflakes"
version = "3.0.1"
@ -1018,6 +840,14 @@ pygments = ">=2.6.0,<3.0.0"
[package.extras]
jupyter = ["ipywidgets (>=7.5.1,<8.0.0)"]

[[package]]
name = "ruff"
version = "0.0.192"
description = "An extremely fast Python linter, written in Rust."
category = "dev"
optional = false
python-versions = ">=3.7"

[[package]]
name = "scp"
version = "0.14.4"
@ -1066,14 +896,6 @@ category = "main"
optional = false
python-versions = ">=3.7"

[[package]]
name = "snowballstemmer"
version = "2.2.0"
description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms."
category = "dev"
optional = false
python-versions = "*"

[[package]]
name = "stackprinter"
version = "0.2.10"
@ -1316,7 +1138,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
[metadata]
lock-version = "1.1"
python-versions = ">=3.8.1,<4.0"
content-hash = "1b4540f55d01e47f526891a5e33e8d9346883b478bbcb17a043e9e24201ab1f6"
content-hash = "03ec17db8c6644b1b6e4e4fc53177107c75f23179c491e5c722a379b3bdb765e"

[metadata.files]
aiofiles = [
@ -1331,10 +1153,6 @@ asgiref = [
    {file = "asgiref-3.5.2-py3-none-any.whl", hash = "sha256:1d2880b792ae8757289136f1db2b7b99100ce959b2aa57fd69dab783d05afac4"},
    {file = "asgiref-3.5.2.tar.gz", hash = "sha256:4a29362a6acebe09bf1d6640db38c1dc3d9217c68e6f9f6204d72667fc19a424"},
]
aspy-yaml = [
    {file = "aspy.yaml-1.3.0-py2.py3-none-any.whl", hash = "sha256:463372c043f70160a9ec950c3f1e4c3a82db5fca01d334b6bc89c7164d744bdc"},
    {file = "aspy.yaml-1.3.0.tar.gz", hash = "sha256:e7c742382eff2caed61f87a39d13f99109088e5e93f04d76eb8d4b28aa143f45"},
]
attrs = [
    {file = "attrs-22.1.0-py2.py3-none-any.whl", hash = "sha256:86efa402f67bf2df34f51a335487cf46b1ec130d02b8d39fd248abfd30da551c"},
    {file = "attrs-22.1.0.tar.gz", hash = "sha256:29adc2665447e5191d0e7c568fde78b21f9672d344281d0c6e1ab085429b22b6"},
@ -1616,54 +1434,6 @@ flake8 = [
    {file = "flake8-6.0.0-py2.py3-none-any.whl", hash = "sha256:3833794e27ff64ea4e9cf5d410082a8b97ff1a06c16aa3d2027339cd0f1195c7"},
    {file = "flake8-6.0.0.tar.gz", hash = "sha256:c61007e76655af75e6785a931f452915b371dc48f56efd765247c8fe68f2b181"},
]
flake8-bandit = [
    {file = "flake8_bandit-4.1.1-py3-none-any.whl", hash = "sha256:4c8a53eb48f23d4ef1e59293657181a3c989d0077c9952717e98a0eace43e06d"},
    {file = "flake8_bandit-4.1.1.tar.gz", hash = "sha256:068e09287189cbfd7f986e92605adea2067630b75380c6b5733dab7d87f9a84e"},
]
flake8-black = [
    {file = "flake8-black-0.3.5.tar.gz", hash = "sha256:9e93252b1314a8eb3c2f55dec54a07239e502b12f57567f2c105f2202714b15e"},
    {file = "flake8_black-0.3.5-py3-none-any.whl", hash = "sha256:4948a579fdddd98fbf935fd94255dfcfce560c4ddc1ceee08e3f12d6114c8619"},
]
flake8-breakpoint = [
    {file = "flake8-breakpoint-1.1.0.tar.gz", hash = "sha256:5bc70d478f0437a3655d094e1d2fca81ddacabaa84d99db45ad3630bf2004064"},
    {file = "flake8_breakpoint-1.1.0-py3-none-any.whl", hash = "sha256:27e0cb132647f9ef348b4a3c3126e7350bedbb22e8e221cd11712a223855ea0b"},
]
flake8-bugbear = [
    {file = "flake8-bugbear-22.12.6.tar.gz", hash = "sha256:4cdb2c06e229971104443ae293e75e64c6107798229202fbe4f4091427a30ac0"},
    {file = "flake8_bugbear-22.12.6-py3-none-any.whl", hash = "sha256:b69a510634f8a9c298dfda2b18a8036455e6b19ecac4fe582e4d7a0abfa50a30"},
]
flake8-builtins = [
    {file = "flake8-builtins-2.0.1.tar.gz", hash = "sha256:5aeb420130efe8acbdaf8708a582492413293a3ca25653518f687937879650a5"},
    {file = "flake8_builtins-2.0.1-py3-none-any.whl", hash = "sha256:a5b9ca9cbc921c4455ea02e2e9963c990ac66d028c15b654625e012a1e3bbb4d"},
]
flake8-comprehensions = [
    {file = "flake8-comprehensions-3.10.1.tar.gz", hash = "sha256:412052ac4a947f36b891143430fef4859705af11b2572fbb689f90d372cf26ab"},
    {file = "flake8_comprehensions-3.10.1-py3-none-any.whl", hash = "sha256:d763de3c74bc18a79c039a7ec732e0a1985b0c79309ceb51e56401ad0a2cd44e"},
]
flake8-deprecated = [
    {file = "flake8-deprecated-2.0.1.tar.gz", hash = "sha256:c7659a530aa76c3ad8be0c1e8331ed56d882ef8bfba074501a545bb3352b0c23"},
    {file = "flake8_deprecated-2.0.1-py3-none-any.whl", hash = "sha256:8c61d2cb8d487118b6c20392b25f08ba1ec49c759e4ea562c7a60172912bc7ee"},
]
flake8-docstrings = [
    {file = "flake8-docstrings-1.6.0.tar.gz", hash = "sha256:9fe7c6a306064af8e62a055c2f61e9eb1da55f84bb39caef2b84ce53708ac34b"},
    {file = "flake8_docstrings-1.6.0-py2.py3-none-any.whl", hash = "sha256:99cac583d6c7e32dd28bbfbef120a7c0d1b6dde4adb5a9fd441c4227a6534bde"},
]
flake8-isort = [
    {file = "flake8-isort-5.0.3.tar.gz", hash = "sha256:0951398c343c67f4933407adbbfb495d4df7c038650c5d05753a006efcfeb390"},
    {file = "flake8_isort-5.0.3-py3-none-any.whl", hash = "sha256:8c4ab431d87780d0c8336e9614e50ef11201bc848ef64ca017532dec39d4bf49"},
]
flake8-plugin-utils = [
    {file = "flake8-plugin-utils-1.3.2.tar.gz", hash = "sha256:20fa2a8ca2decac50116edb42e6af0a1253ef639ad79941249b840531889c65a"},
    {file = "flake8_plugin_utils-1.3.2-py3-none-any.whl", hash = "sha256:1fe43e3e9acf3a7c0f6b88f5338cad37044d2f156c43cb6b080b5f9da8a76f06"},
]
flake8-polyfill = [
    {file = "flake8-polyfill-1.0.2.tar.gz", hash = "sha256:e44b087597f6da52ec6393a709e7108b2905317d0c0b744cdca6208e670d8eda"},
    {file = "flake8_polyfill-1.0.2-py2.py3-none-any.whl", hash = "sha256:12be6a34ee3ab795b19ca73505e7b55826d5f6ad7230d31b18e106400169b9e9"},
]
flake8-print = [
    {file = "flake8-print-5.0.0.tar.gz", hash = "sha256:76915a2a389cc1c0879636c219eb909c38501d3a43cc8dae542081c9ba48bdf9"},
    {file = "flake8_print-5.0.0-py3-none-any.whl", hash = "sha256:84a1a6ea10d7056b804221ac5e62b1cee1aefc897ce16f2e5c42d3046068f5d8"},
]
future = [
    {file = "future-0.18.2.tar.gz", hash = "sha256:b1bead90b70cf6ec3f0710ae53a525360fa360d306a86583adc6bf83a4db537d"},
]
@ -1896,8 +1666,8 @@ pluggy = [
    {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"},
]
pre-commit = [
    {file = "pre_commit-1.21.0-py2.py3-none-any.whl", hash = "sha256:f92a359477f3252452ae2e8d3029de77aec59415c16ae4189bcfba40b757e029"},
    {file = "pre_commit-1.21.0.tar.gz", hash = "sha256:8f48d8637bdae6fa70cc97db9c1dd5aa7c5c8bf71968932a380628c25978b850"},
    {file = "pre_commit-2.20.0-py2.py3-none-any.whl", hash = "sha256:51a5ba7c480ae8072ecdb6933df22d2f812dc897d5fe848778116129a681aac7"},
    {file = "pre_commit-2.20.0.tar.gz", hash = "sha256:a978dac7bc9ec0bcee55c18a277d553b0f419d259dadb4b9418ff2d00eb43959"},
]
psutil = [
    {file = "psutil-5.9.4-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:c1ca331af862803a42677c120aff8a814a804e09832f166f226bfd22b56feee8"},
@ -1963,10 +1733,6 @@ pydantic = [
    {file = "pydantic-1.9.2-py3-none-any.whl", hash = "sha256:78a4d6bdfd116a559aeec9a4cfe77dda62acc6233f8b56a716edad2651023e5e"},
    {file = "pydantic-1.9.2.tar.gz", hash = "sha256:8cb0bc509bfb71305d7a59d00163d5f9fc4530f0881ea32c74ff4f74c85f3d3d"},
]
pydocstyle = [
    {file = "pydocstyle-6.1.1-py3-none-any.whl", hash = "sha256:6987826d6775056839940041beef5c08cc7e3d71d63149b48e36727f70144dc4"},
    {file = "pydocstyle-6.1.1.tar.gz", hash = "sha256:1d41b7c459ba0ee6c345f2eb9ae827cab14a7533a88c5c6f7e94923f72df92dc"},
]
pyflakes = [
    {file = "pyflakes-3.0.1-py2.py3-none-any.whl", hash = "sha256:ec55bf7fe21fff7f1ad2f7da62363d749e2a470500eab1b555334b67aa1ef8cf"},
    {file = "pyflakes-3.0.1.tar.gz", hash = "sha256:ec8b276a6b60bd80defed25add7e439881c19e64850afd9b346283d4165fd0fd"},
@ -2087,6 +1853,24 @@ rich = [
    {file = "rich-10.16.2-py3-none-any.whl", hash = "sha256:c59d73bd804c90f747c8d7b1d023b88f2a9ac2454224a4aeaf959b21eeb42d03"},
    {file = "rich-10.16.2.tar.gz", hash = "sha256:720974689960e06c2efdb54327f8bf0cdbdf4eae4ad73b6c94213cad405c371b"},
]
ruff = [
    {file = "ruff-0.0.192-py3-none-macosx_10_7_x86_64.whl", hash = "sha256:eac8b1447b82744aa6d64303be081227bbc9a6c3577c793f4cf8ed5c09decb71"},
    {file = "ruff-0.0.192-py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:71d49353a7e8799bc879df9cc17e2f2a8664240617b25a11db517d97ed65b377"},
    {file = "ruff-0.0.192-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:113562bc3298b680df41c4a26fb55d10ed6e38432e987437302e8959a26ca8f5"},
    {file = "ruff-0.0.192-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e054e307f53af0db530eb1f8a810edd671561b512cf17954f8842bf7d786153b"},
    {file = "ruff-0.0.192-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:340ea45b619a6729e518658bbd11b3650d1de89f87e01334d36f8f22c454fe89"},
    {file = "ruff-0.0.192-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:a6b6863dcbac58666b87b840fd92a49a8791ee9c52bca2cc33e480380e6bf50d"},
    {file = "ruff-0.0.192-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8661c278c6a9dd059e1327d1f4b9ea1e749da6e26b77e567f1566d31d561868a"},
    {file = "ruff-0.0.192-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:51ffca9d10f139932479c5ca3838b5194ccef534677969e21b368daee5e13bca"},
    {file = "ruff-0.0.192-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:03921ceae167be1733be9b50774627a3a5874e0d0db6d0f3d22cb7d4ec0bc50d"},
    {file = "ruff-0.0.192-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:e41e8492413b17451eb5ffee07339dbbd6c7b89a6c968b4fcb7f1188505f418a"},
    {file = "ruff-0.0.192-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:de4a29092aa026a484e174acb1ddc92c752836daebd4fb6d94078986531a2f4d"},
    {file = "ruff-0.0.192-py3-none-musllinux_1_2_i686.whl", hash = "sha256:6623a936cfc2547a9abbb888c9d5ffcae06897306b2649c4700a2f33bd08ada6"},
    {file = "ruff-0.0.192-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:6bd9410f10fa3efbf34ef45afae8064c7e8fddcc3ea0c1755f4e2d12c0a197e5"},
    {file = "ruff-0.0.192-py3-none-win32.whl", hash = "sha256:c3f79b3469b28961ea1737de1b2aa0e001c0f0d16fa17ead8fcfa7b9fbcd25d1"},
    {file = "ruff-0.0.192-py3-none-win_amd64.whl", hash = "sha256:fb60b2ecba8e59ad553ba003dd529e716e1eef5cd660f1f94466765f57d60c17"},
    {file = "ruff-0.0.192.tar.gz", hash = "sha256:a7ecadd76b938c3b05f74d4223fa7cf443563086cbdfae2189220c3be0bde648"},
]
scp = [
    {file = "scp-0.14.4-py2.py3-none-any.whl", hash = "sha256:29ddaafbfba60793a8a779694c97d8c150d365668a4ef67616c515b80a69ef2f"},
    {file = "scp-0.14.4.tar.gz", hash = "sha256:54699b92cb68ae34b5928c48a888eab9722a212502cba89aa795bd56597505bd"},
@ -2107,10 +1891,6 @@ sniffio = [
    {file = "sniffio-1.3.0-py3-none-any.whl", hash = "sha256:eecefdce1e5bbfb7ad2eeaabf7c1eeb404d7757c379bd1f7e5cce9d8bf425384"},
    {file = "sniffio-1.3.0.tar.gz", hash = "sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101"},
]
snowballstemmer = [
    {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"},
    {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"},
]
stackprinter = [
    {file = "stackprinter-0.2.10-py3-none-any.whl", hash = "sha256:496e6cd058e7dd6f41e0c67e044f79a894297bec9fb80493a4fd094fac1e4677"},
    {file = "stackprinter-0.2.10.tar.gz", hash = "sha256:99d1ea6b91ffad96b28241edd7bcf071752b0cf694cab58d2335df5353acd086"},
@ -9,10 +9,10 @@ classifiers = [
    "Development Status :: 5 - Production/Stable",
    "Intended Audience :: Information Technology",
    "Operating System :: POSIX :: Linux",
    "Programming Language :: JavaScript",
    "Programming Language :: Python :: 3.6",
    "Programming Language :: Python :: 3.7",
    "Programming Language :: TypeScript",
    "Programming Language :: Python :: 3.8",
    "Programming Language :: Python :: 3.9",
    "Programming Language :: Python :: 3.10",
    "Topic :: Internet",
    "Topic :: System :: Networking",
]
@ -57,24 +57,12 @@ xmltodict = "^0.12.0"
[tool.poetry.group.dev.dependencies]
bandit = "^1.7.4"
black = "^22.12.0"
flake8 = "^6.0.0"
flake8-bandit = "^4.1.1"
flake8-black = "^0.3.5"
flake8-breakpoint = "^1.1.0"
flake8-bugbear = "^22.12.6"
flake8-builtins = "^2.0.1"
flake8-comprehensions = "^3.10.1"
flake8-deprecated = "^2.0.1"
flake8-docstrings = "^1.6.0"
flake8-isort = "^5.0.3"
flake8-plugin-utils = "^1.3.2"
flake8-polyfill = "^1.0.2"
flake8-print = "^5.0.0"
isort = "^5.10.1"
pep8-naming = "^0.13.2"
pre-commit = "^1.21.0"
pre-commit = "^2.20.0"
pytest = "^7.2.0"
pytest-dependency = "^0.5.1"
ruff = "^0.0.192"
stackprinter = "^0.2.10"
taskipy = "^1.10.3"

@ -94,19 +82,20 @@ known_third_party = ["starlette", "fastapi", "inquirer"]
length_sort = true
line_length = 100
multi_line_output = 3
profile = "black"
skip_glob = "hyperglass/api/examples/*.py"

[tool.pyright]
exclude = ["**/node_modules", "**/ui", "**/__pycache__"]
include = ["hyperglass"]
pythonVersion = "3.8"
pythonVersion = "3.9"
reportMissingImports = true
reportMissingTypeStubs = true

[tool.taskipy.tasks]
check = {cmd = "task lint && task ui-lint", help = "Run all lint checks"}
format = {cmd = "black hyperglass", help = "Run Black"}
lint = {cmd = "flake8 hyperglass", help = "Run Flake8"}
lint = {cmd = "ruff hyperglass", help = "Run Ruff Linter"}
sort = {cmd = "isort hyperglass", help = "Run iSort"}
start = {cmd = "python3 -m hyperglass.main", help = "Start hyperglass"}
start-asgi = {cmd = "uvicorn hyperglass.api:app", help = "Start hyperglass via Uvicorn"}
@ -118,3 +107,59 @@ ui-lint = {cmd = "yarn --cwd ./hyperglass/ui/ lint", help = "Run ESLint"}
ui-typecheck = {cmd = "yarn --cwd ./hyperglass/ui/ typecheck", help = "Run TypeScript Check"}
upgrade = {cmd = "python3 version.py", help = "Upgrade hyperglass version"}
yarn = {cmd = "yarn --cwd ./hyperglass/ui/", help = "Run a yarn command from the UI directory"}

[tool.ruff]
exclude = [
    ".git",
    "__pycache__",
    "hyperglass/ui",
    "hyperglass/plugins/external",
    "hyperglass/api/examples/*.py",
    "hyperglass/compat/_sshtunnel.py",
]
ignore = [
    # "W503",
    "RET504",
    "D202", # "S403",
    # "S301",
    # "S404",
    "E731",
    "D203", # Blank line before docstring.
    "D213", # Multiline docstring summary on second line.
    "D402",
    "D406",
    "D407",
    "B008",
    "I001",
    "D418", # No docstring on overloaded functions.
    "N818", # Error suffix on custom exceptions.
    "RET501", # Explicitly return None
]
line-length = 100
select = ["B", "C", "D", "E", "F", "I", "N", "S", "RET", "W"]

[tool.ruff.mccabe]
max-complexity = 10

[tool.ruff.per-file-ignores]
"hyperglass/main.py" = ["E402"]
# Disable classmethod warning for validator decorators
"hyperglass/configuration/models/*.py" = ["N805"]
"hyperglass/defaults/*/*.py" = ["E501"]
"hyperglass/models/*.py" = ["N805", "E501"]
"hyperglass/models/api/*.py" = ["N805", "E501"]
"hyperglass/models/commands/*.py" = ["N805", "E5"]
"hyperglass/parsing/models/*.py" = ["N805"]
# Disable unused import warning for modules
"hyperglass/*/__init__.py" = ["F401"]
"hyperglass/models/*/__init__.py" = ["F401"]
# Disable assertion and docstring checks on tests.
"hyperglass/**/test_*.py" = ["S101", "D103", "D100", "D104"]
"hyperglass/**/tests/*.py" = ["S101", "D103", "D100", "D104"]
"hyperglass/**/tests/__init__.py" = ["D103", "D100", "D104"]
"hyperglass/state/hooks.py" = ["F811"]
# Ignore whitespace in docstrings
"hyperglass/cli/static.py" = []
# Ignore docstring standards
"hyperglass/cli/*.py" = ["B904"]
"hyperglass/cli/main.py" = ["D400", "D403", "D415"]
11 version.py
@ -77,10 +77,10 @@ class Version:
        if self._did_update:
            old, new = self.upgrade_path
            return f"Upgraded {self.name} from {old} → {new}"
        elif self._did_check:
        if self._did_check:
            return f"No update required for {self.name} from version {self.old_version}"
        else:
            return f"{self.name} has not been checked"

        return f"{self.name} has not been checked"

    def upgrade(self) -> None:
        """Find a matching current version and upgrade it to the new version."""
@ -121,7 +121,10 @@ def update_versions(new_version: str) -> None:
    """Update hyperglass version in all package files."""
    for name, file, pattern in UPGRADES:
        with Version(
            name=name, file=file, line_pattern=pattern, new_version=new_version,
            name=name,
            file=file,
            line_pattern=pattern,
            new_version=new_version,
        ) as version:
            version.upgrade()
            typer.echo(str(version))