2019-12-29 23:57:39 -07:00
|
|
|
"""Import configuration files and returns default values if undefined."""
|
2019-07-07 02:49:54 -07:00
|
|
|
|
2020-02-03 02:35:11 -07:00
|
|
|
# Standard Library
|
|
|
|
import os
|
2020-01-26 02:18:39 -07:00
|
|
|
import copy
|
2020-01-28 09:52:54 -07:00
|
|
|
import math
|
2020-02-03 02:35:11 -07:00
|
|
|
import asyncio
|
2019-07-07 02:49:54 -07:00
|
|
|
from pathlib import Path
|
2019-06-07 18:33:49 -07:00
|
|
|
|
2020-02-03 02:35:11 -07:00
|
|
|
# Third Party
|
2019-07-10 15:57:21 -07:00
|
|
|
import yaml
|
2020-02-03 02:35:11 -07:00
|
|
|
import ujson as json
|
2020-01-20 10:17:22 -07:00
|
|
|
from aiofile import AIOFile
|
2019-07-07 02:49:54 -07:00
|
|
|
from pydantic import ValidationError
|
2019-05-26 18:46:43 -07:00
|
|
|
|
2020-02-03 02:35:11 -07:00
|
|
|
# Project
|
|
|
|
from hyperglass.util import log, check_path
|
|
|
|
from hyperglass.constants import (
|
|
|
|
CREDIT,
|
|
|
|
LOG_LEVELS,
|
|
|
|
LOG_HANDLER,
|
|
|
|
DEFAULT_HELP,
|
|
|
|
DEFAULT_TERMS,
|
|
|
|
DEFAULT_DETAILS,
|
|
|
|
LOG_HANDLER_FILE,
|
|
|
|
SUPPORTED_QUERY_TYPES,
|
|
|
|
)
|
|
|
|
from hyperglass.exceptions import ConfigError, ConfigInvalid, ConfigMissing
|
2019-10-09 03:10:52 -07:00
|
|
|
from hyperglass.configuration.models import params as _params
|
|
|
|
from hyperglass.configuration.models import routers as _routers
|
2020-02-03 02:35:11 -07:00
|
|
|
from hyperglass.configuration.models import commands as _commands
|
|
|
|
from hyperglass.configuration.markdown import get_markdown
|
2019-10-04 17:17:08 -07:00
|
|
|
|
2020-02-14 16:30:40 -07:00
|
|
|
# Root directory for all user-supplied configuration files. The
# 'hyperglass_directory' environment variable must be set before this module
# is imported; if it is missing, this raises KeyError at import time.
CONFIG_PATH = Path(os.environ["hyperglass_directory"])
log.info("Configuration directory: {d}", d=str(CONFIG_PATH))
|
|
|
|
|
2019-05-26 18:46:43 -07:00
|
|
|
# Project Directories
# Directory containing this module (location of bundled project resources).
WORKING_DIR = Path(__file__).resolve().parent

# Config files expected in CONFIG_PATH as (file name, required) pairs.
# Only devices.yaml is mandatory; the other two fall back to defaults.
CONFIG_FILES = (
    ("hyperglass.yaml", False),
    ("devices.yaml", True),
    ("commands.yaml", False),
)
|
|
|
|
|
|
|
|
|
|
|
|
async def _check_config_files(directory):
    """Verify config files exist and are readable.

    Arguments:
        directory {Path} -- Config directory Path object

    Raises:
        ConfigMissing: Raised if a required config file does not pass checks.

    Returns:
        {tuple} -- main config, devices config, commands config
    """
    found = []
    for file_name, required in CONFIG_FILES:
        file_path = directory / file_name

        # check_path() returns None when the file is absent/unreadable.
        if await check_path(file_path) is None:
            if required:
                raise ConfigMissing(missing_item=str(file_path))
            log.warning(
                "'{f}' was not found, but is not required to run hyperglass. "
                + "Defaults will be used.",
                f=str(file_path),
            )

        # The constructed path is returned regardless of the check result.
        found.append(file_path)

    return tuple(found)
|
|
|
|
|
|
|
|
|
2020-02-14 16:30:40 -07:00
|
|
|
# Directory for static assets, located under the user's config directory.
STATIC_PATH = CONFIG_PATH / "static"

# Resolve all three config file paths up front; raises ConfigMissing if the
# required devices file is absent.
# NOTE(review): _check_config_files appends the constructed path even when an
# optional file was NOT found, so CONFIG_MAIN/CONFIG_COMMANDS appear never to
# be None here — confirm whether the `is None` checks in the loaders below
# are reachable.
CONFIG_MAIN, CONFIG_DEVICES, CONFIG_COMMANDS = asyncio.run(
    _check_config_files(CONFIG_PATH)
)
|
2019-09-03 00:42:22 -07:00
|
|
|
|
2019-12-31 11:08:30 -07:00
|
|
|
|
2020-01-03 03:03:57 -07:00
|
|
|
def _set_log_level(debug, log_file=None):
    """Set log level based on debug state.

    Arguments:
        debug {bool} -- Debug state from config file

    Keyword Arguments:
        log_file -- Log file sink; when set, a file handler is registered
                    in addition to the stdout handler (default: {None})

    Returns:
        {bool} -- True
    """
    # Copy the handler templates so the module-level constants stay pristine.
    console_handler = LOG_HANDLER.copy()
    file_handler = LOG_HANDLER_FILE.copy()

    if debug:
        console_handler["level"] = "DEBUG"
        file_handler["level"] = "DEBUG"

    handlers = [console_handler]
    if log_file is not None:
        file_handler["sink"] = log_file
        handlers.append(file_handler)

    # Drop loguru's default handler and install ours.
    log.remove()
    log.configure(handlers=handlers, levels=LOG_LEVELS)

    if debug:
        log.debug("Debugging enabled")

    return True
|
|
|
|
|
|
|
|
|
|
|
|
async def _config_main():
    """Open main config file and load YAML to dict.

    Raises:
        ConfigError: Raised if the file contains invalid YAML.

    Returns:
        {dict} -- Main config file
    """
    config = {}
    try:
        async with AIOFile(CONFIG_MAIN, "r") as cf:
            raw = await cf.read()
            # An empty/blank YAML file parses to None, which previously
            # clobbered the {} default and broke downstream key access.
            # Fall back to {} (consistent with _config_commands).
            config = yaml.safe_load(raw) or {}
    except (yaml.YAMLError, yaml.MarkedYAMLError) as yaml_error:
        raise ConfigError(error_msg=str(yaml_error)) from None
    return config
|
|
|
|
|
|
|
|
|
|
|
|
async def _config_commands():
    """Open commands config file and load YAML to dict.

    Returns:
        {dict} -- Commands config file
    """
    # No commands file configured at all: use built-in command defaults.
    if CONFIG_COMMANDS is None:
        return {}

    try:
        async with AIOFile(CONFIG_COMMANDS, "r") as cf:
            # An empty file parses to None; substitute an empty dict.
            loaded = yaml.safe_load(await cf.read()) or {}
            log.debug("Unvalidated commands: {c}", c=loaded)
    except (yaml.YAMLError, yaml.MarkedYAMLError) as yaml_error:
        raise ConfigError(error_msg=str(yaml_error)) from None

    return loaded
|
|
|
|
|
|
|
|
|
|
|
|
async def _config_devices():
    """Open devices config file and load YAML to dict.

    Raises:
        ConfigError: Raised if the file contains invalid YAML.

    Returns:
        {dict} -- Devices config file
    """
    try:
        async with AIOFile(CONFIG_DEVICES, "r") as cf:
            raw = await cf.read()
            # An empty/blank YAML file parses to None, which previously made
            # the later `.get("routers", ...)` call raise AttributeError.
            # Fall back to {} (consistent with _config_commands) so the
            # failure surfaces as a config-validation error instead.
            config = yaml.safe_load(raw) or {}
            log.debug("Unvalidated device config: {c}", c=config)
    except (yaml.YAMLError, yaml.MarkedYAMLError) as yaml_error:
        raise ConfigError(error_msg=str(yaml_error)) from None
    return config
|
|
|
|
|
|
|
|
|
|
|
|
# Load the main config file at import time (blocking, intentionally).
user_config = asyncio.run(_config_main())

# Logging Config
try:
    _debug = user_config["debug"]
except KeyError:
    # Default to verbose logging until the validated config says otherwise.
    _debug = True

# Read raw debug value from config to enable debugging quickly.
_set_log_level(_debug)

# Load the remaining config files (commands is optional; devices required).
_user_commands = asyncio.run(_config_commands())
_user_devices = asyncio.run(_config_devices())
|
2019-07-07 02:49:54 -07:00
|
|
|
|
|
|
|
# Map imported user config files to expected schema:
try:
    # Validate each raw dict against its pydantic model; these raise
    # ValidationError on any schema violation.
    params = _params.Params(**user_config)
    commands = _commands.Commands.import_params(_user_commands)
    devices = _routers.Routers._import(_user_devices.get("routers", {}))
except ValidationError as validation_errors:
    errors = validation_errors.errors()
    log.error(errors)
    # NOTE(review): this loop raises on the FIRST error, so only one invalid
    # field is surfaced per run — all errors are at least logged above.
    for error in errors:
        raise ConfigInvalid(
            field=": ".join([str(item) for item in error["loc"]]),
            error_msg=error["msg"],
        )
|
2019-09-09 23:05:10 -07:00
|
|
|
|
2020-01-28 09:52:54 -07:00
|
|
|
"""
|
|
|
|
Perform post-config initialization string formatting or other
|
|
|
|
functions that require access to other config levels. E.g.,
|
|
|
|
something in 'params.web.text' needs to be formatted with a value
|
|
|
|
from params.
|
|
|
|
"""
|
|
|
|
try:
|
|
|
|
params.web.text.subtitle = params.web.text.subtitle.format(
|
2020-01-28 10:16:18 -07:00
|
|
|
**params.dict(exclude={"web", "queries", "messages"})
|
2020-01-28 09:52:54 -07:00
|
|
|
)
|
|
|
|
if params.cache.timeout >= 60:
|
|
|
|
_cache_timeout = math.ceil(params.cache.timeout / 60)
|
|
|
|
_cache_period = "minutes"
|
|
|
|
elif params.cache.timeout < 60:
|
|
|
|
_cache_timeout = params.cache.timeout
|
|
|
|
_cache_period = "seconds"
|
|
|
|
params.web.text.cache = params.web.text.cache.format(
|
|
|
|
timeout=_cache_timeout, period=_cache_period
|
|
|
|
)
|
|
|
|
except KeyError:
|
|
|
|
pass
|
|
|
|
|
|
|
|
|
2019-12-31 11:08:30 -07:00
|
|
|
# Re-evaluate debug state after config is validated
|
2020-01-28 08:59:27 -07:00
|
|
|
_set_log_level(params.debug, params.log_file)
|
2019-12-29 23:57:39 -07:00
|
|
|
|
2019-07-29 22:13:11 -07:00
|
|
|
|
2019-12-31 11:08:30 -07:00
|
|
|
def _build_frontend_networks():
    """Build filtered JSON structure of networks for frontend.

    Schema:
    {
        "device.network.display_name": {
            "device.name": {
                "display_name": "device.display_name",
                "vrfs": [
                    "Global",
                    "vrf.display_name"
                ]
            }
        }
    }

    Raises:
        ConfigError: Raised if parsing/building error occurs.

    Returns:
        {dict} -- Frontend networks
    """
    frontend_dict = {}
    for device in devices.routers:
        network_name = device.network.display_name
        entry = {
            device.name: {
                "display_name": network_name,
                "vrfs": [vrf.display_name for vrf in device.vrfs],
            }
        }
        # Merge into the existing network bucket, or start a new one.
        if network_name in frontend_dict:
            frontend_dict[network_name].update(entry)
        else:
            frontend_dict[network_name] = entry

    frontend_dict["default_vrf"] = devices.default_vrf

    if not frontend_dict:
        raise ConfigError(error_msg="Unable to build network to device mapping")
    return frontend_dict
|
|
|
|
|
|
|
|
|
2019-12-31 11:08:30 -07:00
|
|
|
def _build_frontend_devices():
    """Build filtered JSON structure of devices for frontend.

    Schema:
    {
        "device.name": {
            "display_name": "device.display_name",
            "vrfs": [
                "Global",
                "vrf.display_name"
            ]
        }
    }

    Raises:
        ConfigError: Raised if parsing/building error occurs.

    Returns:
        {dict} -- Frontend devices
    """
    frontend_dict = {}
    for device in devices.routers:
        entry = {
            "network": device.network.display_name,
            "display_name": device.display_name,
            "vrfs": [
                {"id": vrf.name, "display_name": vrf.display_name}
                for vrf in device.vrfs
            ],
        }
        # Merge with any existing entry for this device name, or add it new.
        if device.name in frontend_dict:
            frontend_dict[device.name].update(entry)
        else:
            frontend_dict[device.name] = entry

    if not frontend_dict:
        raise ConfigError(error_msg="Unable to build network to device mapping")
    return frontend_dict
|
|
|
|
|
|
|
|
|
2019-12-31 11:08:30 -07:00
|
|
|
def _build_networks():
    """Build filtered JSON Structure of networks & devices for Jinja templates.

    Raises:
        ConfigError: Raised if parsing/building error occurs.

    Returns:
        {dict} -- Networks & devices
    """
    networks = []
    # A set comprehension already deduplicates; the previous list(set({...}))
    # double-wrapped it for no effect.
    _networks = list({device.network.display_name for device in devices.routers})

    for _network in _networks:
        network_def = {"display_name": _network, "locations": []}
        for device in devices.routers:
            if device.network.display_name == _network:
                network_def["locations"].append(
                    {
                        "name": device.name,
                        "display_name": device.display_name,
                        "network": device.network.display_name,
                        "vrfs": [
                            {"id": vrf.name, "display_name": vrf.display_name}
                            for vrf in device.vrfs
                        ],
                    }
                )
        networks.append(network_def)

    if not networks:
        raise ConfigError(error_msg="Unable to build network to device mapping")
    return networks
|
2019-10-09 03:10:52 -07:00
|
|
|
|
|
|
|
|
2020-01-16 02:51:10 -07:00
|
|
|
def _build_vrfs():
    """Build a deduplicated list of {id, display_name} dicts for all VRFs.

    Returns:
        {list} -- Unique VRFs across all devices, in first-seen order.
    """
    unique_vrfs = []
    for router in devices.routers:
        for vrf in router.vrfs:
            candidate = {"id": vrf.name, "display_name": vrf.display_name}
            # Linear membership check keeps first-seen ordering.
            if candidate not in unique_vrfs:
                unique_vrfs.append(candidate)
    return unique_vrfs
|
|
|
|
|
|
|
|
|
2020-01-17 02:50:57 -07:00
|
|
|
# Subset of validated params (as a plain dict) used to render the markdown
# content templates below; round-trips through JSON to serialize pydantic
# types to primitives.
content_params = json.loads(
    params.json(include={"primary_asn", "org_name", "site_title", "site_description"})
)
|
|
|
|
|
|
|
|
|
|
|
|
def _build_vrf_help():
    """Build a dict of vrfs as keys, help content as values.

    Returns:
        {dict} -- Formatted VRF help
    """
    help_map = {}
    for vrf in devices.vrf_objects:

        per_command = {}
        for query_type in SUPPORTED_QUERY_TYPES:
            cmd = getattr(vrf.info, query_type)
            # Per-command params take precedence over the shared content params.
            merged_params = {**content_params, **cmd.params.dict()}

            if merged_params["title"] is None:
                # Derive a default title from the VRF & query display names.
                query_params = getattr(params.queries, query_type)
                merged_params["title"] = (
                    f"{vrf.display_name}: {query_params.display_name}"
                )

            rendered = asyncio.run(
                get_markdown(
                    config_path=cmd,
                    default=DEFAULT_DETAILS[query_type],
                    params=merged_params,
                )
            )

            per_command[query_type] = {
                "content": rendered,
                "enable": cmd.enable,
                "params": merged_params,
            }

        help_map[vrf.name] = per_command

    return help_map
|
|
|
|
|
|
|
|
|
|
|
|
# Pre-render per-VRF help content once at startup.
content_vrf = _build_vrf_help()

# Help-menu content: shared content params plus the configured menu title.
content_help_params = copy.copy(content_params)
content_help_params["title"] = params.web.help_menu.title
content_help = asyncio.run(
    get_markdown(
        config_path=params.web.help_menu,
        default=DEFAULT_HELP,
        params=content_help_params,
    )
)

# Terms & conditions content, rendered the same way as the help menu.
content_terms_params = copy.copy(content_params)
content_terms_params["title"] = params.web.terms.title
content_terms = asyncio.run(
    get_markdown(
        config_path=params.web.terms, default=DEFAULT_TERMS, params=content_terms_params
    )
)
# Static credit/footer text from constants.
content_credit = CREDIT
|
|
|
|
|
2020-01-16 02:51:10 -07:00
|
|
|
# Build all frontend-facing structures once at startup.
vrfs = _build_vrfs()
networks = _build_networks()
frontend_networks = _build_frontend_networks()
frontend_devices = _build_frontend_devices()

# Fields of params exposed to the frontend. Ellipsis (...) means "include
# the entire field" in pydantic's include-filter syntax.
_frontend_fields = {
    "debug": ...,
    "developer_mode": ...,
    "primary_asn": ...,
    "request_timeout": ...,
    "org_name": ...,
    "google_analytics": ...,
    "site_description": ...,
    "web": ...,
    "messages": ...,
}
_frontend_params = params.dict(include=_frontend_fields)
# Augment the filtered params with the structures built above.
_frontend_params.update(
    {
        "queries": {**params.queries.map, "list": params.queries.list},
        "devices": frontend_devices,
        "networks": networks,
        "vrfs": vrfs,
        "content": {
            "help_menu": content_help,
            "terms": content_terms,
            "credit": content_credit,
            "vrf": content_vrf,
        },
    }
)
# Public alias consumed by the frontend build.
frontend_params = _frontend_params
|
2020-01-21 17:32:31 -07:00
|
|
|
|
2020-02-14 16:30:40 -07:00
|
|
|
# API base URLs: local dev server vs. production reverse-proxied path.
URL_DEV = f"http://localhost:{str(params.listen_port)}/"
URL_PROD = "/api/"

# Connection parameters for the Redis response cache.
REDIS_CONFIG = {
    "host": str(params.cache.host),
    "port": params.cache.port,
    "decode_responses": True,
}
|