"""
Regen the api cache files.
"""

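# example invocation (assumes this module is installed as the
# pdb_api_cache management command under management/commands):
#   python manage.py pdb_api_cache --depths 0,1 --only net,ix
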
import datetime
import os
import shutil
import tempfile
import time
import traceback

from django.conf import settings
from django.contrib.auth.models import AnonymousUser
from django.core.management.base import BaseCommand
from rest_framework.test import APIRequestFactory

import peeringdb_server.models as pdbm
import peeringdb_server.rest as pdbr
from peeringdb_server.export_kmz import fac_export_kmz
from peeringdb_server.renderers import MetaJSONRenderer

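# models that are represented in the api cache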
MODELS = [
    pdbm.Organization,
    pdbm.Network,
    pdbm.InternetExchange,
    pdbm.Facility,
    pdbm.NetworkContact,
    pdbm.NetworkFacility,
    pdbm.IXLan,
    pdbm.IXLanPrefix,
    pdbm.NetworkIXLan,
]

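# reftag -> viewset that renders that tag's cache files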
VIEWSETS = {
    "org": pdbr.OrganizationViewSet,
    "net": pdbr.NetworkViewSet,
    "ix": pdbr.InternetExchangeViewSet,
    "fac": pdbr.FacilityViewSet,
    "carrier": pdbr.CarrierViewSet,
    "ixlan": pdbr.IXLanViewSet,
    "ixfac": pdbr.InternetExchangeFacilityViewSet,
    "ixpfx": pdbr.IXLanPrefixViewSet,
    "netfac": pdbr.NetworkFacilityViewSet,
    "netixlan": pdbr.NetworkIXLanViewSet,
    "poc": pdbr.NetworkContactViewSet,
    "carrierfac": pdbr.CarrierFacilityViewSet,
    "campus": pdbr.CampusViewSet,
}

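# tags whose rendered output is identical at every depth; they are only
# generated at depth 0 and then copied to depths 1-3 in handle() below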
MONODEPTH = {
    "carrierfac",
    "fac",
    "ixfac",
    "ixpfx",
    "netfac",
    "netixlan",
    "poc",
}

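# never generate cache files with DEBUG enabled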
settings.DEBUG = False


class Command(BaseCommand):
    help = "Regen the api cache files"

    def add_arguments(self, parser):
        parser.add_argument(
            "--only", action="store", default=False, help="only run specified type"
        )
        parser.add_argument(
            "--gen-kmz",
            action="store_true",
            help="generate a kmz file from the api-cache data",
        )
        parser.add_argument(
            "--date",
            action="store",
            default=None,
            help="generate cache for objects created before or at the specified date (YYYYMMDD)",
        )
        parser.add_argument(
            "--depths",
            action="store",
            default="0,1,2,3",
            help="comma separated list of depths to generate",
        )
        parser.add_argument(
            "--output-dir",
            action="store",
            default=settings.API_CACHE_ROOT,
            help=f"output files to this directory (default: {settings.API_CACHE_ROOT})",
        )
        parser.add_argument(
            "--public-data",
            action="store_true",
            default=False,
            help="dump public data only as anonymous user",
        )

    def log(self, id, msg):
        if self.log_file:
            self.log_file.write(f"{id}: {msg}\n")
            self.log_file.flush()
        print(f"{id}: {msg}")

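    # helper: parse a serialized row's timestamp (by default its "created" field)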
    def row_datetime(self, row, field="created"):
        return datetime.datetime.strptime(row.get(field), "%Y-%m-%dT%H:%M:%SZ")

    def handle(self, *args, **options):
        only = options.get("only", None)
        date = options.get("date", None)
        output_dir = options.get("output_dir")
        depths = list(map(int, options.get("depths").split(",")))

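        # --public-data renders the cache as an anonymous user so only
        # public data is included; otherwise render as a superuser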
        if options.get("public_data"):
            request_user = AnonymousUser()
        else:
            request_user = pdbm.User.objects.filter(is_superuser=True).first()

        # temporary setting to indicate api-cache is being generated
        # this forces api responses to be generated without permission
        # checks
        settings.GENERATING_API_CACHE = True

        if only:
            only = only.split(",")

        if date:
            last_updated = datetime.datetime.strptime(date, "%Y%m%d")
        else:
            last_updated = datetime.datetime.now()

        meta = {"generated": last_updated.timestamp()}

        self.log_file = open(settings.API_CACHE_LOG, "w+")
        self.log("info", f"Regenerating cache files to '{output_dir}'")
        self.log(
            "info",
            f"Caching depths {depths} for timestamp: {last_updated}",
        )
        request_factory = APIRequestFactory()
        renderer = MetaJSONRenderer()

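        # lift the api's depth row limit so depth-expanded responses are
        # rendered in full for the cache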
        settings.API_DEPTH_ROW_LIMIT = 0

        # will be using RequestFactory to spawn requests to generate api-cache
        # CSRF_USE_SESSIONS needs to be disabled as these are not session-enabled requests
        settings.CSRF_USE_SESSIONS = False

        start_time = time.time()

        try:
            cache = {}
            # make a temp dir to create the cache files for an atomic swap
            tmpdir = tempfile.TemporaryDirectory()

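            # render each tag's list endpoint at every requested depth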
            for tag, viewset in list(VIEWSETS.items()):
                if only and tag not in only:
                    continue

                for depth in depths:
                    if depth >= 1 and tag in MONODEPTH:
                        break

                    self.log(tag, f"generating depth {depth} to {tmpdir.name}...")
                    if depth:
                        request = request_factory.get(
                            f"/api/{tag}?depth={depth}&updated__lte={last_updated}Z&_ctf"
                        )
                    else:
                        request = request_factory.get(
                            f"/api/{tag}?updated__lte={last_updated}Z&_ctf"
                        )
                    request.user = request_user
                    vs = viewset.as_view({"get": "list"})
                    response = vs(request)

                    id = f"{tag}-{depth}"
                    file_name = os.path.join(tmpdir.name, f"{tag}-{depth}.json")
                    cache[id] = file_name
                    renderer.render(
                        response.data,
                        renderer_context={"response": response},
                        file_name=file_name,
                        default_meta=meta,
                    )

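                    # drop references to the potentially large response and
                    # viewset before the next iteration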
                    del response
                    del vs

            # move the tmp files to the cache dir
            for id, src_file in list(cache.items()):
                print(f"output_dir: {output_dir}")
                file_name = os.path.join(output_dir, f"{id}.json")
                shutil.move(src_file, file_name)

            # copy the monodepth files to the other depths
            for tag in MONODEPTH:
                if only and tag not in only:
                    continue

                for depth in [1, 2, 3]:
                    id = f"{tag}-{depth}"
                    src_file = os.path.join(output_dir, f"{tag}-0.json")
                    file_name = os.path.join(output_dir, f"{id}.json")
                    self.log("info", f"copying {src_file} to {file_name}")
                    shutil.copyfile(src_file, file_name)

        except Exception:
            self.log("error", traceback.format_exc())
            raise

        finally:
            tmpdir.cleanup()
            self.log_file.close()

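        # optionally export facility locations from the freshly written
        # api-cache data as a kmz file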
        if options.get("gen_kmz"):
            print("Generating kmz file")
            fac_export_kmz(path=output_dir)

        end_time = time.time()

        print(f"Finished after {end_time - start_time:.2f} seconds")