import datetime
import os
import time
import traceback
from optparse import make_option

from django.conf import settings
from django.core.management.base import BaseCommand
from rest_framework.test import APIRequestFactory

import peeringdb_server.models as pdbm
import peeringdb_server.rest as pdbr
from peeringdb_server.renderers import MetaJSONRenderer
# Handle-ref models that participate in api-cache generation.
# NOTE(review): not referenced in the visible code below — cache generation
# iterates VIEWSETS instead; presumably kept for reference/compat. Verify
# against other callers before removing.
MODELS = [
    pdbm.Organization,
    pdbm.Network,
    pdbm.InternetExchange,
    pdbm.Facility,
    pdbm.NetworkContact,
    pdbm.NetworkFacility,
    pdbm.IXLan,
    pdbm.IXLanPrefix,
    pdbm.NetworkIXLan,
]
|
|
|
|
|
|
|
|
# Maps object tag (as used in /api/<tag> URLs and <tag>-<depth>.json cache
# file names) to the REST viewset that serves it. Cache generation in
# Command.handle iterates this mapping.
VIEWSETS = {
    "org": pdbr.OrganizationViewSet,
    "net": pdbr.NetworkViewSet,
    "ix": pdbr.InternetExchangeViewSet,
    "fac": pdbr.FacilityViewSet,
    "ixlan": pdbr.IXLanViewSet,
    "ixfac": pdbr.InternetExchangeFacilityViewSet,
    "ixpfx": pdbr.IXLanPrefixViewSet,
    "netfac": pdbr.NetworkFacilityViewSet,
    "netixlan": pdbr.NetworkIXLanViewSet,
    "poc": pdbr.NetworkContactViewSet,
}
|
|
|
|
|
|
|
|
# Force production behavior (no debug toolbars / query logging) while
# generating cache files, regardless of the deployed settings.
settings.DEBUG = False
|
|
|
|
|
|
|
|
|
|
|
|
class Command(BaseCommand):
    """Regenerate the api-cache files.

    For every tag in VIEWSETS (optionally filtered via --only) this renders
    the list endpoint at depths 0-3 through the normal DRF viewset machinery
    and writes the rendered JSON to settings.API_CACHE_ROOT as
    "<tag>-<depth>.json". Progress is written both to stdout and to
    settings.API_CACHE_LOG.
    """

    help = "Regen the api cache files"

    def add_arguments(self, parser):
        """Register --only and --date command-line options."""
        parser.add_argument(
            "--only", action="store", default=False, help="only run specified type"
        )
        parser.add_argument(
            "--date",
            action="store",
            default=None,
            help="generate cache for objects create before or at the specified date (YYYYMMDD)",
        )

    def log(self, id, msg):
        """Write a progress line to the log file (if open) and to stdout."""
        if self.log_file:
            # fix: append a newline so log entries do not run together
            self.log_file.write(f"{id}: {msg}\n")
            self.log_file.flush()
        print(f"{id}: {msg}")

    def row_datetime(self, row, field="created"):
        """Parse an ISO-8601 "Z" timestamp from a serialized row dict.

        `row` is a mapping with a string timestamp under `field`; returns a
        naive datetime.
        """
        return datetime.datetime.strptime(row.get(field), "%Y-%m-%dT%H:%M:%SZ")

    def handle(self, *args, **options):
        only = options.get("only", None)
        date = options.get("date", None)

        # temporary setting to indicate api-cache is being generated
        # this forces api responses to be generated without permission
        # checks
        settings.GENERATING_API_CACHE = True

        if only:
            only = only.split(",")

        # cache cutoff timestamp: everything updated at or before this
        # moment is included
        if date:
            dt = datetime.datetime.strptime(date, "%Y%m%d")
        else:
            dt = datetime.datetime.now()
        dtstr = dt.strftime("%Y-%m-%dT%H:%M:%SZ")

        self.log_file = open(settings.API_CACHE_LOG, "w+")
        try:
            self.log(
                "info",
                "Regenerating cache files to '%s'" % settings.API_CACHE_ROOT,
            )
            self.log("info", "Caching data for timestamp: %s" % dtstr)

            rf = APIRequestFactory()
            renderer = MetaJSONRenderer()

            t = time.time()

            # render requests as a superuser so no objects are filtered out
            su = pdbm.User.objects.filter(is_superuser=True).first()

            # no row limit while dumping full depth responses
            settings.API_DEPTH_ROW_LIMIT = 0

            try:
                cache = {}

                for tag, viewset in VIEWSETS.items():
                    if only and tag not in only:
                        continue

                    for depth in [0, 1, 2, 3]:
                        self.log(tag, "generating depth %d" % depth)
                        # `_ctf` marks the request as a cache-toolkit fetch
                        if depth:
                            req = rf.get(
                                "/api/%s?depth=%d&updated__lte=%s&_ctf"
                                % (tag, depth, dtstr)
                            )
                        else:
                            req = rf.get(f"/api/{tag}?updated__lte={dtstr}&_ctf")
                        req.user = su
                        vs = viewset.as_view({"get": "list"})
                        res = vs(req)

                        cache[f"{tag}-{depth}"] = renderer.render(
                            res.data, renderer_context={"response": res}
                        )
                        # free the response/viewset promptly; rendered
                        # payloads can be large
                        del res
                        del vs

                for id, data in cache.items():
                    self.log(id, "saving file")
                    with open(
                        os.path.join(settings.API_CACHE_ROOT, "%s.json" % (id)), "w+"
                    ) as output:
                        output.write(data)

            except Exception:
                self.log("error", traceback.format_exc())
                raise

            t2 = time.time()
            print("Finished after %.2f seconds" % (t2 - t))
        finally:
            # fix: the log file was previously never closed
            self.log_file.close()