# Mirror of https://github.com/peeringdb/peeringdb.git (synced 2024-05-11 05:55:09 +00:00)
# File: peeringdb_server/management/commands/pdb_api_cache.py
from django.core.management.base import BaseCommand
import os
import traceback
import peeringdb_server.models as pdbm
import peeringdb_server.rest as pdbr
import datetime
import time
from peeringdb_server.renderers import MetaJSONRenderer
from django.conf import settings
from optparse import make_option
from rest_framework.test import APIRequestFactory
# Model classes whose REST representations are cached by this command.
# (Kept in sync with the tags listed in VIEWSETS below.)
MODELS = [
    pdbm.Organization,
    pdbm.Network,
    pdbm.InternetExchange,
    pdbm.Facility,
    pdbm.NetworkContact,
    pdbm.NetworkFacility,
    pdbm.IXLan,
    pdbm.IXLanPrefix,
    pdbm.NetworkIXLan,
]
# Maps API endpoint tag -> DRF viewset; handle() renders each tag's list
# endpoint at depths 0-3 and writes the output to the cache directory.
VIEWSETS = {
    "org": pdbr.OrganizationViewSet,
    "net": pdbr.NetworkViewSet,
    "ix": pdbr.InternetExchangeViewSet,
    "fac": pdbr.FacilityViewSet,
    "ixlan": pdbr.IXLanViewSet,
    "ixfac": pdbr.InternetExchangeFacilityViewSet,
    "ixpfx": pdbr.IXLanPrefixViewSet,
    "netfac": pdbr.NetworkFacilityViewSet,
    "netixlan": pdbr.NetworkIXLanViewSet,
    "poc": pdbr.NetworkContactViewSet,
}

# Force debug off while generating cache files (avoids Django debug overhead
# such as query logging during the large renders below).
settings.DEBUG = False
class Command(BaseCommand):
    """Regenerate the API cache files.

    For every tag in VIEWSETS, render the list endpoint at depths 0-3
    (as a superuser, with no depth row limit) and write each rendered
    JSON document to ``settings.API_CACHE_ROOT/<tag>-<depth>.json``.
    """

    help = "Regen the api cache files"

    def add_arguments(self, parser):
        """Register the command-line options for this command."""
        parser.add_argument(
            "--only", action="store", default=False, help="only run specified type"
        )
        parser.add_argument(
            "--date",
            action="store",
            default=None,
            help="generate cache for objects create before or at the specified date (YYYYMMDD)",
        )

    def log(self, id, msg):
        """Write `id: msg` to the log file (if open) and echo it to stdout."""
        if self.log_file:
            # Trailing newline added: the original wrote entries
            # back-to-back, producing an unreadable single-line log.
            self.log_file.write("%s: %s\n" % (id, msg))
            self.log_file.flush()
        print("%s: %s" % (id, msg))

    def row_datetime(self, row, field="created"):
        """Parse the ISO-8601 Zulu timestamp stored under `field` in an API row dict."""
        return datetime.datetime.strptime(row.get(field), "%Y-%m-%dT%H:%M:%SZ")

    def handle(self, *args, **options):
        """Render and persist cache files for all (or --only selected) tags."""
        only = options.get("only", None)
        date = options.get("date", None)

        if only:
            only = only.split(",")

        # Cache cutoff: objects updated at or before this timestamp.
        if date:
            dt = datetime.datetime.strptime(date, "%Y%m%d")
        else:
            dt = datetime.datetime.now()
        dtstr = dt.strftime("%Y-%m-%dT%H:%M:%SZ")

        self.log_file = open(settings.API_CACHE_LOG, "w+")
        try:
            self.log(
                "info",
                "Regenerating cache files to '%s'" % settings.API_CACHE_ROOT,
            )
            self.log("info", "Caching data for timestamp: %s" % dtstr)
            rf = APIRequestFactory()
            renderer = MetaJSONRenderer()

            t = time.time()

            # Render as a superuser so non-public fields are included.
            su = pdbm.User.objects.filter(is_superuser=True).first()

            # Disable the row limit so deep renders return full data sets.
            settings.API_DEPTH_ROW_LIMIT = 0

            try:
                cache = {}
                for tag, viewset in list(VIEWSETS.items()):
                    if only and tag not in only:
                        continue
                    for depth in [0, 1, 2, 3]:
                        self.log(tag, "generating depth %d" % depth)
                        if depth:
                            req = rf.get(
                                "/api/%s?depth=%d&updated__lte=%s&_ctf"
                                % (tag, depth, dtstr)
                            )
                        else:
                            req = rf.get(
                                "/api/%s?updated__lte=%s&_ctf" % (tag, dtstr)
                            )
                        req.user = su
                        vs = viewset.as_view({"get": "list"})
                        res = vs(req)
                        cache["%s-%s" % (tag, depth)] = renderer.render(
                            res.data, renderer_context={"response": res}
                        )
                        # Free the rendered response before the next pass.
                        del res
                        del vs

                # `cache_id` (was `id`, which shadows the builtin) is the
                # "<tag>-<depth>" key used as the output filename stem.
                for cache_id, data in list(cache.items()):
                    self.log(cache_id, "saving file")
                    with open(
                        os.path.join(settings.API_CACHE_ROOT, "%s.json" % (cache_id)),
                        "w+",
                    ) as output:
                        output.write(data)
            except Exception:
                self.log("error", traceback.format_exc())
                raise
        finally:
            # Always release the log handle (the original leaked it).
            self.log_file.close()
        t2 = time.time()
        print("Finished after %.2f seconds" % (t2 - t))