Mirror of https://github.com/peeringdb/peeringdb.git, synced 2024-05-11 05:55:09 +00:00
add public cache gen (#1441)
* descriptive variable names
* add flag for only public data
@@ -9,6 +9,7 @@ import time
 import traceback
 
 from django.conf import settings
+from django.contrib.auth.models import AnonymousUser
 from django.core.management.base import BaseCommand
 from rest_framework.test import APIRequestFactory
 
@@ -76,6 +77,18 @@ class Command(BaseCommand):
             default="0,1,2,3",
             help="comma separated list of depths to generate",
         )
+        parser.add_argument(
+            "--output-dir",
+            action="store",
+            default=settings.API_CACHE_ROOT,
+            help=f"output files to this directory (default: {settings.API_CACHE_ROOT})",
+        )
+        parser.add_argument(
+            "--public-data",
+            action="store_true",
+            default=False,
+            help="dump public data only as anonymous user",
+        )
 
     def log(self, id, msg):
         if self.log_file:
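With these flags in place the generator can be driven from the shell or programmatically through Django's call_command. A minimal sketch, assuming the command is registered as "pdb_api_cache" (the command name is not visible in this diff):

    # hypothetical usage; "pdb_api_cache" is an assumed command name
    from django.core.management import call_command

    # public data only, rendered as an anonymous user, into a custom directory
    call_command("pdb_api_cache", public_data=True, output_dir="/tmp/api-cache")

    # defaults: full dataset as a superuser, written to settings.API_CACHE_ROOT
    call_command("pdb_api_cache")

call_command maps each option to its argparse dest, so --output-dir is passed as the output_dir keyword.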
@@ -89,12 +102,20 @@ class Command(BaseCommand):
     def handle(self, *args, **options):
         only = options.get("only", None)
         date = options.get("date", None)
+        output_dir = options.get("output_dir")
         depths = list(map(int, options.get("depths").split(",")))
 
+        # temporary setting to indicate api-cache is being generated
+        # this forced api responses to be generated without permission
+        # checks
+        settings.GENERATING_API_CACHE = True
+        print(f"output_dir: {output_dir}")
+
+        if options.get("public_data"):
+            request_user = AnonymousUser()
+
+        else:
+            request_user = pdbm.User.objects.filter(is_superuser=True).first()
-        # temporary setting to indicate api-cache is being generated
-        # this forced api responses to be generated without permission
-        # checks
-        settings.GENERATING_API_CACHE = True
 
        if only:
            only = only.split(",")
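The new --public-data branch decides which principal the synthetic cache requests run as: AnonymousUser produces a dump containing only what an unauthenticated API client could see, while the default keeps the old behavior of rendering everything as the first superuser. The selection logic, restated as a standalone sketch (User stands in for pdbm.User):

    # standalone sketch of the branch above; User stands in for pdbm.User
    from django.contrib.auth.models import AnonymousUser

    def pick_request_user(options, User):
        if options.get("public_data"):
            # anonymous principal: permission-aware serializers expose
            # only publicly visible objects and fields
            return AnonymousUser()
        # superuser principal: the full, unredacted dataset
        return User.objects.filter(is_superuser=True).first()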
@@ -105,18 +126,14 @@ class Command(BaseCommand):
         dt = datetime.datetime.now()
         dtstr = dt.strftime("%Y-%m-%dT%H:%M:%SZ")
         self.log_file = open(settings.API_CACHE_LOG, "w+")
-        self.log("info", "Regnerating cache files to '%s'" % settings.API_CACHE_ROOT)
+        self.log("info", f"Regnerating cache files to '{output_dir}'")
         self.log(
             "info",
             f"Caching depths {str(depths)} for timestamp: {str(dtstr)}",
         )
-        rf = APIRequestFactory()
+        request_factory = APIRequestFactory()
         renderer = MetaJSONRenderer()
 
-        t = time.time()
-
-        su = pdbm.User.objects.filter(is_superuser=True).first()
-
         settings.API_DEPTH_ROW_LIMIT = 0
 
         # will be using RequestFactory to spawn requests to generate api-cache
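APIRequestFactory builds request objects without a running HTTP server, which is what lets this command render API responses straight to cache files. A reduced sketch of the factory-to-viewset round trip used in the generation loop below; the viewset here is a made-up stand-in, not one of the command's real viewsets:

    # illustrative only: EchoViewSet is a hypothetical stand-in viewset
    from rest_framework import viewsets
    from rest_framework.response import Response
    from rest_framework.test import APIRequestFactory

    class EchoViewSet(viewsets.ViewSet):
        def list(self, request):
            # echoes the principal so the public/superuser switch is visible
            return Response({"user": str(request.user), "data": []})

    factory = APIRequestFactory()
    request = factory.get("/api/echo?depth=0")
    view = EchoViewSet.as_view({"get": "list"})
    response = view(request)  # a normal DRF Response, no server involved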
@@ -124,6 +141,8 @@ class Command(BaseCommand):
 
         settings.CSRF_USE_SESSIONS = False
 
+        start_time = time.time()
+
         try:
             cache = {}
             # make a temp dir to create the cache files for an atomic swap
@@ -139,31 +158,34 @@ class Command(BaseCommand):
 
                     self.log(tag, "generating depth %d" % depth)
                     if depth:
-                        req = rf.get(
+                        request = request_factory.get(
                             "/api/%s?depth=%d&updated__lte=%s&_ctf"
                             % (tag, depth, dtstr)
                         )
                     else:
-                        req = rf.get(f"/api/{tag}?updated__lte={dtstr}&_ctf")
-                    req.user = su
+                        request = request_factory.get(
+                            f"/api/{tag}?updated__lte={dtstr}&_ctf"
+                        )
+                    request.user = request_user
                     vs = viewset.as_view({"get": "list"})
-                    res = vs(req)
+                    response = vs(request)
 
                     id = f"{tag}-{depth}"
                     file_name = os.path.join(tmpdir.name, f"{tag}-{depth}.json")
                     cache[id] = file_name
                     renderer.render(
-                        res.data,
-                        renderer_context={"response": res},
+                        response.data,
+                        renderer_context={"response": response},
                         file_name=file_name,
                     )
 
-                    del res
+                    del response
                     del vs
 
             # move the tmp files to the cache dir
             for id, src_file in list(cache.items()):
-                file_name = os.path.join(settings.API_CACHE_ROOT, "%s.json" % (id))
+                print(f"output_dir: {output_dir}")
+                file_name = os.path.join(output_dir, "%s.json" % (id))
                 shutil.move(src_file, file_name)
 
             # copy the monodepth files to the other depths
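Rendering into a tempfile.TemporaryDirectory and only then moving the finished files into the output directory means readers never observe a half-written cache file. A reduced sketch of that pattern with illustrative paths:

    # reduced sketch of the temp-dir-then-move swap; paths are illustrative
    import os
    import shutil
    import tempfile

    output_dir = "/srv/api-cache"  # stands in for the --output-dir option
    tmpdir = tempfile.TemporaryDirectory()
    try:
        src = os.path.join(tmpdir.name, "net-0.json")
        with open(src, "w") as fh:
            fh.write("{}")  # placeholder for a rendered API response
        # a same-filesystem move is a rename, so the swap is effectively
        # atomic; across filesystems shutil.move degrades to copy+delete
        shutil.move(src, os.path.join(output_dir, "net-0.json"))
    finally:
        tmpdir.cleanup()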
@@ -173,8 +195,8 @@ class Command(BaseCommand):
 
                 for depth in [1, 2, 3]:
                     id = f"{tag}-{depth}"
-                    src_file = os.path.join(settings.API_CACHE_ROOT, f"{tag}-0.json")
-                    file_name = os.path.join(settings.API_CACHE_ROOT, f"{id}.json")
+                    src_file = os.path.join(output_dir, f"{tag}-0.json")
+                    file_name = os.path.join(output_dir, f"{id}.json")
                     self.log("info", f"copying {src_file} to {file_name}")
                     shutil.copyfile(src_file, file_name)
 
@@ -186,6 +208,6 @@ class Command(BaseCommand):
             tmpdir.cleanup()
             self.log_file.close()
 
-        t2 = time.time()
+        end_time = time.time()
 
-        print("Finished after %.2f seconds" % (t2 - t))
+        print("Finished after %.2f seconds" % (end_time - start_time))