mirror of
https://github.com/peeringdb/peeringdb.git
synced 2024-05-11 05:55:09 +00:00
* install django-grainy * nsp to grainy first iteration * Fix validation error message overflow * Add migration, update views.py and template to add help_text to UI * nsp to grainy second iteration * grainy and django-grainy pinned to latest releases * deskpro ticket cc (#875) * black formatting * move ac link to bottom for ticket body * Fix typo * Update djangorestframework, peeringdb, django-ratelimit * Rewrite login view ratelimit decorator * Relock pipfile * add list() to make copy of dictionaries before iterating * respect ix-f url visibilty in ix-f conflict emails * Add type coercion to settings taken from environment variables * Add bool handling * relock pipfile with python3.9 change docker to use python3.9 * Check bool via isinstance * add ordering to admin search queryset for deskproticket and email * update settings with envvar_type option * Add tooltips to add ix and add exchange views (in org) * Add tooltip to suggest fac view * get phone information in view * add missing migration * add migration and make org a geo model * Wire normalization to put/create requests for Facility * Update admin with new address fields * Refactor serializer using mixin * Add floor and suite to address API * Write command to geonormalize existing entries * Remove unnecessary method from model * Add floor and suite to views * Add ignore geo status * Force refresh for fac and org updates * adjust frontend typo * add checking if update needs geosync * redo error handling for geosync * remove save keyword from geonormalize command script * change raw_id_fields * alternate autocomplete lookup field depending on where inline is called * remove unnecessary error handling * Add csv option * Fix bug with None vs empty string * add regex parsing for suite and floor conversion * Add migration that removes geo error as a field * add geostatus update to command * Ignore suite floor and address2 changes for api normalization * update geomodel by removing geo_error * Black models.py * 
Black serializers.py * remove geocode error from admin * Add function for reversing pretty speed * add conversion to export method * fix typo * fix speed value feedback after submit * remove conditional * Add error handling to create endpoint * Refine floor and suite parsing regex * Add geocoding tests * Add json for tests * IX-F Importer: Bogus output of "Preview" tool #896 * remove cruft * black formatting * IX-F Importer: history of changes per ixlan & netixlan #893 * 6 add geocode to org view * 4 update geocode without refresh * Update error display * Fix bug with formatting translated string * Add DateTimeFields to model * Add update signals * add last updated fields to views and serializers * Add last updated model migration * Add the data migration for last updated fields * add test that tests a normal org user with create org permissions * grainy to 1.7 django grainy to 1.9.1 * Fix formatting issues * Adjust var names * Refactor signals * Temporary: save override from network model * Empty vlan lists no longer cause error * typo in ixf.py * typo in admin * Typos in model verbose names * Add serializer IXLAN validation for ixf_ixp_import_enabled * Add model validation to IXLan * relock pipfile * relock pipfile * begin signal test file * Remove full clean from save in ixlan * use post_reversion_commit signal instead * remove redundant save override * remove cruft / debug code * Add signal tests * exclude organizations with city missing from commandline geosync * Skip geosync if the only address information we have is a country * initial commit for vlan matcher in importer * Add more tests and remove unused imports * update tests * Actually add vlan matching to importer * Add type checking for speed list and state * Change how we register connection.state * add bootstrap options * add rdap cache command * remove outdated perm docs * rdap from master and relock * propagate rdap settings to peeringdb.settings * add loaddata for initial fixtures * user friendly 
error message on RdapNotFound errors (#497) * update rdap errors * django-peeringdb to 2.5.0 and relock * rdap to 1.2.0 and relock * fix migration hierarchy * add ignore_recurse_errors option * add missing fields to mock remove cruft missed during merge * rdap to 1.2.1 * dont geo validate during api tests * fix tests * Add test file * fix merge * RDAP_SELF_BOOTSTRAP to False while running tests * black formatted * run black * add github actions * add runs on Co-authored-by: Stefan Pratter <stefan@20c.com> Co-authored-by: Elliot Frank <elliot@20c.com>
117 lines
3.4 KiB
Python
117 lines
3.4 KiB
Python
import os
|
|
import json
|
|
import collections
|
|
|
|
from django.conf import settings
|
|
|
|
from peeringdb_server.models import InternetExchange, IXLan, Network
|
|
|
|
|
|
class CacheRedirect(Exception):
    """
    Raise this error to redirect to cache response during viewset.get_queryset
    or viewset.list()

    Argument should be an APICacheLoader instance
    """

    def __init__(self, loader):
        # Bug fix: the original called
        # `super(Exception, self).__init__(self, "...")`, which skips
        # Exception in the MRO and also stuffs the exception instance
        # itself into `.args`. The zero-argument super() form with just
        # the message is the intended behavior.
        super().__init__("Result to be loaded from cache")

        # APICacheLoader instance whose .load() will produce the response
        self.loader = loader
|
|
|
|
|
|
###############################################################################
|
|
# API CACHE LOADER
|
|
|
|
|
|
class APICacheLoader:
    """
    Checks if an API GET request qualifies for a cache load
    and if it does allows you to provide the cached result
    """

    def __init__(self, viewset, qset, filters):
        req = viewset.request

        self.request = req
        self.qset = qset
        self.filters = filters
        self.model = viewset.model
        self.viewset = viewset

        params = req.query_params

        # cache files only exist for depths 0 through 3, so clamp
        self.depth = min(int(params.get("depth", 0)), 3)
        self.limit = int(params.get("limit", 0))
        self.skip = int(params.get("skip", 0))
        self.since = int(params.get("since", 0))

        # optional comma-separated field whitelist; falsy values
        # (None / empty string) are kept as-is
        fields = params.get("fields")
        self.fields = fields.split(",") if fields else fields

        # cache file location is keyed on handleref tag and depth
        self.path = os.path.join(
            settings.API_CACHE_ROOT,
            f"{viewset.model.handleref.tag}-{self.depth}.json",
        )

    def qualifies(self):
        """
        Check if request qualifies for a cache load
        """

        # api cache use is disabled, no
        if not getattr(settings, "API_CACHE_ENABLED", False):
            return False

        # no depth and a limit lower than 251 seems like a tipping point
        # where non-cache retrieval is faster still
        small_shallow_page = (
            not self.depth
            and self.limit
            and self.limit <= 250
            and getattr(settings, "API_CACHE_ALL_LIMITS", False) is False
        )
        if small_shallow_page:
            return False

        # any of these disqualify the request from cache loading:
        # - filters or incremental (`since`) queries were specified
        # - the cache file does not exist
        # - the request method is anything but GET
        # - a primary key was set in the request (detail view)
        blocked = (
            self.filters
            or self.since
            or not os.path.exists(self.path)
            or self.request.method != "GET"
            or self.viewset.kwargs
        )
        return not blocked

    def load(self):
        """
        Load the cached response according to tag and depth
        """

        # read cache file
        with open(self.path) as fh:
            payload = json.load(fh)

        rows = payload.get("data")

        # apply pagination via a single slice: `skip` is the start index,
        # and when a limit is given the stop index is skip + limit
        if self.skip or self.limit:
            stop = self.skip + self.limit if self.limit else None
            rows = rows[self.skip : stop]

        # apply field whitelist, if one was requested
        if self.fields:
            for entry in rows:
                self.filter_fields(entry)

        return {"results": rows, "__meta": {"generated": os.path.getmtime(self.path)}}

    def filter_fields(self, row):
        """
        Removes any unwanted fields from the resultset
        according to the `fields` filter specified in the request
        """
        wanted = self.fields
        # iterate over a snapshot of the keys since we mutate the dict;
        # `_grainy` is always preserved (permission namespace marker)
        for key in list(row):
            if key != "_grainy" and key not in wanted:
                del row[key]