1
0
mirror of https://github.com/peeringdb/peeringdb.git synced 2024-05-11 05:55:09 +00:00

June updates (#751)

* Add pointer from API docs to tutorial #650

* Sorting by clicking table headers should use local-compare #356

* Mark IXP peering LAN as bogon #352

* Add help text to "Add (Facility, Network, Exchange)" tab #669

* Add Looking Glass field to the IX object #672

* Add read-only Superuser #679

* Make spelling of traffic levels consistent #519 (#723)

* Offer 2FA (#290)

* Show "Last Updated" fields on fac, ix, org records (#526)

* Enable sort and reverse sort of IP column in IX display (#72)

* IRR validation not handling unexpected characters gracefully (#712)

* Support alternative direction of writing, e.g. Arabic (#618)

* Undeleting an ixlan with an empty IPv4 XOR IPv6 field throws a silly error (#644)

* Changing org while adding net results in 500 #654

* missing delete button for organisations (#121)

* When changing owner of an ix admin GUI borks because of "Ixlan for exchange already exists" #666

* Selection should only present undeleted objects (#664)

* change default encoding of API calls to 'utf-8' #663

* Posting https://www.peeringdb.com onto social media doesn't select a good preview image #537

* Revert "Add Looking Glass field to the IX object #672"

This reverts commit 4daf2520043c241fabe9a521757efa86a274e28a.

Conflicts:
	peeringdb_server/migrations/0037_ix_looking_glass.py
	peeringdb_server/views.py

* 500 Internal Error when creating IX where prefix already exists elsewhere #718

* Fix graceful restore of soft-deleted objects with translation active (#580)

* Don't return any POC data with status=deleted #569
Hard delete soft-deleted pocs after grace period #566

* django-peeringdb from github@2.0.0.2-beta

Co-authored-by: Stefan Pratter <stefan@20c.com>
This commit is contained in:
Matt Griswold
2020-06-24 12:55:01 -05:00
committed by GitHub
parent 09b4759b02
commit af6974e3d3
60 changed files with 1797 additions and 336 deletions

View File

@@ -812,21 +812,87 @@ class ModelSerializer(PermissionedModelSerializer):
return slz_fld.queryset.get(id=data[_fld])
def run_validation(self, data=serializers.empty):
"""
Custom validation handling
Will run the vanilla django-rest-framework validation but
wrap it with logic to handle unique constraint errors to
restore soft-deleted objects that are blocking a save on basis
of a unique constraint violation
"""
try:
return super(ModelSerializer, self).run_validation(data=data)
except RestValidationError as exc:
filters = {}
for k, v in list(exc.detail.items()):
v = v[0]
if k == "non_field_errors" and v.find("unique set") > -1:
m = re.match("The fields (.+) must make a unique set.", v)
if m:
for fld in [i.strip() for i in m.group(1).split(",")]:
filters[fld] = data.get(fld, self._unique_filter(fld, data))
elif v.find("must be unique") > -1:
# if code is not set on the error detail it's
# useless to us
if not hasattr(v, "code"):
continue
# During `ix` creation `prefix` is passed to create
# an `ixpfx` object alongside the ix, it's not part of ix
# so ignore it (#718)
if k == "prefix" and self.Meta.model == InternetExchange:
continue
# when handling unique constraint database errors
# we want to check if the offending object is
# currently soft-deleted and can gracefully be
# restored.
if v.code == "unique" and k == "non_field_errors":
# unique-set errors - database blocked save
# because of a unique multi key constraint
# find out which fields caused the issues
# this is done by checking all serializer fields
# against the error message.
#
# If a field is contained in the error message
# it can be safely assumed to be part of the
# unique set that caused the collision
columns = "|".join(self.Meta.fields)
m = re.findall(r"\b({})\b".format(columns), v)
# build django queryset filters we can use
# to retrieve the blocking object
for fld in m:
_value = data.get(fld, self._unique_filter(fld, data))
if _value is not None:
filters[fld] = _value
elif v.code == "unique":
# unique single field error
# build django queryset filter we can use to
# retrieve the blocking object
filters[k] = data.get(k, self._unique_filter(k, data))
request = self._context.get("request")
# handle graceful restore of soft-deleted object
# that is causing the unique constraint error
#
# if `filters` is set it means that we were able
# to identify a soft-deleted object that we want
# to restore
#
# At this point only `POST` (create) requests
# should ever attempt a restoration like this
if filters and request and request.user and request.method == "POST":
if "fac_id" in filters:
@@ -837,18 +903,21 @@ class ModelSerializer(PermissionedModelSerializer):
del filters["net_id"]
try:
filters.update(status="deleted")
self.instance = self.Meta.model.objects.get(**filters)
except self.Meta.model.DoesNotExist:
raise exc
except FieldError as exc:
raise exc
if (
has_perms(request.user, self.instance, "update")
and self.instance.status == "deleted"
):
if has_perms(request.user, self.instance, "update"):
rv = super(ModelSerializer, self).run_validation(data=data)
self._undelete = True
return rv
else:
raise RestValidationError({"non_field_errors": [_(
"Permission denied to restore deleted object/relationship"
)]})
raise
else:
raise
@@ -1226,6 +1295,20 @@ class NetworkContactSerializer(ModelSerializer):
def validate_phone(self, value):
    """Validate a contact phone number.

    Delegates to the shared ``validate_phonenumber`` helper and returns
    its (possibly normalized) result unchanged.
    """
    validated = validate_phonenumber(value)
    return validated
def to_representation(self, data):
    """Serialize the contact, blanking sensitive fields when deleted.

    A network contact that has been soft-deleted is still serialized,
    but its name, phone, email and url are replaced with empty strings
    so no personally identifying data is exposed (#569).
    """
    rep = super().to_representation(data)
    is_deleted = isinstance(rep, dict) and rep.get("status") == "deleted"
    if is_deleted:
        for sensitive_field in ("name", "phone", "email", "url"):
            rep[sensitive_field] = ""
    return rep
class NetworkIXLanSerializer(ModelSerializer):
@@ -1806,6 +1889,7 @@ class IXLanPrefixSerializer(ModelSerializer):
"ixlan_id",
"protocol",
"prefix",
"in_dfz",
] + HandleRefSerializer.Meta.fields
related_fields = ["ixlan"]