
Merge pull request #5010 from netbox-community/4990-custom-script-changelog

Fixes #4990: Object change logging during custom script execution
Authored by Jeremy Stretch on 2020-08-18 13:57:21 -04:00; committed by GitHub
15 changed files with 329 additions and 205 deletions

View File

@@ -176,13 +176,12 @@ class CustomFieldModelSerializer(ValidatedModelSerializer):
def create(self, validated_data):
custom_fields = validated_data.pop('custom_fields', None)
with transaction.atomic():
instance = super().create(validated_data)
# Save custom fields
custom_fields = validated_data.get('custom_fields')
if custom_fields is not None:
self._save_custom_fields(instance, custom_fields)
instance.custom_fields = custom_fields
@@ -191,10 +190,11 @@ class CustomFieldModelSerializer(ValidatedModelSerializer):
def update(self, instance, validated_data):
custom_fields = validated_data.pop('custom_fields', None)
with transaction.atomic():
custom_fields = validated_data.get('custom_fields')
instance._cf = custom_fields
instance = super().update(instance, validated_data)
# Save custom fields

View File

@@ -108,6 +108,10 @@ class TaggedObjectSerializer(serializers.Serializer):
def update(self, instance, validated_data):
tags = validated_data.pop('tags', [])
# Cache tags on instance for change logging
instance._tags = tags
instance = super().update(instance, validated_data)
return self._save_tags(instance, tags)

View File

@@ -0,0 +1,32 @@
from contextlib import contextmanager
from django.db.models.signals import m2m_changed, pre_delete, post_save
from extras.signals import _handle_changed_object, _handle_deleted_object
from utilities.utils import curry
@contextmanager
def change_logging(request):
"""
Enable change logging by connecting the appropriate signals to their receivers before code is run, and
disconnecting them afterward.
:param request: WSGIRequest object with a unique `id` set
"""
# Curry the signal receivers to pass the current request
handle_changed_object = curry(_handle_changed_object, request)
handle_deleted_object = curry(_handle_deleted_object, request)
# Connect our receivers to the post_save, m2m_changed, and pre_delete signals.
post_save.connect(handle_changed_object, dispatch_uid='handle_changed_object')
m2m_changed.connect(handle_changed_object, dispatch_uid='handle_changed_object')
pre_delete.connect(handle_deleted_object, dispatch_uid='handle_deleted_object')
yield
# Disconnect change logging signals. This is necessary to avoid recording any errant
# changes during test cleanup.
post_save.disconnect(handle_changed_object, dispatch_uid='handle_changed_object')
m2m_changed.disconnect(handle_changed_object, dispatch_uid='handle_changed_object')
pre_delete.disconnect(handle_deleted_object, dispatch_uid='handle_deleted_object')
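
A minimal usage sketch (illustrative, not part of the patch): the context manager can be driven outside the normal request/response cycle, which is exactly what the script runner below relies on. It assumes a working NetBox/Django environment; the RequestFactory request stands in for whatever request-like object the caller already has.

import uuid

from django.contrib.auth import get_user_model
from django.test import RequestFactory

from dcim.models import Site
from extras.context_managers import change_logging

# The receivers only need two attributes on the request: a user to attribute
# the changes to and a unique id to group them.
request = RequestFactory().get('/')
request.id = uuid.uuid4()
request.user = get_user_model().objects.first()

with change_logging(request):
    # Saves, m2m changes, and deletes inside this block are picked up by the
    # connected receivers and recorded as ObjectChange entries.
    Site.objects.create(name='Example Site', slug='example-site')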

View File

@@ -29,6 +29,9 @@ class CustomFieldModelForm(forms.ModelForm):
super().__init__(*args, **kwargs)
if self.instance._cf is None:
self.instance._cf = {}
self._append_customfield_fields()
def _append_customfield_fields(self):
@@ -48,9 +51,12 @@ class CustomFieldModelForm(forms.ModelForm):
field_name = 'cf_{}'.format(cf.name)
if self.instance.pk:
self.fields[field_name] = cf.to_form_field(set_initial=False)
self.fields[field_name].initial = self.custom_field_values.get(cf.name)
value = self.custom_field_values.get(cf.name)
self.fields[field_name].initial = value
self.instance._cf[cf.name] = value
else:
self.fields[field_name] = cf.to_form_field()
self.instance._cf[cf.name] = self.fields[field_name].initial
# Annotate the field in the list of CustomField form fields
self.custom_fields.append(field_name)
@@ -77,13 +83,18 @@ class CustomFieldModelForm(forms.ModelForm):
cfv.save()
def save(self, commit=True):
# Cache custom field values on object prior to save to ensure change logging
for cf_name in self.custom_fields:
self.instance._cf[cf_name[3:]] = self.cleaned_data.get(cf_name)
obj = super().save(commit)
# Handle custom fields the same way we do M2M fields
if commit:
self._save_custom_fields()
else:
self.save_custom_fields = self._save_custom_fields
obj.save_custom_fields = self._save_custom_fields
return obj
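
The commit=False branch mirrors Django's save_m2m() convention, exposing a hook to persist the custom field values later. A hedged view-style sketch (the view, its URL name, and form_class are illustrative, not taken from the patch):

from django.shortcuts import redirect

def edit_object(request, form_class):
    # form_class is assumed to be a ModelForm that mixes in CustomFieldModelForm.
    form = form_class(data=request.POST)
    if form.is_valid():
        obj = form.save(commit=False)   # custom field values are already cached on obj._cf
        obj.save()                      # post_save fires; the change-log snapshot includes _cf
        obj.save_custom_fields()        # persist CustomFieldValue rows, analogous to save_m2m()
    return redirect('home')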

View File

@@ -1,64 +1,6 @@
import random
import threading
import uuid
from copy import deepcopy
from datetime import timedelta
from django.conf import settings
from django.contrib import messages
from django.db.models.signals import pre_delete, post_save
from django.utils import timezone
from django_prometheus.models import model_deletes, model_inserts, model_updates
from redis.exceptions import RedisError
from extras.utils import is_taggable
from utilities.api import is_api_request
from utilities.querysets import DummyQuerySet
from .choices import ObjectChangeActionChoices
from .models import ObjectChange
from .signals import purge_changelog
from .webhooks import enqueue_webhooks
_thread_locals = threading.local()
def handle_changed_object(sender, instance, **kwargs):
"""
Fires when an object is created or updated.
"""
# Queue the object for processing once the request completes
action = ObjectChangeActionChoices.ACTION_CREATE if kwargs['created'] else ObjectChangeActionChoices.ACTION_UPDATE
_thread_locals.changed_objects.append(
(instance, action)
)
def handle_deleted_object(sender, instance, **kwargs):
"""
Fires when an object is deleted.
"""
# Cache custom fields prior to copying the instance
if hasattr(instance, 'cache_custom_fields'):
instance.cache_custom_fields()
# Create a copy of the object being deleted
copy = deepcopy(instance)
# Preserve tags
if is_taggable(instance):
copy.tags = DummyQuerySet(instance.tags.all())
# Queue the copy of the object for processing once the request completes
_thread_locals.changed_objects.append(
(copy, ObjectChangeActionChoices.ACTION_DELETE)
)
def purge_objectchange_cache(sender, **kwargs):
"""
Delete any queued object changes waiting to be written.
"""
_thread_locals.changed_objects = []
from .context_managers import change_logging
class ObjectChangeMiddleware(object):
@@ -79,74 +21,12 @@ class ObjectChangeMiddleware(object):
self.get_response = get_response
def __call__(self, request):
# Initialize an empty list to cache objects being saved.
_thread_locals.changed_objects = []
# Assign a random unique ID to the request. This will be used to associate multiple object changes made during
# the same request.
request.id = uuid.uuid4()
# Connect our receivers to the post_save and post_delete signals.
post_save.connect(handle_changed_object, dispatch_uid='handle_changed_object')
pre_delete.connect(handle_deleted_object, dispatch_uid='handle_deleted_object')
# Provide a hook for purging the change cache
purge_changelog.connect(purge_objectchange_cache)
# Process the request
response = self.get_response(request)
# If the change cache is empty, there's nothing more we need to do.
if not _thread_locals.changed_objects:
return response
# Disconnect our receivers from the post_save and post_delete signals.
post_save.disconnect(handle_changed_object, dispatch_uid='handle_changed_object')
pre_delete.disconnect(handle_deleted_object, dispatch_uid='handle_deleted_object')
# Create records for any cached objects that were changed.
redis_failed = False
for instance, action in _thread_locals.changed_objects:
# Refresh cached custom field values
if action in [ObjectChangeActionChoices.ACTION_CREATE, ObjectChangeActionChoices.ACTION_UPDATE]:
if hasattr(instance, 'cache_custom_fields'):
instance.cache_custom_fields()
# Record an ObjectChange if applicable
if hasattr(instance, 'to_objectchange'):
objectchange = instance.to_objectchange(action)
objectchange.user = request.user
objectchange.request_id = request.id
objectchange.save()
# Enqueue webhooks
try:
enqueue_webhooks(instance, request.user, request.id, action)
except RedisError as e:
if not redis_failed and not is_api_request(request):
messages.error(
request,
"There was an error processing webhooks for this request. Check that the Redis service is "
"running and reachable. The full error details were: {}".format(e)
)
redis_failed = True
# Increment metric counters
if action == ObjectChangeActionChoices.ACTION_CREATE:
model_inserts.labels(instance._meta.model_name).inc()
elif action == ObjectChangeActionChoices.ACTION_UPDATE:
model_updates.labels(instance._meta.model_name).inc()
elif action == ObjectChangeActionChoices.ACTION_DELETE:
model_deletes.labels(instance._meta.model_name).inc()
# Housekeeping: 1% chance of clearing out expired ObjectChanges. This applies only to requests which result in
# one or more changes being logged.
if settings.CHANGELOG_RETENTION and random.randint(1, 100) == 1:
cutoff = timezone.now() - timedelta(days=settings.CHANGELOG_RETENTION)
purged_count, _ = ObjectChange.objects.filter(
time__lt=cutoff
).delete()
# Process the request with change logging enabled
with change_logging(request):
response = self.get_response(request)
return response
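
Pieced together from the additions above, the rewritten middleware reduces to roughly the following (a sketch, not a verbatim copy of the new file):

import uuid

from .context_managers import change_logging


class ObjectChangeMiddleware(object):

    def __init__(self, get_response):
        self.get_response = get_response

    def __call__(self, request):
        # Assign a random unique ID to the request, used to associate all object
        # changes made during the same request.
        request.id = uuid.uuid4()

        # Process the request with change logging enabled
        with change_logging(request):
            response = self.get_response(request)

        return response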

View File

@@ -1,4 +1,3 @@
import logging
from collections import OrderedDict
from datetime import date
@@ -18,11 +17,14 @@ from extras.utils import FeatureQuery
#
class CustomFieldModel(models.Model):
_cf = None
class Meta:
abstract = True
def __init__(self, *args, custom_fields=None, **kwargs):
self._cf = custom_fields
super().__init__(*args, **kwargs)
def cache_custom_fields(self):
"""
Cache all custom field values for this instance

View File

@@ -22,8 +22,8 @@ from ipam.formfields import IPAddressFormField, IPNetworkFormField
from ipam.validators import MaxPrefixLengthValidator, MinPrefixLengthValidator, prefix_validator
from utilities.exceptions import AbortTransaction
from utilities.forms import DynamicModelChoiceField, DynamicModelMultipleChoiceField
from .context_managers import change_logging
from .forms import ScriptForm
from .signals import purge_changelog
__all__ = [
'BaseScript',
@@ -436,41 +436,44 @@ def run_script(data, request, commit=True, *args, **kwargs):
if 'commit' in inspect.signature(script.run).parameters:
kwargs['commit'] = commit
else:
warnings.warn(f"The run() method of script {script} should support a 'commit' argument. This will be required "
f"beginning with NetBox v2.10.")
warnings.warn(
f"The run() method of script {script} should support a 'commit' argument. This will be required beginning "
f"with NetBox v2.10."
)
try:
with transaction.atomic():
script.output = script.run(**kwargs)
with change_logging(request):
try:
with transaction.atomic():
script.output = script.run(**kwargs)
if not commit:
raise AbortTransaction()
except AbortTransaction:
pass
except Exception as e:
stacktrace = traceback.format_exc()
script.log_failure(
"An exception occurred: `{}: {}`\n```\n{}\n```".format(type(e).__name__, e, stacktrace)
)
logger.error(f"Exception raised during script execution: {e}")
commit = False
job_result.set_status(JobResultStatusChoices.STATUS_ERRORED)
finally:
if job_result.status != JobResultStatusChoices.STATUS_ERRORED:
job_result.data = ScriptOutputSerializer(script).data
job_result.set_status(JobResultStatusChoices.STATUS_COMPLETED)
if not commit:
raise AbortTransaction()
# Delete all pending changelog entries
script.log_info(
"Database changes have been reverted automatically."
)
except AbortTransaction:
pass
except Exception as e:
stacktrace = traceback.format_exc()
script.log_failure(
"An exception occurred: `{}: {}`\n```\n{}\n```".format(type(e).__name__, e, stacktrace)
)
logger.error(f"Exception raised during script execution: {e}")
commit = False
job_result.set_status(JobResultStatusChoices.STATUS_ERRORED)
finally:
if job_result.status != JobResultStatusChoices.STATUS_ERRORED:
job_result.data = ScriptOutputSerializer(script).data
job_result.set_status(JobResultStatusChoices.STATUS_COMPLETED)
if not commit:
# Delete all pending changelog entries
purge_changelog.send(Script)
script.log_info(
"Database changes have been reverted automatically."
)
logger.info(f"Script completed in {job_result.duration}")
logger.info(f"Script completed in {job_result.duration}")
# Delete any previous terminal state results
JobResult.objects.filter(
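
Condensed, the new execution flow is roughly the following (job-result bookkeeping and exception handling from the hunk above omitted). Because ObjectChange records are now written inside the same database transaction, aborting a dry run (commit=False) rolls them back automatically, which is why the purge_changelog signal can be dropped:

with change_logging(request):
    try:
        with transaction.atomic():
            script.output = script.run(**kwargs)
            if not commit:
                raise AbortTransaction()
    except AbortTransaction:
        script.log_info("Database changes have been reverted automatically.")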

View File

@@ -1,7 +1,75 @@
import random
from datetime import timedelta
from cacheops.signals import cache_invalidated, cache_read
from django.dispatch import Signal
from django.conf import settings
from django.utils import timezone
from django_prometheus.models import model_deletes, model_inserts, model_updates
from prometheus_client import Counter
from .choices import ObjectChangeActionChoices
from .models import ObjectChange
from .webhooks import enqueue_webhooks
#
# Change logging/webhooks
#
def _handle_changed_object(request, sender, instance, **kwargs):
"""
Fires when an object is created or updated.
"""
# Determine the type of change being logged
if kwargs.get('created'):
action = ObjectChangeActionChoices.ACTION_CREATE
elif 'created' in kwargs:
action = ObjectChangeActionChoices.ACTION_UPDATE
elif kwargs.get('action') in ['post_add', 'post_remove'] and kwargs['pk_set']:
# m2m_changed with objects added or removed
action = ObjectChangeActionChoices.ACTION_UPDATE
else:
return
# Record an ObjectChange if applicable
if hasattr(instance, 'to_objectchange'):
objectchange = instance.to_objectchange(action)
objectchange.user = request.user
objectchange.request_id = request.id
objectchange.save()
# Enqueue webhooks
enqueue_webhooks(instance, request.user, request.id, action)
# Increment metric counters
if action == ObjectChangeActionChoices.ACTION_CREATE:
model_inserts.labels(instance._meta.model_name).inc()
elif action == ObjectChangeActionChoices.ACTION_UPDATE:
model_updates.labels(instance._meta.model_name).inc()
# Housekeeping: 0.1% chance of clearing out expired ObjectChanges
if settings.CHANGELOG_RETENTION and random.randint(1, 1000) == 1:
cutoff = timezone.now() - timedelta(days=settings.CHANGELOG_RETENTION)
ObjectChange.objects.filter(time__lt=cutoff).delete()
def _handle_deleted_object(request, sender, instance, **kwargs):
"""
Fires when an object is deleted.
"""
# Record an ObjectChange if applicable
if hasattr(instance, 'to_objectchange'):
objectchange = instance.to_objectchange(ObjectChangeActionChoices.ACTION_DELETE)
objectchange.user = request.user
objectchange.request_id = request.id
objectchange.save()
# Enqueue webhooks
enqueue_webhooks(instance, request.user, request.id, ObjectChangeActionChoices.ACTION_DELETE)
# Increment metric counters
model_deletes.labels(instance._meta.model_name).inc()
#
# Caching
@@ -25,10 +93,3 @@ def cache_invalidated_collector(sender, obj_dict, **kwargs):
cache_read.connect(cache_read_collector)
cache_invalidated.connect(cache_invalidated_collector)
#
# Change logging
#
purge_changelog = Signal()

View File

@@ -2,13 +2,125 @@ from django.contrib.contenttypes.models import ContentType
from django.urls import reverse
from rest_framework import status
from dcim.choices import SiteStatusChoices
from dcim.models import Site
from extras.choices import *
from extras.models import CustomField, CustomFieldValue, ObjectChange
from extras.models import CustomField, CustomFieldValue, ObjectChange, Tag
from utilities.testing import APITestCase
from utilities.testing.utils import post_data
from utilities.testing.views import ModelViewTestCase
class ChangeLogTest(APITestCase):
class ChangeLogViewTest(ModelViewTestCase):
model = Site
@classmethod
def setUpTestData(cls):
# Create a custom field on the Site model
ct = ContentType.objects.get_for_model(Site)
cf = CustomField(
type=CustomFieldTypeChoices.TYPE_TEXT,
name='my_field',
required=False
)
cf.save()
cf.obj_type.set([ct])
def test_create_object(self):
tags = self.create_tags('Tag 1', 'Tag 2')
form_data = {
'name': 'Test Site 1',
'slug': 'test-site-1',
'status': SiteStatusChoices.STATUS_ACTIVE,
'cf_my_field': 'ABC',
'tags': [tag.pk for tag in tags],
}
request = {
'path': self._get_url('add'),
'data': post_data(form_data),
}
self.add_permissions('dcim.add_site')
response = self.client.post(**request)
self.assertHttpStatus(response, 302)
site = Site.objects.get(name='Test Site 1')
# First OC is the creation; second is the tags update
oc_list = ObjectChange.objects.filter(
changed_object_type=ContentType.objects.get_for_model(Site),
changed_object_id=site.pk
).order_by('pk')
self.assertEqual(oc_list[0].changed_object, site)
self.assertEqual(oc_list[0].action, ObjectChangeActionChoices.ACTION_CREATE)
self.assertEqual(oc_list[0].object_data['custom_fields']['my_field'], form_data['cf_my_field'])
self.assertEqual(oc_list[1].action, ObjectChangeActionChoices.ACTION_UPDATE)
self.assertEqual(oc_list[1].object_data['tags'], ['Tag 1', 'Tag 2'])
def test_update_object(self):
site = Site(name='Test Site 1', slug='test-site-1')
site.save()
tags = self.create_tags('Tag 1', 'Tag 2', 'Tag 3')
site.tags.set('Tag 1', 'Tag 2')
form_data = {
'name': 'Test Site X',
'slug': 'test-site-x',
'status': SiteStatusChoices.STATUS_PLANNED,
'cf_my_field': 'DEF',
'tags': [tags[2].pk],
}
request = {
'path': self._get_url('edit', instance=site),
'data': post_data(form_data),
}
self.add_permissions('dcim.change_site')
response = self.client.post(**request)
self.assertHttpStatus(response, 302)
site.refresh_from_db()
# Get only the most recent OC
oc = ObjectChange.objects.filter(
changed_object_type=ContentType.objects.get_for_model(Site),
changed_object_id=site.pk
).first()
self.assertEqual(oc.changed_object, site)
self.assertEqual(oc.action, ObjectChangeActionChoices.ACTION_UPDATE)
self.assertEqual(oc.object_data['custom_fields']['my_field'], form_data['cf_my_field'])
self.assertEqual(oc.object_data['tags'], ['Tag 3'])
def test_delete_object(self):
site = Site(
name='Test Site 1',
slug='test-site-1'
)
site.save()
self.create_tags('Tag 1', 'Tag 2')
site.tags.set('Tag 1', 'Tag 2')
CustomFieldValue.objects.create(
field=CustomField.objects.get(name='my_field'),
obj=site,
value='ABC'
)
request = {
'path': self._get_url('delete', instance=site),
'data': post_data({'confirm': True}),
}
self.add_permissions('dcim.delete_site')
response = self.client.post(**request)
self.assertHttpStatus(response, 302)
oc = ObjectChange.objects.first()
self.assertEqual(oc.changed_object, None)
self.assertEqual(oc.object_repr, site.name)
self.assertEqual(oc.action, ObjectChangeActionChoices.ACTION_DELETE)
self.assertEqual(oc.object_data['custom_fields']['my_field'], 'ABC')
self.assertEqual(oc.object_data['tags'], ['Tag 1', 'Tag 2'])
class ChangeLogAPITest(APITestCase):
def setUp(self):
super().setUp()
@@ -23,6 +135,14 @@ class ChangeLogTest(APITestCase):
cf.save()
cf.obj_type.set([ct])
# Create some tags
tags = (
Tag(name='Tag 1', slug='tag-1'),
Tag(name='Tag 2', slug='tag-2'),
Tag(name='Tag 3', slug='tag-3'),
)
Tag.objects.bulk_create(tags)
def test_create_object(self):
data = {
'name': 'Test Site 1',
@@ -30,6 +150,10 @@ class ChangeLogTest(APITestCase):
'custom_fields': {
'my_field': 'ABC'
},
'tags': [
{'name': 'Tag 1'},
{'name': 'Tag 2'},
]
}
self.assertEqual(ObjectChange.objects.count(), 0)
url = reverse('dcim-api:site-list')
@@ -39,13 +163,16 @@ class ChangeLogTest(APITestCase):
self.assertHttpStatus(response, status.HTTP_201_CREATED)
site = Site.objects.get(pk=response.data['id'])
oc = ObjectChange.objects.get(
# First OC is the creation; second is the tags update
oc_list = ObjectChange.objects.filter(
changed_object_type=ContentType.objects.get_for_model(Site),
changed_object_id=site.pk
)
self.assertEqual(oc.changed_object, site)
self.assertEqual(oc.action, ObjectChangeActionChoices.ACTION_CREATE)
self.assertEqual(oc.object_data['custom_fields'], data['custom_fields'])
).order_by('pk')
self.assertEqual(oc_list[0].changed_object, site)
self.assertEqual(oc_list[0].action, ObjectChangeActionChoices.ACTION_CREATE)
self.assertEqual(oc_list[0].object_data['custom_fields'], data['custom_fields'])
self.assertEqual(oc_list[1].action, ObjectChangeActionChoices.ACTION_UPDATE)
self.assertEqual(oc_list[1].object_data['tags'], ['Tag 1', 'Tag 2'])
def test_update_object(self):
site = Site(name='Test Site 1', slug='test-site-1')
@@ -57,6 +184,9 @@ class ChangeLogTest(APITestCase):
'custom_fields': {
'my_field': 'DEF'
},
'tags': [
{'name': 'Tag 3'}
]
}
self.assertEqual(ObjectChange.objects.count(), 0)
self.add_permissions('dcim.change_site')
@@ -66,13 +196,15 @@ class ChangeLogTest(APITestCase):
self.assertHttpStatus(response, status.HTTP_200_OK)
site = Site.objects.get(pk=response.data['id'])
oc = ObjectChange.objects.get(
# Get only the most recent OC
oc = ObjectChange.objects.filter(
changed_object_type=ContentType.objects.get_for_model(Site),
changed_object_id=site.pk
)
).first()
self.assertEqual(oc.changed_object, site)
self.assertEqual(oc.action, ObjectChangeActionChoices.ACTION_UPDATE)
self.assertEqual(oc.object_data['custom_fields'], data['custom_fields'])
self.assertEqual(oc.object_data['tags'], ['Tag 3'])
def test_delete_object(self):
site = Site(
@@ -80,6 +212,7 @@ class ChangeLogTest(APITestCase):
slug='test-site-1'
)
site.save()
site.tags.set(*Tag.objects.all()[:2])
CustomFieldValue.objects.create(
field=CustomField.objects.get(name='my_field'),
obj=site,
@@ -98,3 +231,4 @@ class ChangeLogTest(APITestCase):
self.assertEqual(oc.object_repr, site.name)
self.assertEqual(oc.action, ObjectChangeActionChoices.ACTION_DELETE)
self.assertEqual(oc.object_data['custom_fields'], {'my_field': 'ABC'})
self.assertEqual(oc.object_data['tags'], ['Tag 1', 'Tag 2'])

View File

@@ -3,7 +3,6 @@ import collections
from django.db.models import Q
from django.utils.deconstruct import deconstructible
from taggit.managers import _TaggableManager
from utilities.querysets import DummyQuerySet
from extras.constants import EXTRAS_FEATURES
from extras.registry import registry
@@ -16,9 +15,6 @@ def is_taggable(obj):
if hasattr(obj, 'tags'):
if issubclass(obj.tags.__class__, _TaggableManager):
return True
# TaggableManager has been replaced with a DummyQuerySet prior to object deletion
if isinstance(obj.tags, DummyQuerySet):
return True
return False

View File

@@ -142,9 +142,9 @@ class APISelect(SelectWithDisabled):
values = json.loads(self.attrs.get(key, '[]'))
if type(value) is list:
values.extend(value)
values.extend([str(v) for v in value])
else:
values.append(value)
values.append(str(value))
self.attrs[key] = json.dumps(values)

View File

@@ -3,20 +3,6 @@ from django.db.models import Q, QuerySet
from utilities.permissions import permission_is_exempt
class DummyQuerySet:
"""
A fake QuerySet that can be used to cache relationships to objects that have been deleted.
"""
def __init__(self, queryset):
self._cache = [obj for obj in queryset.all()]
def __iter__(self):
return iter(self._cache)
def all(self):
return self._cache
class RestrictedQuerySet(QuerySet):
def restrict(self, user, action='view'):

View File

@@ -115,7 +115,7 @@ class TestCase(_TestCase):
"""
err_message = "Expected HTTP status {}; received {}: {}"
self.assertEqual(response.status_code, expected_status, err_message.format(
expected_status, response.status_code, getattr(response, 'data', 'No data')
expected_status, response.status_code, getattr(response, 'data', response.content)
))
def assertInstanceEqual(self, instance, data, api=False):

View File

@@ -97,9 +97,10 @@ def serialize_object(obj, extra=None, exclude=None):
field: str(value) for field, value in obj.cf.items()
}
# Include any tags
# Include any tags. Check for tags cached on the instance; fall back to using the manager.
if is_taggable(obj):
data['tags'] = [tag.name for tag in obj.tags.all()]
tags = getattr(obj, '_tags', obj.tags.all())
data['tags'] = [tag.name for tag in tags]
# Append any extra data
if extra is not None:
@@ -276,6 +277,13 @@ def flatten_dict(d, prefix='', separator='.'):
return ret
# Taken from django.utils.functional (<3.0)
def curry(_curried_func, *args, **kwargs):
def _curried(*moreargs, **morekwargs):
return _curried_func(*args, *moreargs, **{**kwargs, **morekwargs})
return _curried
#
# Fake request object
#
@@ -305,5 +313,6 @@ def copy_safe_request(request):
'GET': request.GET,
'FILES': request.FILES,
'user': request.user,
'path': request.path
'path': request.path,
'id': getattr(request, 'id', None), # UUID assigned by middleware
})
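
The vendored curry() helper (removed from django.utils.functional in Django 3.0) is what lets the plain module-level receivers in extras.signals be bound to the current request. A minimal sketch of its behavior, using a purely illustrative function (functools.partial would behave the same way):

def greet(greeting, name):
    return f"{greeting}, {name}!"

hello = curry(greet, 'Hello')
hello('NetBox')   # -> 'Hello, NetBox!'

# This is how change_logging() arranges for the request to reach the receivers:
#     handle_changed_object = curry(_handle_changed_object, request)
#     post_save.connect(handle_changed_object, dispatch_uid='handle_changed_object')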

View File

@@ -949,6 +949,12 @@ class BulkEditView(GetReturnURLMixin, ObjectPermissionRequiredMixin, View):
elif form.cleaned_data[name] not in (None, ''):
setattr(obj, name, form.cleaned_data[name])
# Cache custom fields on instance prior to save()
if custom_fields:
obj._cf = {
name: form.cleaned_data[name] for name in custom_fields
}
obj.full_clean()
obj.save()
updated_objects.append(obj)