diff --git a/docker/configs/settings_local.py b/docker/configs/settings_local.py index e357ce3f73..1d4e6916b9 100644 --- a/docker/configs/settings_local.py +++ b/docker/configs/settings_local.py @@ -105,3 +105,7 @@ "ietf.api.red_api" : ["devtoken", "redtoken"], # Not a real secret "ietf.api.views_rpc" : ["devtoken"], # Not a real secret } + +# Errata system api configuration +ERRATA_METADATA_NOTIFICATION_URL = "http://host.docker.internal:8808/api/rfc_metadata_update/" +ERRATA_METADATA_NOTIFICATION_API_KEY = "not a real secret" diff --git a/ietf/api/serializers_rpc.py b/ietf/api/serializers_rpc.py index 34e2c791c0..d5f5363990 100644 --- a/ietf/api/serializers_rpc.py +++ b/ietf/api/serializers_rpc.py @@ -216,32 +216,24 @@ class Meta: read_only_fields = ["id", "name"] -class EditableRfcSerializer(serializers.ModelSerializer): - # Would be nice to reconcile this with ietf.doc.serializers.RfcSerializer. - # The purposes of that serializer (representing data for Red) and this one - # (accepting updates from Purple) are different enough that separate formats - # may be needed, but if not it'd be nice to have a single RfcSerializer that - # can serve both. 
def _update_authors(rfc, authors_data):
    """Replace rfc's author set with instances built from validated author data.

    authors_data is a list of validated dicts suitable as RfcAuthor kwargs.
    Returns the list of DocEvents generated by update_rfcauthors(), each
    already saved inside a transaction along with the author changes.
    """
    # Construct unsaved instances from validated author data
    new_authors = [RfcAuthor(**authdata) for authdata in authors_data]
    # Update the RFC with the new author set
    with transaction.atomic():
        change_events = update_rfcauthors(rfc, new_authors)
        for event in change_events:
            event.save()
    return change_events


class SubseriesNameField(serializers.RegexField):
    """Serializer field accepting a subseries doc name such as "bcp14" or "std99"."""

    def __init__(self, **kwargs):
        # pattern: no leading 0, finite length (arbitrarily set to 5 digits)
        regex = r"^(bcp|std|fyi)[1-9][0-9]{0,4}$"
        super().__init__(regex, **kwargs)
class EditableRfcSerializer(serializers.ModelSerializer):
    """Serializer accepting RFC metadata updates (from Purple); update-only.

    create() is deliberately disabled; use RfcPubSerializer to publish.
    """

    # Would be nice to reconcile this with ietf.doc.serializers.RfcSerializer.
    # The purposes of that serializer (representing data for Red) and this one
    # (accepting updates from Purple) are different enough that separate formats
    # may be needed, but if not it'd be nice to have a single RfcSerializer that
    # can serve both.
    #
    # Should also consider whether this and RfcPubSerializer should merge.
    #
    # Treats published and subseries fields as write-only. This isn't quite correct,
    # but makes it easier and we don't currently use the serialized value except for
    # debugging.
    published = serializers.DateTimeField(
        default_timezone=datetime.timezone.utc,
        write_only=True,
    )
    authors = RfcAuthorSerializer(many=True, min_length=1, source="rfcauthor_set")
    subseries = serializers.ListField(
        child=SubseriesNameField(required=False),
        write_only=True,
    )

    class Meta:
        model = Document
        fields = [
            "published",
            "title",
            "authors",
            "stream",
            "abstract",
            "pages",
            "std_level",
            "subseries",
        ]

    def create(self, validated_data):
        """Disabled - this serializer only supports update()."""
        raise RuntimeError("Cannot create with this serializer")

    def update(self, instance, validated_data):
        """Apply validated metadata changes to an rfc Document.

        Handles plain Document fields generically, and gives special
        treatment to published (timestamp on the published_rfc event),
        authors (delegated to _update_authors), and subseries (contains
        relationships, creating/removing subseries Documents as needed).
        Records a DocEvent per change and saves history once at the end.
        """
        assert isinstance(instance, Document)
        assert instance.type_id == "rfc"
        rfc = instance  # get better name

        system_person = Person.objects.get(name="(System)")

        # Remove data that needs special handling. Use a singleton object to detect
        # missing values in case we ever support a value that needs None as an option.
        omitted = object()
        published = validated_data.pop("published", omitted)
        subseries = validated_data.pop("subseries", omitted)
        authors_data = validated_data.pop("rfcauthor_set", omitted)

        # Transaction to clean up if something fails
        with transaction.atomic():
            # update the rfc Document itself
            rfc_changes = []
            rfc_events = []

            for attr, new_value in validated_data.items():
                old_value = getattr(rfc, attr)
                if new_value != old_value:
                    rfc_changes.append(
                        f"changed {attr} to '{new_value}' from '{old_value}'"
                    )
                    setattr(rfc, attr, new_value)
            if len(rfc_changes) > 0:
                rfc_change_summary = f"{', '.join(rfc_changes)}"
                rfc_events.append(
                    DocEvent.objects.create(
                        doc=rfc,
                        rev=rfc.rev,
                        by=system_person,
                        type="sync_from_rfc_editor",
                        desc=f"Changed metadata: {rfc_change_summary}",
                    )
                )
            if authors_data is not omitted:
                rfc_events.extend(_update_authors(instance, authors_data))

            if published is not omitted:
                published_event = rfc.latest_event(type="published_rfc")
                if published_event is None:
                    # unexpected, but possible in theory
                    rfc_events.append(
                        DocEvent.objects.create(
                            doc=rfc,
                            rev=rfc.rev,
                            type="published_rfc",
                            time=published,
                            by=system_person,
                            desc="RFC published",
                        )
                    )
                    rfc_events.append(
                        DocEvent.objects.create(
                            doc=rfc,
                            rev=rfc.rev,
                            type="sync_from_rfc_editor",
                            by=system_person,
                            desc=(
                                f"Set publication timestamp to {published.isoformat()}"
                            ),
                        )
                    )
                else:
                    original_pub_time = published_event.time
                    if published != original_pub_time:
                        published_event.time = published
                        published_event.save()
                        rfc_events.append(
                            DocEvent.objects.create(
                                doc=rfc,
                                rev=rfc.rev,
                                type="sync_from_rfc_editor",
                                by=system_person,
                                desc=(
                                    f"Changed publication time to "
                                    f"{published.isoformat()} from "
                                    f"{original_pub_time.isoformat()}"
                                )
                            )
                        )

            # update subseries relations
            if subseries is not omitted:
                for subseries_doc_name in subseries:
                    # SubseriesNameField guarantees a bcp/std/fyi prefix
                    ss_slug = subseries_doc_name[:3]
                    subseries_doc, ss_doc_created = Document.objects.get_or_create(
                        type_id=ss_slug, name=subseries_doc_name
                    )
                    if ss_doc_created:
                        subseries_doc.docevent_set.create(
                            type=f"{ss_slug}_doc_created",
                            by=system_person,
                            desc=f"Created {subseries_doc_name} via update of {rfc.name}",
                        )
                    _, ss_rel_created = subseries_doc.relateddocument_set.get_or_create(
                        relationship_id="contains", target=rfc
                    )
                    if ss_rel_created:
                        # Record the event on both the subseries doc and the rfc
                        subseries_doc.docevent_set.create(
                            type="sync_from_rfc_editor",
                            by=system_person,
                            desc=f"Added {rfc.name} to {subseries_doc.name}",
                        )
                        rfc_events.append(
                            rfc.docevent_set.create(
                                type="sync_from_rfc_editor",
                                by=system_person,
                                desc=f"Added {rfc.name} to {subseries_doc.name}",
                            )
                        )
                # Delete subseries relations that are no longer current
                stale_subseries_relations = rfc.relations_that("contains").exclude(
                    source__name__in=subseries
                )
                for stale_relation in stale_subseries_relations:
                    stale_subseries_doc = stale_relation.source
                    rfc_events.append(
                        rfc.docevent_set.create(
                            type="sync_from_rfc_editor",
                            by=system_person,
                            desc=f"Removed {rfc.name} from {stale_subseries_doc.name}",
                        )
                    )
                    stale_subseries_doc.docevent_set.create(
                        type="sync_from_rfc_editor",
                        by=system_person,
                        desc=f"Removed {rfc.name} from {stale_subseries_doc.name}",
                    )
                stale_subseries_relations.delete()
            if len(rfc_events) > 0:
                rfc.save_with_history(rfc_events)
        return rfc
class EditableRfcSerializerTests(TestCase):
    """Tests for EditableRfcSerializer (update-only RFC metadata serializer)."""

    def test_create(self):
        """create() must be rejected even when the payload validates."""
        serializer = EditableRfcSerializer(
            data={
                "published": timezone.now(),
                "title": "Yadda yadda yadda",
                "authors": [
                    {
                        "titlepage_name": "B. Fett",
                        "is_editor": False,
                        "affiliation": "DBA Galactic Empire",
                        "country": "",
                    },
                ],
                "stream": "ietf",
                "abstract": "A long time ago in a galaxy far, far away...",
                "pages": 3,
                "std_level": "inf",
                "subseries": ["fyi999"],
            }
        )
        self.assertTrue(serializer.is_valid())
        with self.assertRaises(RuntimeError, msg="serializer does not allow create()"):
            serializer.save()

    def test_update(self):
        """A full update should change every supported field and subseries."""
        rfc = WgRfcFactory(pages=10)
        serializer = EditableRfcSerializer(
            instance=rfc,
            data={
                "published": timezone.now(),
                "title": "Yadda yadda yadda",
                "authors": [
                    {
                        "titlepage_name": "B. Fett",
                        "is_editor": False,
                        "affiliation": "DBA Galactic Empire",
                        "country": "",
                    },
                ],
                "stream": "ise",
                "abstract": "A long time ago in a galaxy far, far away...",
                "pages": 3,
                "std_level": "inf",
                "subseries": ["fyi999"],
            },
        )
        self.assertTrue(serializer.is_valid())
        result = serializer.save()
        result.refresh_from_db()
        self.assertEqual(result.title, "Yadda yadda yadda")
        self.assertEqual(
            list(
                result.rfcauthor_set.values(
                    "titlepage_name", "is_editor", "affiliation", "country"
                )
            ),
            [
                {
                    "titlepage_name": "B. Fett",
                    "is_editor": False,
                    "affiliation": "DBA Galactic Empire",
                    "country": "",
                },
            ],
        )
        self.assertEqual(result.stream_id, "ise")
        self.assertEqual(
            result.abstract, "A long time ago in a galaxy far, far away..."
        )
        self.assertEqual(result.pages, 3)
        self.assertEqual(result.std_level_id, "inf")
        self.assertEqual(
            result.part_of(),
            [Document.objects.get(name="fyi999")],
        )

    def test_partial_update(self):
        # We could test other permutations of fields, but authors is a partial update
        # we know we are going to use, so verifying that one in particular.
        rfc = WgRfcFactory(pages=10, abstract="do or do not", title="padawan")
        serializer = EditableRfcSerializer(
            partial=True,
            instance=rfc,
            data={
                "authors": [
                    {
                        "titlepage_name": "B. Fett",
                        "is_editor": False,
                        "affiliation": "DBA Galactic Empire",
                        "country": "",
                    },
                ],
            },
        )
        self.assertTrue(serializer.is_valid())
        result = serializer.save()
        result.refresh_from_db()
        # save() returns the same instance, so rfc reflects the refresh
        self.assertEqual(rfc.title, "padawan")
        self.assertEqual(
            list(
                result.rfcauthor_set.values(
                    "titlepage_name", "is_editor", "affiliation", "country"
                )
            ),
            [
                {
                    "titlepage_name": "B. Fett",
                    "is_editor": False,
                    "affiliation": "DBA Galactic Empire",
                    "country": "",
                },
            ],
        )
        # Untouched fields must keep their factory values
        self.assertEqual(result.stream_id, "ietf")
        self.assertEqual(result.abstract, "do or do not")
        self.assertEqual(result.pages, 10)
        self.assertEqual(result.std_level_id, "ps")
        self.assertEqual(result.part_of(), [])

        # Test only a field on the Document itself to be sure that it works
        serializer = EditableRfcSerializer(
            partial=True,
            instance=rfc,
            data={"title": "jedi master"},
        )
        self.assertTrue(serializer.is_valid())
        result = serializer.save()
        result.refresh_from_db()
        self.assertEqual(rfc.title, "jedi master")
a Document" + updates = RfcFactory.create_batch(2) + obsoletes = RfcFactory.create_batch(2) unused_rfc_number = ( Document.objects.filter(rfc_number__isnull=False).aggregate( unused_rfc_number=Max("rfc_number") + 1 @@ -120,8 +124,8 @@ def test_notify_rfc_published(self): "pages": draft.pages + 10, "std_level": "ps", "ad": rfc_ad.pk, - "obsoletes": [], - "updates": [], + "obsoletes": [o.rfc_number for o in obsoletes], + "updates": [o.rfc_number for o in updates], "subseries": [], } r = self.client.post(url, data=post_data, format="json") @@ -172,13 +176,25 @@ def test_notify_rfc_published(self): self.assertEqual(rfc.pages, draft.pages + 10) self.assertEqual(rfc.std_level_id, "ps") self.assertEqual(rfc.ad, rfc_ad) - self.assertEqual(rfc.related_that_doc("obs"), []) - self.assertEqual(rfc.related_that_doc("updates"), []) + self.assertEqual(set(rfc.related_that_doc("obs")), set([o for o in obsoletes])) + self.assertEqual( + set(rfc.related_that_doc("updates")), set([o for o in updates]) + ) self.assertEqual(rfc.part_of(), []) self.assertEqual(draft.get_state().slug, "rfc") # todo test non-empty relationships # todo test references (when updating that is part of the handling) + self.assertTrue(mock_task_delay.called) + mock_args, mock_kwargs = mock_task_delay.call_args + self.assertIn("rfc_number_list", mock_kwargs) + expected_rfc_number_list = [rfc.rfc_number] + expected_rfc_number_list.extend( + [d.rfc_number for d in updates + obsoletes] + ) + expected_rfc_number_list = sorted(set(expected_rfc_number_list)) + self.assertEqual(mock_kwargs["rfc_number_list"], expected_rfc_number_list) + @override_settings(APP_API_TOKENS={"ietf.api.views_rpc": ["valid-token"]}) def test_upload_rfc_files(self): def _valid_post_data(): diff --git a/ietf/api/views_rpc.py b/ietf/api/views_rpc.py index 2bf16480f2..8862bbf866 100644 --- a/ietf/api/views_rpc.py +++ b/ietf/api/views_rpc.py @@ -35,9 +35,10 @@ NotificationAckSerializer, RfcPubSerializer, RfcFileSerializer, EditableRfcSerializer, 
    def perform_update(self, serializer):
        """Record a summary sync_from_rfc_editor DocEvent, then apply the update.

        NOTE(review): EditableRfcSerializer.update() also creates per-change
        sync_from_rfc_editor events and saves history; confirm this extra
        summary event (created even when the update ends up changing nothing)
        is intentional.
        """
        DocEvent.objects.create(
            doc=serializer.instance,
            rev=serializer.instance.rev,
            by=Person.objects.get(name="(System)"),
            type="sync_from_rfc_editor",
            desc="Metadata update from RFC Editor",
        )
        super().perform_update(serializer)
@shared_task
def signal_update_rfc_metadata_task(rfc_number_list=()):
    """Celery task: notify the errata system that metadata changed for these RFCs.

    rfc_number_list: iterable of RFC numbers; delivery is best-effort
    (failures are logged by signal_update_rfc_metadata, not raised).
    """
    signal_update_rfc_metadata(rfc_number_list)
def signal_update_rfc_metadata(rfc_number_list=()):
    """Notify the errata system that metadata for the given RFCs has changed.

    POSTs {"rfc_number_list": [...]} to settings.ERRATA_METADATA_NOTIFICATION_URL,
    authenticating via the X-Api-Key header. Best-effort by design: a timeout or
    a non-200 response is logged and swallowed so callers (the celery task) are
    never disrupted. If no API key is configured, logs and does nothing.
    """
    key = getattr(settings, "ERRATA_METADATA_NOTIFICATION_API_KEY", None)
    if key is None:
        # Not configured (e.g. dev environments without an errata system)
        log("No API key configured for errata metadata notification, skipping")
        return
    headers = {"X-Api-Key": key}
    post_dict = {
        "rfc_number_list": list(rfc_number_list),
    }
    try:
        response = requests.post(
            settings.ERRATA_METADATA_NOTIFICATION_URL,
            headers=headers,
            json=post_dict,
            timeout=settings.DEFAULT_REQUESTS_TIMEOUT,
        )
    except requests.Timeout as e:
        # Log and give up - deliberately do not raise (best-effort notification)
        log(
            f"POST request timed out for "
            f"{settings.ERRATA_METADATA_NOTIFICATION_URL}: {e}"
        )
        return
    if response.status_code != 200:
        log(
            f"POST request failed for {settings.ERRATA_METADATA_NOTIFICATION_URL}: "
            f"{response.status_code} {response.text}"
        )
"https://mp3.conf.meetecho.com/ietf{session.meeting.number}/{session.pk}.m3u" MEETECHO_SESSION_RECORDING_URL = "https://meetecho-player.ietf.org/playout/?session={session_label}" +# Errata system api configuration +# settings should provide +# ERRATA_METADATA_NOTIFICATION_URL +# ERRATA_METADATA_NOTIFICATION_API_KEY + # Put the production SECRET_KEY in settings_local.py, and also any other # sensitive or site-specific changes. DO NOT commit settings_local.py to svn. from ietf.settings_local import * # pyflakes:ignore pylint: disable=wildcard-import diff --git a/ietf/sync/tests.py b/ietf/sync/tests.py index bcc87a43aa..21d6cb5cd5 100644 --- a/ietf/sync/tests.py +++ b/ietf/sync/tests.py @@ -301,6 +301,7 @@ def test_rfc_index(self): ad=Person.objects.get(user__username='ad'), external_url="http://my-external-url.example.com", note="this is a note", + pages=54, # make sure this is not 42 ) DocumentAuthorFactory.create_batch(2, document=draft_doc) draft_doc.action_holders.add(draft_doc.ad) # not normally set, but add to be sure it's cleared