From 9c6fa92b7eed8b29bea96cb73148dba130678e63 Mon Sep 17 00:00:00 2001 From: Jennifer Richards Date: Tue, 27 Jan 2026 11:34:17 -0400 Subject: [PATCH 001/102] fix: update RPC publish API fields (#10308) * fix: update purple publish API fields * fix: handle IntegrityError more cleanly * fix: don't import RFC fields from draft * test: update test * chore: remove unused var/import * fix: f-string -> string --- ietf/api/serializers_rpc.py | 29 ++---------------------- ietf/api/tests_views_rpc.py | 44 ++++++++++++++++++------------------- ietf/api/views_rpc.py | 16 +++++++++++++- 3 files changed, 39 insertions(+), 50 deletions(-) diff --git a/ietf/api/serializers_rpc.py b/ietf/api/serializers_rpc.py index fe7f609251..440c2a73d4 100644 --- a/ietf/api/serializers_rpc.py +++ b/ietf/api/serializers_rpc.py @@ -27,7 +27,7 @@ update_rfcauthors, ) from ietf.group.models import Group -from ietf.name.models import StreamName, StdLevelName, FormalLanguageName +from ietf.name.models import StreamName, StdLevelName from ietf.person.models import Person from ietf.utils import log @@ -137,7 +137,6 @@ class Meta: "pages", "source_format", "authors", - "shepherd", "intended_std_level", "consensus", "shepherd", @@ -263,15 +262,6 @@ class RfcPubSerializer(serializers.ModelSerializer): stream = serializers.PrimaryKeyRelatedField( queryset=StreamName.objects.filter(used=True) ) - formal_languages = serializers.PrimaryKeyRelatedField( - many=True, - required=False, - queryset=FormalLanguageName.objects.filter(used=True), - help_text=( - "formal languages used in RFC (defaults to those from draft, send empty" - "list to override)" - ) - ) std_level = serializers.PrimaryKeyRelatedField( queryset=StdLevelName.objects.filter(used=True), ) @@ -315,11 +305,8 @@ class Meta: "stream", "abstract", "pages", - "words", - "formal_languages", "std_level", "ad", - "note", "obsoletes", "updates", "subseries", @@ -353,9 +340,6 @@ def create(self, validated_data): # If specified, retrieve draft and extract RFC default values from it if draft_name is None: draft = None - defaults_from_draft = { - "group": Group.objects.get(acronym="none", type_id="individ"), - } else: # validation enforces that draft_name and draft_rev are both present draft = Document.objects.filter( @@ -378,17 +362,11 @@ def create(self, validated_data): }, code="already-published-draft", ) - defaults_from_draft = { - "ad": draft.ad, - "formal_languages": draft.formal_languages.all(), - "group": draft.group, - "note": draft.note, - } # Transaction to clean up if something fails with transaction.atomic(): # create rfc, letting validated request data override draft defaults - rfc = self._create_rfc(defaults_from_draft | validated_data) + rfc = self._create_rfc(validated_data) DocEvent.objects.create( doc=rfc, rev=rfc.rev, @@ -523,14 +501,11 @@ def create(self, validated_data): def _create_rfc(self, validated_data): authors_data = validated_data.pop("authors") - formal_languages = validated_data.pop("formal_languages", []) - # todo ad field rfc = Document.objects.create( type_id="rfc", name=f"rfc{validated_data['rfc_number']}", **validated_data, ) - rfc.formal_languages.set(formal_languages) # list of PKs is ok for order, author_data in enumerate(authors_data): rfc.rfcauthor_set.create( order=order, diff --git a/ietf/api/tests_views_rpc.py b/ietf/api/tests_views_rpc.py index ecb50ee76c..09fb40bf6e 100644 --- a/ietf/api/tests_views_rpc.py +++ b/ietf/api/tests_views_rpc.py @@ -80,9 +80,15 @@ def test_draftviewset_references(self): def test_notify_rfc_published(self): url 
= urlreverse("ietf.api.purple_api.notify_rfc_published") area = GroupFactory(type_id="area") + rfc_group = GroupFactory(type_id="wg") draft_ad = RoleFactory(group=area, name_id="ad").person - authors = PersonFactory.create_batch(2) - draft = WgDraftFactory(group__parent=area, authors=authors) + rfc_ad = PersonFactory() + draft_authors = PersonFactory.create_batch(2) + rfc_authors = PersonFactory.create_batch(3) + draft = WgDraftFactory( + group__parent=area, authors=draft_authors, ad=draft_ad, stream_id="ietf" + ) + rfc_stream_id = "ise" assert isinstance(draft, Document), "WgDraftFactory should generate a Document" unused_rfc_number = ( Document.objects.filter(rfc_number__isnull=False).aggregate( @@ -96,7 +102,7 @@ def test_notify_rfc_published(self): "draft_name": draft.name, "draft_rev": draft.rev, "rfc_number": unused_rfc_number, - "title": draft.title, + "title": "RFC " + draft.title, "authors": [ { "titlepage_name": f"titlepage {author.name}", @@ -106,17 +112,14 @@ def test_notify_rfc_published(self): "affiliation": "Some Affiliation", "country": "CA", } - for author in authors + for author in rfc_authors ], - "group": draft.group.acronym, - "stream": draft.stream_id, - "abstract": draft.abstract, - "pages": draft.pages, - "words": draft.pages * 250, - "formal_languages": [], + "group": rfc_group.acronym, + "stream": rfc_stream_id, + "abstract": "RFC version of " + draft.abstract, + "pages": draft.pages + 10, "std_level": "ps", - "ad": draft_ad.pk, - "note": "noted", + "ad": rfc_ad.pk, "obsoletes": [], "updates": [], "subseries": [], @@ -137,7 +140,7 @@ def test_notify_rfc_published(self): ).count(), 1, ) - self.assertEqual(rfc.title, draft.title) + self.assertEqual(rfc.title, "RFC " + draft.title) self.assertEqual(rfc.documentauthor_set.count(), 0) self.assertEqual( list( @@ -159,18 +162,15 @@ def test_notify_rfc_published(self): "affiliation": "Some Affiliation", "country": "CA", } - for author in authors + for author in rfc_authors ], ) - self.assertEqual(rfc.group, draft.group) - self.assertEqual(rfc.stream, draft.stream) - self.assertEqual(rfc.abstract, draft.abstract) - self.assertEqual(rfc.pages, draft.pages) - self.assertEqual(rfc.words, draft.pages * 250) - self.assertEqual(rfc.formal_languages.count(), 0) + self.assertEqual(rfc.group, rfc_group) + self.assertEqual(rfc.stream_id, rfc_stream_id) + self.assertEqual(rfc.abstract, "RFC version of " + draft.abstract) + self.assertEqual(rfc.pages, draft.pages + 10) self.assertEqual(rfc.std_level_id, "ps") - self.assertEqual(rfc.ad, draft_ad) - self.assertEqual(rfc.note, "noted") + self.assertEqual(rfc.ad, rfc_ad) self.assertEqual(rfc.related_that_doc("obs"), []) self.assertEqual(rfc.related_that_doc("updates"), []) self.assertEqual(rfc.part_of(), []) diff --git a/ietf/api/views_rpc.py b/ietf/api/views_rpc.py index ea9c6348ca..6b1799f654 100644 --- a/ietf/api/views_rpc.py +++ b/ietf/api/views_rpc.py @@ -5,6 +5,7 @@ from tempfile import TemporaryDirectory from django.conf import settings +from django.db import IntegrityError from drf_spectacular.utils import OpenApiParameter from rest_framework import mixins, parsers, serializers, viewsets, status from rest_framework.decorators import action @@ -360,7 +361,20 @@ def post(self, request): serializer = RfcPubSerializer(data=request.data) serializer.is_valid(raise_exception=True) # Create RFC - serializer.save() + try: + serializer.save() + except IntegrityError as err: + if Document.objects.filter( + rfc_number=serializer.validated_data["rfc_number"] + ): + raise 
serializers.ValidationError( + "RFC with that number already exists", + code="rfc-number-in-use", + ) + raise serializers.ValidationError( + f"Unable to publish: {err}", + code="unknown-integrity-error", + ) return Response(NotificationAckSerializer().data) From 33fe0bcb7cafadc2096bfd1386c3d7e8a6a915f5 Mon Sep 17 00:00:00 2001 From: Rudi Matz Date: Wed, 28 Jan 2026 13:47:48 -0500 Subject: [PATCH 002/102] feat: add consensus in Draft serializer (#10327) --- ietf/api/serializers_rpc.py | 1 + 1 file changed, 1 insertion(+) diff --git a/ietf/api/serializers_rpc.py b/ietf/api/serializers_rpc.py index 440c2a73d4..34e2c791c0 100644 --- a/ietf/api/serializers_rpc.py +++ b/ietf/api/serializers_rpc.py @@ -175,6 +175,7 @@ class Meta: "pages", "source_format", "authors", + "consensus", ] From a174f43574c5ed4f20ddcc0d600d03e5156fb351 Mon Sep 17 00:00:00 2001 From: Jennifer Richards Date: Fri, 30 Jan 2026 14:49:47 -0400 Subject: [PATCH 003/102] fix: use current time for bofreq revisions (#10333) * fix: use current time for bofreq revisions * test: test time handling Adjusts assertion argument order to match our usual style --- ietf/doc/tests_bofreq.py | 19 ++++++++++++------- ietf/doc/views_bofreq.py | 1 - 2 files changed, 12 insertions(+), 8 deletions(-) diff --git a/ietf/doc/tests_bofreq.py b/ietf/doc/tests_bofreq.py index 6a7c9393ef..6b142149be 100644 --- a/ietf/doc/tests_bofreq.py +++ b/ietf/doc/tests_bofreq.py @@ -307,17 +307,20 @@ def test_submit(self): url = urlreverse('ietf.doc.views_bofreq.submit', kwargs=dict(name=doc.name)) rev = doc.rev + doc_time = doc.time r = self.client.post(url,{'bofreq_submission':'enter','bofreq_content':'# oiwefrase'}) self.assertEqual(r.status_code, 302) doc = reload_db_objects(doc) - self.assertEqual(rev, doc.rev) + self.assertEqual(doc.rev, rev) + self.assertEqual(doc.time, doc_time) nobody = PersonFactory() self.client.login(username=nobody.user.username, password=nobody.user.username+'+password') r = self.client.post(url,{'bofreq_submission':'enter','bofreq_content':'# oiwefrase'}) self.assertEqual(r.status_code, 403) doc = reload_db_objects(doc) - self.assertEqual(rev, doc.rev) + self.assertEqual(doc.rev, rev) + self.assertEqual(doc.time, doc_time) self.client.logout() editor = bofreq_editors(doc).first() @@ -339,12 +342,14 @@ def test_submit(self): r = self.client.post(url, postdict) self.assertEqual(r.status_code, 302) doc = reload_db_objects(doc) - self.assertEqual('%02d'%(int(rev)+1) ,doc.rev) - self.assertEqual(f'# {username}', doc.text()) - self.assertEqual(f'# {username}', retrieve_str('bofreq',doc.get_base_name())) - self.assertEqual(docevent_count+1, doc.docevent_set.count()) - self.assertEqual(1, len(outbox)) + self.assertEqual(doc.rev, '%02d'%(int(rev)+1)) + self.assertGreater(doc.time, doc_time) + self.assertEqual(doc.text(), f'# {username}') + self.assertEqual(retrieve_str('bofreq', doc.get_base_name()), f'# {username}') + self.assertEqual(doc.docevent_set.count(), docevent_count+1) + self.assertEqual(len(outbox), 1) rev = doc.rev + doc_time = doc.time finally: os.unlink(file.name) diff --git a/ietf/doc/views_bofreq.py b/ietf/doc/views_bofreq.py index 71cbe30491..94e3960dfa 100644 --- a/ietf/doc/views_bofreq.py +++ b/ietf/doc/views_bofreq.py @@ -91,7 +91,6 @@ def submit(request, name): by=request.user.person, rev=bofreq.rev, desc='New revision available', - time=bofreq.time, ) bofreq.save_with_history([e]) bofreq_submission = form.cleaned_data['bofreq_submission'] From 2dbe61e891752245447f4c2774353151f0bb34e5 Mon Sep 17 00:00:00 2001 From: Jennifer 
Richards Date: Mon, 9 Feb 2026 14:43:58 -0400 Subject: [PATCH 004/102] feat: speed up agenda.ics; cache more agenda data (#10362) * chore(dev): cprofile middleware settings * feat: precomputed agenda.ics (wip) * feat: precomp filtering support * fix: separately cache versioned hrefs Fixes https://github.com/ietf-tools/datatracker/issues/10355 * fix: versionless agenda href in agenda / ical * fix: preserve RESCHEDULED output * fix: fixup to agree with original output * feat: separate agenda cache, cache old meetings * feat: agenda refresh tasks * chore: undo accidental commit * chore: remove debug parameter * fix: convert session ID to int for comparison * test: update/fix tests, rename new task * refactor: rename task to have _task suffix Also changes a log msg so it won't contain "None" awkwardly * feat: no exceptions from agenda_data_refresh_task * test: explanatory comment * ci: agenda cache for k8s / testcrawl --- ietf/doc/models.py | 16 ++- ietf/meeting/tasks.py | 92 +++++++++++--- ietf/meeting/tests_tasks.py | 64 ++++++++-- ietf/meeting/views.py | 247 +++++++++++++++++++++++++++++++----- ietf/settings.py | 37 +++++- ietf/settings_testcrawl.py | 3 + k8s/settings_local.py | 10 ++ 7 files changed, 396 insertions(+), 73 deletions(-) diff --git a/ietf/doc/models.py b/ietf/doc/models.py index cce9203d09..463aa6fd97 100644 --- a/ietf/doc/models.py +++ b/ietf/doc/models.py @@ -239,14 +239,14 @@ def revisions_by_newrevisionevent(self): return revisions def get_href(self, meeting=None): - return self._get_ref(meeting=meeting,meeting_doc_refs=settings.MEETING_DOC_HREFS) + return self._get_ref(meeting=meeting, versioned=True) def get_versionless_href(self, meeting=None): - return self._get_ref(meeting=meeting,meeting_doc_refs=settings.MEETING_DOC_GREFS) + return self._get_ref(meeting=meeting, versioned=False) - def _get_ref(self, meeting=None, meeting_doc_refs=settings.MEETING_DOC_HREFS): + def _get_ref(self, meeting=None, versioned=True): """ Returns an url to the document text. This differs from .get_absolute_url(), which returns an url to the datatracker page for the document. @@ -255,12 +255,16 @@ def _get_ref(self, meeting=None, meeting_doc_refs=settings.MEETING_DOC_HREFS): # the earlier resolution order, but there's at the moment one single # instance which matches this (with correct results), so we won't # break things all over the place. - if not hasattr(self, '_cached_href'): + cache_attr = "_cached_href" if versioned else "_cached_versionless_href" + if not hasattr(self, cache_attr): validator = URLValidator() if self.external_url and self.external_url.split(':')[0] in validator.schemes: validator(self.external_url) return self.external_url + meeting_doc_refs = ( + settings.MEETING_DOC_HREFS if versioned else settings.MEETING_DOC_GREFS + ) if self.type_id in settings.DOC_HREFS and self.type_id in meeting_doc_refs: if self.meeting_related(): self.is_meeting_related = True @@ -312,8 +316,8 @@ def _get_ref(self, meeting=None, meeting_doc_refs=settings.MEETING_DOC_HREFS): if href.startswith('/'): href = settings.IDTRACKER_BASE_URL + href - self._cached_href = href - return self._cached_href + setattr(self, cache_attr, href) + return getattr(self, cache_attr) def set_state(self, state): """Switch state type implicit in state to state. 
This just diff --git a/ietf/meeting/tasks.py b/ietf/meeting/tasks.py index c361325f9a..a73763560b 100644 --- a/ietf/meeting/tasks.py +++ b/ietf/meeting/tasks.py @@ -1,11 +1,14 @@ -# Copyright The IETF Trust 2024-2025, All Rights Reserved +# Copyright The IETF Trust 2024-2026, All Rights Reserved # # Celery task definitions # import datetime -from celery import shared_task -# from django.db.models import QuerySet +from itertools import batched + +from celery import shared_task, chain +from django.db.models import IntegerField +from django.db.models.functions import Cast from django.utils import timezone from ietf.utils import log @@ -19,9 +22,56 @@ from .utils import fetch_attendance_from_meetings +@shared_task +def agenda_data_refresh_task(num=None): + """Refresh agenda data for one plenary meeting + + If `num` is `None`, refreshes data for the current meeting. + """ + log.log( + f"Refreshing agenda data for {f"IETF-{num}" if num else "current IETF meeting"}" + ) + try: + generate_agenda_data(num, force_refresh=True) + except Exception as err: + # Log and swallow exceptions so failure on one meeting won't break a chain of + # tasks. This is used by agenda_data_refresh_all_task(). + log.log(f"ERROR: Refreshing agenda data failed for num={num}: {err}") + + @shared_task def agenda_data_refresh(): - generate_agenda_data(force_refresh=True) + """Deprecated. Use agenda_data_refresh_task() instead. + + TODO remove this after switching the periodic task to the new name + """ + log.log("Deprecated agenda_data_refresh task called!") + agenda_data_refresh_task() + + +@shared_task +def agenda_data_refresh_all_task(*, batch_size=10): + """Refresh agenda data for all plenary meetings + + Executes as a chain of tasks, each computing up to `batch_size` meetings + in a single task. + """ + meeting_numbers = sorted( + Meeting.objects.annotate( + number_as_int=Cast("number", output_field=IntegerField()) + ) + .filter(type_id="ietf", number_as_int__gt=64) + .values_list("number_as_int", flat=True) + ) + # Batch using chained maps rather than celery.chunk so we only use one worker + # at a time. 
+ batched_task_chain = chain( + *( + agenda_data_refresh_task.map(nums) + for nums in batched(meeting_numbers, batch_size) + ) + ) + batched_task_chain.delay() @shared_task @@ -55,7 +105,9 @@ def proceedings_content_refresh_task(*, all=False): @shared_task def fetch_meeting_attendance_task(): # fetch most recent two meetings - meetings = Meeting.objects.filter(type="ietf", date__lte=timezone.now()).order_by("-date")[:2] + meetings = Meeting.objects.filter(type="ietf", date__lte=timezone.now()).order_by( + "-date" + )[:2] try: stats = fetch_attendance_from_meetings(meetings) except RuntimeError as err: @@ -64,8 +116,11 @@ def fetch_meeting_attendance_task(): for meeting, meeting_stats in zip(meetings, stats): log.log( "Fetched data for meeting {:>3}: {:4d} created, {:4d} updated, {:4d} deleted, {:4d} processed".format( - meeting.number, meeting_stats['created'], meeting_stats['updated'], meeting_stats['deleted'], - meeting_stats['processed'] + meeting.number, + meeting_stats["created"], + meeting_stats["updated"], + meeting_stats["deleted"], + meeting_stats["processed"], ) ) @@ -73,7 +128,7 @@ def fetch_meeting_attendance_task(): def _select_meetings( meetings: list[str] | None = None, meetings_since: str | None = None, - meetings_until: str | None = None + meetings_until: str | None = None, ): # nyah """Select meetings by number or date range""" # IETF-1 = 1986-01-16 @@ -130,15 +185,15 @@ def _select_meetings( @shared_task def resolve_meeting_materials_task( *, # only allow kw arguments - meetings: list[str] | None=None, - meetings_since: str | None=None, - meetings_until: str | None=None + meetings: list[str] | None = None, + meetings_since: str | None = None, + meetings_until: str | None = None, ): """Run materials resolver on meetings - + Can request a set of meetings by number by passing a list in the meetings arg, or by range by passing an iso-format timestamps in meetings_since / meetings_until. - To select all meetings, set meetings_since="zero" and omit other parameters. + To select all meetings, set meetings_since="zero" and omit other parameters. """ meetings_qs = _select_meetings(meetings, meetings_since, meetings_until) for meeting in meetings_qs.order_by("date"): @@ -155,7 +210,9 @@ def resolve_meeting_materials_task( f"meeting {meeting.number}: {err}" ) else: - log.log(f"Resolved in {(timezone.now() - mark).total_seconds():0.3f} seconds.") + log.log( + f"Resolved in {(timezone.now() - mark).total_seconds():0.3f} seconds." + ) @shared_task @@ -163,13 +220,13 @@ def store_meeting_materials_as_blobs_task( *, # only allow kw arguments meetings: list[str] | None = None, meetings_since: str | None = None, - meetings_until: str | None = None + meetings_until: str | None = None, ): """Push meeting materials into the blob store Can request a set of meetings by number by passing a list in the meetings arg, or by range by passing an iso-format timestamps in meetings_since / meetings_until. - To select all meetings, set meetings_since="zero" and omit other parameters. + To select all meetings, set meetings_since="zero" and omit other parameters. """ meetings_qs = _select_meetings(meetings, meetings_since, meetings_until) for meeting in meetings_qs.order_by("date"): @@ -187,4 +244,5 @@ def store_meeting_materials_as_blobs_task( ) else: log.log( - f"Blobs created in {(timezone.now() - mark).total_seconds():0.3f} seconds.") + f"Blobs created in {(timezone.now() - mark).total_seconds():0.3f} seconds." 
+ ) diff --git a/ietf/meeting/tests_tasks.py b/ietf/meeting/tests_tasks.py index a5da00ecbf..2c5120a39d 100644 --- a/ietf/meeting/tests_tasks.py +++ b/ietf/meeting/tests_tasks.py @@ -5,23 +5,63 @@ from ietf.utils.test_utils import TestCase from ietf.utils.timezone import date_today from .factories import MeetingFactory -from .tasks import proceedings_content_refresh_task, agenda_data_refresh +from .tasks import ( + proceedings_content_refresh_task, + agenda_data_refresh_task, + agenda_data_refresh_all_task, +) from .tasks import fetch_meeting_attendance_task class TaskTests(TestCase): @patch("ietf.meeting.tasks.generate_agenda_data") - def test_agenda_data_refresh(self, mock_generate): - agenda_data_refresh() + def test_agenda_data_refresh_task(self, mock_generate): + agenda_data_refresh_task() self.assertTrue(mock_generate.called) - self.assertEqual(mock_generate.call_args, call(force_refresh=True)) + self.assertEqual(mock_generate.call_args, call(None, force_refresh=True)) + + mock_generate.reset_mock() + mock_generate.side_effect = RuntimeError + try: + agenda_data_refresh_task() + except Exception as err: + self.fail( + f"agenda_data_refresh_task should not raise exceptions (got {repr(err)})" + ) + + @patch("ietf.meeting.tasks.agenda_data_refresh_task") + @patch("ietf.meeting.tasks.chain") + def test_agenda_data_refresh_all_task(self, mock_chain, mock_agenda_data_refresh): + # Patch the agenda_data_refresh_task task with a mock whose `.map` attribute + # converts its argument, which is expected to be an iterator, to a list + # and returns it. We'll use this to check that the expected task chain + # was set up, but we don't actually run any celery tasks. + mock_agenda_data_refresh.map.side_effect = lambda x: list(x) + + meetings = MeetingFactory.create_batch(5, type_id="ietf") + numbers = sorted(int(m.number) for m in meetings) + agenda_data_refresh_all_task(batch_size=2) + self.assertTrue(mock_chain.called) + # The lists in the call() below are the output of the lambda we patched in + # via mock_agenda_data_refresh.map.side_effect above. I.e., this tests that + # map() was called with the correct batched data. 
+ self.assertEqual( + mock_chain.call_args, + call( + [numbers[0], numbers[1]], + [numbers[2], numbers[3]], + [numbers[4]], + ), + ) + self.assertEqual(mock_agenda_data_refresh.call_count, 0) + self.assertEqual(mock_agenda_data_refresh.map.call_count, 3) @patch("ietf.meeting.tasks.generate_proceedings_content") def test_proceedings_content_refresh_task(self, mock_generate): # Generate a couple of meetings meeting120 = MeetingFactory(type_id="ietf", number="120") # 24 * 5 meeting127 = MeetingFactory(type_id="ietf", number="127") # 24 * 5 + 7 - + # Times to be returned now_utc = datetime.datetime.now(tz=datetime.UTC) hour_00_utc = now_utc.replace(hour=0) @@ -34,19 +74,19 @@ def test_proceedings_content_refresh_task(self, mock_generate): self.assertEqual(mock_generate.call_count, 1) self.assertEqual(mock_generate.call_args, call(meeting120, force_refresh=True)) mock_generate.reset_mock() - + # hour 01 - should call no meetings with patch("ietf.meeting.tasks.timezone.now", return_value=hour_01_utc): proceedings_content_refresh_task() self.assertEqual(mock_generate.call_count, 0) - + # hour 07 - should call meeting with number % 24 == 0 with patch("ietf.meeting.tasks.timezone.now", return_value=hour_07_utc): proceedings_content_refresh_task() self.assertEqual(mock_generate.call_count, 1) self.assertEqual(mock_generate.call_args, call(meeting127, force_refresh=True)) mock_generate.reset_mock() - + # With all=True, all should be called regardless of time. Reuse hour_01_utc which called none before with patch("ietf.meeting.tasks.timezone.now", return_value=hour_01_utc): proceedings_content_refresh_task(all=True) @@ -61,10 +101,10 @@ def test_fetch_meeting_attendance_task(self, mock_fetch_attendance): MeetingFactory(type_id="ietf", date=today - datetime.timedelta(days=3)), ] data = { - 'created': 1, - 'updated': 2, - 'deleted': 0, - 'processed': 3, + "created": 1, + "updated": 2, + "deleted": 0, + "processed": 3, } mock_fetch_attendance.return_value = [data, data] diff --git a/ietf/meeting/views.py b/ietf/meeting/views.py index 903e3c7e79..8dccda9c87 100644 --- a/ietf/meeting/views.py +++ b/ietf/meeting/views.py @@ -40,7 +40,7 @@ from django.core.exceptions import ValidationError from django.core.files.uploadedfile import SimpleUploadedFile from django.core.validators import URLValidator -from django.urls import reverse,reverse_lazy +from django.urls import reverse, reverse_lazy, NoReverseMatch from django.db.models import F, Max, Q from django.forms.models import modelform_factory, inlineformset_factory from django.template import TemplateDoesNotExist @@ -1859,18 +1859,22 @@ def generate_agenda_data(num=None, force_refresh=False): :num: meeting number :force_refresh: True to force a refresh of the cache """ - cache = caches["default"] - cache_timeout = 6 * 60 - meeting = get_ietf_meeting(num) if meeting is None: raise Http404("No such full IETF meeting") elif int(meeting.number) <= 64: - return Http404("Pre-IETF 64 meetings are not available through this API") - else: - pass + raise Http404("Pre-IETF 64 meetings are not available through this API") + is_current_meeting = meeting.number == get_current_ietf_meeting_num() + + cache = caches["agenda"] + cache_timeout = ( + settings.AGENDA_CACHE_TIMEOUT_CURRENT_MEETING + if is_current_meeting + else settings.AGENDA_CACHE_TIMEOUT_DEFAULT + ) + cache_format = "1" # bump this on backward-incompatible data format changes - cache_key = f"generate_agenda_data_{meeting.number}" + cache_key = f"generate_agenda_data:{meeting.number}:v{cache_format}" if not 
force_refresh: cached_value = cache.get(cache_key) if cached_value is not None: @@ -1890,8 +1894,6 @@ def generate_agenda_data(num=None, force_refresh=False): filter_organizer = AgendaFilterOrganizer(assignments=filtered_assignments) - is_current_meeting = (num is None) or (num == get_current_ietf_meeting_num()) - # Get Floor Plans floors = FloorPlan.objects.filter(meeting=meeting).order_by('order') @@ -1966,21 +1968,32 @@ def api_get_session_materials(request, session_id=None): ) -def agenda_extract_schedule (item): +def agenda_extract_schedule(item): + if item.session.current_status == "resched": + resched_to = item.session.tombstone_for.official_timeslotassignment() + else: + resched_to = None return { "id": item.id, + "slug": item.slug(), "sessionId": item.session.id, - "room": item.room_name if item.timeslot.show_location else None, + "room": (item.timeslot.get_location() or None) if item.timeslot else None, "location": { "short": item.timeslot.location.floorplan.short, "name": item.timeslot.location.floorplan.name, } if (item.timeslot.show_location and item.timeslot.location and item.timeslot.location.floorplan) else {}, "acronym": item.acronym, - "duration": item.timeslot.duration.seconds, + "duration": item.timeslot.duration.total_seconds(), "name": item.session.name, + "slotId": item.timeslot.id, "slotName": item.timeslot.name, + "slotModified": item.timeslot.modified.isoformat(), "startDateTime": item.timeslot.time.isoformat(), "status": item.session.current_status, + "rescheduledTo": { + "startDateTime": resched_to.timeslot.time.isoformat(), + "duration": resched_to.timeslot.duration.total_seconds(), + } if resched_to is not None else {}, "type": item.session.type.slug, "purpose": item.session.purpose.slug, "isBoF": item.session.group_at_the_time().state_id == "bof", @@ -1998,7 +2011,7 @@ def agenda_extract_schedule (item): "showAgenda": True if (item.session.agenda() is not None or item.session.remote_instructions) else False }, "agenda": { - "url": item.session.agenda().get_href() + "url": item.session.agenda().get_versionless_href() } if item.session.agenda() is not None else { "url": None }, @@ -2290,10 +2303,131 @@ def ical_session_status(assignment): else: return "CONFIRMED" + +def render_icalendar_precomp(agenda_data): + ical_content = generate_agenda_ical_precomp(agenda_data) + return HttpResponse(ical_content, content_type="text/calendar") + + def render_icalendar(schedule, assignments): ical_content = generate_agenda_ical(schedule, assignments) return HttpResponse(ical_content, content_type="text/calendar") + +def generate_agenda_ical_precomp(agenda_data): + """Generate iCalendar from precomputed data using the icalendar library""" + + cal = Calendar() + cal.add("prodid", "-//IETF//datatracker.ietf.org ical agenda//EN") + cal.add("version", "2.0") + cal.add("method", "PUBLISH") + + meeting_data = agenda_data["meeting"] + for item in agenda_data["schedule"]: + event = Event() + + uid = f"ietf-{meeting_data["number"]}-{item["slotId"]}-{item["acronym"]}" + event.add("uid", uid) + + # add custom field with meeting's local TZ + event.add("x-meeting-tz", meeting_data["timezone"]) + + if item["name"]: + summary = item["name"] + else: + summary = f"{item["groupAcronym"]} - {item["groupName"]}" + + if item["note"]: + summary += f" ({item["note"]})" + + event.add("summary", summary) + + if item["room"]: + event.add("location", item["room"]) # room name + + if item["status"] == "canceled": + status = "CANCELLED" + elif item["status"] == "resched": + resched_to = 
item["rescheduledTo"] + if resched_to is None: + status = "RESCHEDULED" + else: + resched_start = datetime.datetime.fromisoformat( + resched_to["startDateTime"] + ) + dur = datetime.timedelta(seconds=resched_to["duration"]) + resched_end = resched_start + dur + formatted_start = resched_start.strftime("%A %H:%M").upper() + formatted_end = resched_end.strftime("%H:%M") + status = f"RESCHEDULED TO {formatted_start}-{formatted_end}" + else: + status = "CONFIRMED" + event.add("status", status) + + event.add("class", "PUBLIC") + + start_time = datetime.datetime.fromisoformat(item["startDateTime"]) + duration = datetime.timedelta(seconds=item["duration"]) + event.add("dtstart", start_time) + event.add("dtend", start_time + duration) + + # DTSTAMP: when the event was created or last modified (in UTC) + # n.b. timeslot.modified may not be an accurate measure of this + event.add("dtstamp", datetime.datetime.fromisoformat(item["slotModified"])) + + description_parts = [item["slotName"]] + + if item["note"]: + description_parts.append(f"Note: {item["note"]}") + + links = item["links"] + if links["onsiteTool"]: + description_parts.append(f"Onsite tool: {links["onsiteTool"]}") + + if links["videoStream"]: + description_parts.append(f"Meetecho: {links["videoStream"]}") + + if links["webex"]: + description_parts.append(f"Webex: {links["webex"]}") + + if item["remoteInstructions"]: + description_parts.append( + f"Remote instructions: {item["remoteInstructions"]}" + ) + + try: + materials_url = absurl( + "ietf.meeting.views.session_details", + num=meeting_data["number"], + acronym=item["acronym"], + ) + except NoReverseMatch: + pass + else: + description_parts.append(f"Session materials: {materials_url}") + event.add("url", materials_url) + + if meeting_data["number"].isdigit(): + try: + agenda_url = absurl("agenda", num=meeting_data["number"]) + except NoReverseMatch: + pass + else: + description_parts.append(f"See in schedule: {agenda_url}#row-{item["slug"]}") + + if item["agenda"] and item["agenda"]["url"]: + description_parts.append(f"Agenda {item["agenda"]["url"]}") + + # Join all description parts with 2 newlines + description = "\n\n".join(description_parts) + event.add("description", description) + + # Add event to calendar + cal.add_component(event) + + return cal.to_ical().decode("utf-8") + + def generate_agenda_ical(schedule, assignments): """Generate iCalendar using the icalendar library""" @@ -2428,10 +2562,66 @@ def parse_agenda_filter_params(querydict): def should_include_assignment(filter_params, assignment): """Decide whether to include an assignment""" - shown = len(set(filter_params['show']).intersection(assignment.filter_keywords)) > 0 - hidden = len(set(filter_params['hide']).intersection(assignment.filter_keywords)) > 0 + if hasattr(assignment, "filter_keywords"): + kw = assignment.filter_keywords + elif isinstance(assignment, dict): + kw = assignment.get("filterKeywords", []) + else: + raise ValueError("Unsupported assignment instance") + shown = len(set(filter_params['show']).intersection(kw)) > 0 + hidden = len(set(filter_params['hide']).intersection(kw)) > 0 return shown and not hidden + +def agenda_ical_ietf(meeting, filt_params, acronym=None, session_id=None): + agenda_data = generate_agenda_data(meeting.number, force_refresh=False) + if acronym: + agenda_data["schedule"] = [ + item + for item in agenda_data["schedule"] + if item["groupAcronym"] == acronym + ] + elif session_id: + agenda_data["schedule"] = [ + item + for item in agenda_data["schedule"] + if item["sessionId"] == 
session_id + ] + if filt_params is not None: + # Apply the filter + agenda_data["schedule"] = [ + item + for item in agenda_data["schedule"] + if should_include_assignment(filt_params, item) + ] + return render_icalendar_precomp(agenda_data) + + +def agenda_ical_interim(meeting, filt_params, acronym=None, session_id=None): + schedule = get_schedule(meeting) + + if schedule is None and acronym is None and session_id is None: + raise Http404 + + assignments = SchedTimeSessAssignment.objects.filter( + schedule__in=[schedule, schedule.base], + session__on_agenda=True, + ) + assignments = preprocess_assignments_for_agenda(assignments, meeting) + AgendaKeywordTagger(assignments=assignments).apply() + + if filt_params is not None: + # Apply the filter + assignments = [a for a in assignments if should_include_assignment(filt_params, a)] + + if acronym: + assignments = [ a for a in assignments if a.session.group_at_the_time().acronym == acronym ] + elif session_id: + assignments = [ a for a in assignments if a.session_id == int(session_id) ] + + return render_icalendar(schedule, assignments) + + def agenda_ical(request, num=None, acronym=None, session_id=None): """Agenda ical view @@ -2459,33 +2649,20 @@ def agenda_ical(request, num=None, acronym=None, session_id=None): raise Http404 else: meeting = get_meeting(num, type_in=None) # get requested meeting, whatever its type - schedule = get_schedule(meeting) - if schedule is None and acronym is None and session_id is None: - raise Http404 - - assignments = SchedTimeSessAssignment.objects.filter( - schedule__in=[schedule, schedule.base], - session__on_agenda=True, - ) - assignments = preprocess_assignments_for_agenda(assignments, meeting) - AgendaKeywordTagger(assignments=assignments).apply() + if isinstance(session_id, str) and session_id.isdigit(): + session_id = int(session_id) try: filt_params = parse_agenda_filter_params(request.GET) except ValueError as e: return HttpResponseBadRequest(str(e)) - if filt_params is not None: - # Apply the filter - assignments = [a for a in assignments if should_include_assignment(filt_params, a)] - - if acronym: - assignments = [ a for a in assignments if a.session.group_at_the_time().acronym == acronym ] - elif session_id: - assignments = [ a for a in assignments if a.session_id == int(session_id) ] + if meeting.type_id == "ietf": + return agenda_ical_ietf(meeting, filt_params, acronym, session_id) + else: + return agenda_ical_interim(meeting, filt_params, acronym, session_id) - return render_icalendar(schedule, assignments) @cache_page(15 * 60) def agenda_json(request, num=None): diff --git a/ietf/settings.py b/ietf/settings.py index 1cda79e21b..899a377ad7 100644 --- a/ietf/settings.py +++ b/ietf/settings.py @@ -228,6 +228,10 @@ BLOBSTORAGE_CONNECT_TIMEOUT = 10 # seconds; boto3 default is 60 BLOBSTORAGE_READ_TIMEOUT = 10 # seconds; boto3 default is 60 +# Caching for agenda data in seconds +AGENDA_CACHE_TIMEOUT_DEFAULT = 8 * 24 * 60 * 60 # 8 days +AGENDA_CACHE_TIMEOUT_CURRENT_MEETING = 6 * 60 # 6 minutes + WSGI_APPLICATION = "ietf.wsgi.application" AUTHENTICATION_BACKENDS = ( 'ietf.ietfauth.backends.CaseInsensitiveModelBackend', ) @@ -1400,6 +1404,16 @@ def skip_unreadable_post(record): f"{key_prefix}:{version}:{sha384(str(key).encode('utf8')).hexdigest()}" ), }, + "agenda": { + "BACKEND": "ietf.utils.cache.LenientMemcacheCache", + "LOCATION": f"{MEMCACHED_HOST}:{MEMCACHED_PORT}", + # No release-specific VERSION setting. 
+ "KEY_PREFIX": "ietf:dt:agenda", + # Key function is default except with sha384-encoded key + "KEY_FUNCTION": lambda key, key_prefix, version: ( + f"{key_prefix}:{version}:{sha384(str(key).encode('utf8')).hexdigest()}" + ), + }, "proceedings": { "BACKEND": "ietf.utils.cache.LenientMemcacheCache", "LOCATION": f"{MEMCACHED_HOST}:{MEMCACHED_PORT}", @@ -1453,6 +1467,17 @@ def skip_unreadable_post(record): "VERSION": __version__, "KEY_PREFIX": "ietf:dt", }, + "agenda": { + "BACKEND": "django.core.cache.backends.dummy.DummyCache", + # "BACKEND": "ietf.utils.cache.LenientMemcacheCache", + # "LOCATION": "127.0.0.1:11211", + # No release-specific VERSION setting. + "KEY_PREFIX": "ietf:dt:agenda", + # Key function is default except with sha384-encoded key + "KEY_FUNCTION": lambda key, key_prefix, version: ( + f"{key_prefix}:{version}:{sha384(str(key).encode('utf8')).hexdigest()}" + ), + }, "proceedings": { "BACKEND": "django.core.cache.backends.dummy.DummyCache", # "BACKEND": "ietf.utils.cache.LenientMemcacheCache", @@ -1519,11 +1544,17 @@ def skip_unreadable_post(record): NOMCOM_APP_SECRET = b'\x9b\xdas1\xec\xd5\xa0SI~\xcb\xd4\xf5t\x99\xc4i\xd7\x9f\x0b\xa9\xe8\xfeY\x80$\x1e\x12tN:\x84' ALLOWED_HOSTS = ['*',] - + try: # see https://github.com/omarish/django-cprofile-middleware - import django_cprofile_middleware # pyflakes:ignore - MIDDLEWARE = MIDDLEWARE + ['django_cprofile_middleware.middleware.ProfilerMiddleware', ] + import django_cprofile_middleware # pyflakes:ignore + + MIDDLEWARE = MIDDLEWARE + [ + "django_cprofile_middleware.middleware.ProfilerMiddleware", + ] + DJANGO_CPROFILE_MIDDLEWARE_REQUIRE_STAFF = ( + False # Do not use this setting for a public site! + ) except ImportError: pass diff --git a/ietf/settings_testcrawl.py b/ietf/settings_testcrawl.py index 40744a228d..edb978757a 100644 --- a/ietf/settings_testcrawl.py +++ b/ietf/settings_testcrawl.py @@ -27,6 +27,9 @@ 'MAX_ENTRIES': 10000, }, }, + 'agenda': { + 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', + }, 'proceedings': { 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', }, diff --git a/k8s/settings_local.py b/k8s/settings_local.py index f8ffacc83f..0386dbbdf9 100644 --- a/k8s/settings_local.py +++ b/k8s/settings_local.py @@ -306,6 +306,16 @@ def _multiline_to_list(s): f"{key_prefix}:{version}:{sha384(str(key).encode('utf8')).hexdigest()}" ), }, + "agenda": { + "BACKEND": "ietf.utils.cache.LenientMemcacheCache", + "LOCATION": f"{MEMCACHED_HOST}:{MEMCACHED_PORT}", + # No release-specific VERSION setting. 
+ "KEY_PREFIX": "ietf:dt:agenda", + # Key function is default except with sha384-encoded key + "KEY_FUNCTION": lambda key, key_prefix, version: ( + f"{key_prefix}:{version}:{sha384(str(key).encode('utf8')).hexdigest()}" + ), + }, "proceedings": { "BACKEND": "ietf.utils.cache.LenientMemcacheCache", "LOCATION": f"{MEMCACHED_HOST}:{MEMCACHED_PORT}", From f8be1436fb9570eb623c6408cbb755e39bb18b22 Mon Sep 17 00:00:00 2001 From: Jennifer Richards Date: Tue, 10 Feb 2026 13:33:39 -0400 Subject: [PATCH 005/102] fix: add id attr to liaisons ButtonWidget (#10389) --- ietf/liaisons/widgets.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/ietf/liaisons/widgets.py b/ietf/liaisons/widgets.py index 74368e83f2..48db8af0a3 100644 --- a/ietf/liaisons/widgets.py +++ b/ietf/liaisons/widgets.py @@ -26,7 +26,9 @@ def render(self, name, value, **kwargs): html += '%s' % conditional_escape(i) required_str = 'Please fill in %s to attach a new file' % conditional_escape(self.required_label) html += '%s' % conditional_escape(required_str) - html += '' % conditional_escape(self.label) + html += ''.format( + f"id_{name}", conditional_escape(self.label) + ) return mark_safe(html) From 4a024d9d64e36714523b2f9e04bdbc3005aefc03 Mon Sep 17 00:00:00 2001 From: Robert Sparks Date: Tue, 10 Feb 2026 12:25:43 -0600 Subject: [PATCH 006/102] fix: identify editorial drafts that should not expire (#10388) --- ietf/doc/expire.py | 50 ++++++++++++++++++++++++++++++++++------------ 1 file changed, 37 insertions(+), 13 deletions(-) diff --git a/ietf/doc/expire.py b/ietf/doc/expire.py index bf8523aa98..d42af628f8 100644 --- a/ietf/doc/expire.py +++ b/ietf/doc/expire.py @@ -38,22 +38,46 @@ def expirable_drafts(queryset=None): # Populate this first time through (but after django has been set up) if nonexpirable_states is None: # all IESG states except I-D Exists and Dead block expiry - nonexpirable_states = list(State.objects.filter(used=True, type="draft-iesg").exclude(slug__in=("idexists", "dead"))) + nonexpirable_states = list( + State.objects.filter(used=True, type="draft-iesg").exclude( + slug__in=("idexists", "dead") + ) + ) # sent to RFC Editor and RFC Published block expiry (the latter # shouldn't be possible for an active draft, though) - nonexpirable_states += list(State.objects.filter(used=True, type__in=("draft-stream-iab", "draft-stream-irtf", "draft-stream-ise"), slug__in=("rfc-edit", "pub"))) + nonexpirable_states += list( + State.objects.filter( + used=True, + type__in=( + "draft-stream-iab", + "draft-stream-irtf", + "draft-stream-ise", + "draft-stream-editorial", + ), + slug__in=("rfc-edit", "pub"), + ) + ) # other IRTF states that block expiration - nonexpirable_states += list(State.objects.filter(used=True, type_id="draft-stream-irtf", slug__in=("irsgpoll", "iesg-rev",))) - - return queryset.filter( - states__type="draft", states__slug="active" - ).exclude( - expires=None - ).exclude( - states__in=nonexpirable_states - ).exclude( - tags="rfc-rev" # under review by the RFC Editor blocks expiry - ).distinct() + nonexpirable_states += list( + State.objects.filter( + used=True, + type_id="draft-stream-irtf", + slug__in=( + "irsgpoll", + "iesg-rev", + ), + ) + ) + + return ( + queryset.filter(states__type="draft", states__slug="active") + .exclude(expires=None) + .exclude(states__in=nonexpirable_states) + .exclude( + tags="rfc-rev" # under review by the RFC Editor blocks expiry + ) + .distinct() + ) def get_soon_to_expire_drafts(days_of_warning): From 832c62e5c1f0cba736842cbe241236bfc8cf386f Mon Sep 17 
00:00:00 2001 From: Jennifer Richards Date: Wed, 11 Feb 2026 11:41:58 -0400 Subject: [PATCH 007/102] feat: format+has_errata, drop see_also for red API (#10392) * fix: has_errata field for red API * chore: explanatory comment * feat: accurate format list for red API * refactor: specify blob names in API * chore: remove see_also field * fix: finish removing see_also field --- ietf/doc/api.py | 27 ++++++++++++++++++++++----- ietf/doc/models.py | 18 ++++++++++++++++++ ietf/doc/serializers.py | 33 +++++++++++++++++++++++---------- 3 files changed, 63 insertions(+), 15 deletions(-) diff --git a/ietf/doc/api.py b/ietf/doc/api.py index 47e7e6fffd..6a4c0c9fd5 100644 --- a/ietf/doc/api.py +++ b/ietf/doc/api.py @@ -1,7 +1,17 @@ # Copyright The IETF Trust 2024-2026, All Rights Reserved """Doc API implementations""" -from django.db.models import OuterRef, Subquery, Prefetch, Value, JSONField, QuerySet +from django.db.models import ( + BooleanField, + Count, + JSONField, + OuterRef, + Prefetch, + Q, + QuerySet, + Subquery, + Value, +) from django.db.models.functions import TruncDate from django_filters import rest_framework as filters from rest_framework import filters as drf_filters @@ -133,11 +143,18 @@ def augment_rfc_queryset(queryset: QuerySet[Document]): ) .annotate(published=TruncDate("published_datetime", tzinfo=RPC_TZINFO)) .annotate( - # TODO implement these fake fields for real - see_also=Value([], output_field=JSONField()), - formats=Value(["txt", "xml"], output_field=JSONField()), + # Count of "verified-errata" tags will be 1 or 0, convert to Boolean + has_errata=Count( + "tags", + filter=Q( + tags__slug="verified-errata", + ), + output_field=BooleanField(), + ) + ) + .annotate( + # TODO implement this fake field for real keywords=Value(["keyword"], output_field=JSONField()), - errata=Value([], output_field=JSONField()), ) ) diff --git a/ietf/doc/models.py b/ietf/doc/models.py index 463aa6fd97..ec9a25add8 100644 --- a/ietf/doc/models.py +++ b/ietf/doc/models.py @@ -1284,6 +1284,24 @@ def action_holders_enabled(self): iesg_state = self.get_state('draft-iesg') return iesg_state and iesg_state.slug != 'idexists' + def formats(self): + """List of file formats available + + Only implemented for RFCs. Relies on StoredObject. + """ + if self.type_id != "rfc": + raise RuntimeError("Only allowed for type=rfc") + return [ + { + "fmt": Path(object_name).parts[0], + "name": object_name, + } + for object_name in StoredObject.objects.filter( + store="rfc", doc_name=self.name, doc_rev=self.rev + ).values_list("name", flat=True) + ] + + class DocumentURL(models.Model): doc = ForeignKey(Document) tag = ForeignKey(DocUrlTagName) diff --git a/ietf/doc/serializers.py b/ietf/doc/serializers.py index 05647d9ce1..e8d373164b 100644 --- a/ietf/doc/serializers.py +++ b/ietf/doc/serializers.py @@ -16,6 +16,7 @@ class RfcAuthorSerializer(serializers.ModelSerializer): """Serializer for an RfcAuthor / DocumentAuthor in a response""" + datatracker_person_path = serializers.URLField( source="person.get_absolute_url", required=False, @@ -36,7 +37,7 @@ class Meta: def to_representation(self, instance): """instance -> primitive data types - + Translates a DocumentAuthor into an equivalent RfcAuthor we can use the same serializer for either type. 
""" @@ -87,7 +88,15 @@ class DocIdentifierSerializer(serializers.Serializer): type RfcStatusSlugT = Literal[ - "std", "ps", "ds", "bcp", "inf", "exp", "hist", "unkn", "not-issued", + "std", + "ps", + "ds", + "bcp", + "inf", + "exp", + "hist", + "unkn", + "not-issued", ] @@ -188,11 +197,16 @@ class ContainingSubseriesSerializer(serializers.Serializer): type = serializers.CharField(source="source.type_id") +class RfcFormatSerializer(serializers.Serializer): + RFC_FORMATS = ("xml", "txt", "html", "pdf", "ps", "json", "notprepped") + + fmt = serializers.ChoiceField(choices=RFC_FORMATS) + name = serializers.CharField(help_text="Name of blob in the blob store") + + class RfcMetadataSerializer(serializers.ModelSerializer): """Serialize metadata of an RFC""" - RFC_FORMATS = ("xml", "txt", "html", "htmlized", "pdf", "ps") - number = serializers.IntegerField(source="rfc_number") published = serializers.DateField() status = RfcStatusSerializer(source="*") @@ -207,10 +221,11 @@ class RfcMetadataSerializer(serializers.ModelSerializer): updates = RelatedRfcSerializer(many=True, read_only=True) updated_by = ReverseRelatedRfcSerializer(many=True, read_only=True) subseries = ContainingSubseriesSerializer(many=True, read_only=True) - see_also = serializers.ListField(child=serializers.CharField(), read_only=True) - formats = serializers.MultipleChoiceField(choices=RFC_FORMATS) + formats = RfcFormatSerializer( + many=True, read_only=True, help_text="Available formats" + ) keywords = serializers.ListField(child=serializers.CharField(), read_only=True) - errata = serializers.ListField(child=serializers.CharField(), read_only=True) + has_errata = serializers.BooleanField(read_only=True) class Meta: model = Document @@ -230,15 +245,13 @@ class Meta: "updates", "updated_by", "subseries", - "see_also", "draft", "abstract", "formats", "keywords", - "errata", + "has_errata", ] - @extend_schema_field(RfcAuthorSerializer(many=True)) def get_authors(self, doc: Document): # If doc has any RfcAuthors, use those, otherwise fall back to DocumentAuthors From 66a1bf0a9ecae30036cf23fc07f6f26b27b1e94d Mon Sep 17 00:00:00 2001 From: Jennifer Richards Date: Wed, 11 Feb 2026 15:00:57 -0400 Subject: [PATCH 008/102] chore: un-squelch bibtexparser DeprecationWarnings (#10395) --- ietf/settings.py | 2 -- requirements.txt | 2 +- 2 files changed, 1 insertion(+), 3 deletions(-) diff --git a/ietf/settings.py b/ietf/settings.py index 899a377ad7..565e8825a9 100644 --- a/ietf/settings.py +++ b/ietf/settings.py @@ -36,8 +36,6 @@ warnings.filterwarnings("ignore", message="datetime.datetime.utcfromtimestamp\\(\\) is deprecated", module="oic.utils.time_util") warnings.filterwarnings("ignore", message="datetime.datetime.utcfromtimestamp\\(\\) is deprecated", module="pytz.tzinfo") warnings.filterwarnings("ignore", message="'instantiateVariableFont' is deprecated", module="weasyprint") -warnings.filterwarnings("ignore", category=DeprecationWarning, module="bibtexparser") # https://github.com/sciunto-org/python-bibtexparser/issues/502 -warnings.filterwarnings("ignore", category=DeprecationWarning, module="pyparsing") # https://github.com/sciunto-org/python-bibtexparser/issues/502 base_path = pathlib.Path(__file__).resolve().parent diff --git a/requirements.txt b/requirements.txt index 3f89f6f16c..cb583d5dc9 100644 --- a/requirements.txt +++ b/requirements.txt @@ -4,7 +4,7 @@ setuptools>=80.9.0 # Require this first, to prevent later errors aiosmtpd>=1.4.6 argon2-cffi>=25.1.0 # For the Argon2 password hasher option beautifulsoup4>=4.13.4 # Only used 
in tests -bibtexparser>=1.4.3 # Only used in tests +bibtexparser>=1.4.4 # Only used in tests bleach>=6.2.0 # project is deprecated but supported types-bleach>=6.2.0 boto3>=1.39.15 From 0b637ef4ace72f31a963b7603a8f69a253420810 Mon Sep 17 00:00:00 2001 From: jennifer-richards <19472766+jennifer-richards@users.noreply.github.com> Date: Wed, 11 Feb 2026 19:13:44 +0000 Subject: [PATCH 009/102] ci: update base image target version to 20260211T1901 --- dev/build/Dockerfile | 2 +- dev/build/TARGET_BASE | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/dev/build/Dockerfile b/dev/build/Dockerfile index 41ff295eec..71370fabee 100644 --- a/dev/build/Dockerfile +++ b/dev/build/Dockerfile @@ -1,4 +1,4 @@ -FROM ghcr.io/ietf-tools/datatracker-app-base:20260114T1756 +FROM ghcr.io/ietf-tools/datatracker-app-base:20260211T1901 LABEL maintainer="IETF Tools Team " ENV DEBIAN_FRONTEND=noninteractive diff --git a/dev/build/TARGET_BASE b/dev/build/TARGET_BASE index 3ad31c7e25..947f3790e4 100644 --- a/dev/build/TARGET_BASE +++ b/dev/build/TARGET_BASE @@ -1 +1 @@ -20260114T1756 +20260211T1901 From 492888b8a22113becc2cbe3900ba3294cfc6d7f6 Mon Sep 17 00:00:00 2001 From: Jennifer Richards Date: Thu, 12 Feb 2026 11:34:11 -0400 Subject: [PATCH 010/102] fix: handle doc_rev is None in Document.formats (#10401) Likely a temporary fix, but safer for quick deployment while we work on the bigger project. --- ietf/api/views_rpc.py | 2 +- ietf/doc/models.py | 12 +++++++++++- 2 files changed, 12 insertions(+), 2 deletions(-) diff --git a/ietf/api/views_rpc.py b/ietf/api/views_rpc.py index 6b1799f654..2bf16480f2 100644 --- a/ietf/api/views_rpc.py +++ b/ietf/api/views_rpc.py @@ -487,7 +487,7 @@ def post(self, request): name=self._blob_destination(ftm), file=f, doc_name=rfc.name, - doc_rev=rfc.rev, # expect None, but match whatever it is + doc_rev=rfc.rev, # expect blank, but match whatever it is mtime=mtime, ) destination = self._fs_destination(ftm) diff --git a/ietf/doc/models.py b/ietf/doc/models.py index ec9a25add8..8f700bf496 100644 --- a/ietf/doc/models.py +++ b/ietf/doc/models.py @@ -12,6 +12,8 @@ from io import BufferedReader from pathlib import Path + +from django.db.models import Q from lxml import etree from typing import Optional, Protocol, TYPE_CHECKING, Union from weasyprint import HTML as wpHTML @@ -1291,13 +1293,21 @@ def formats(self): """ if self.type_id != "rfc": raise RuntimeError("Only allowed for type=rfc") + + # StoredObject.doc_rev can be null or "" to represent no rev. 
Match either + # of these when self.rev is "" (always expected to be the case for RFCs) + rev_q = Q(doc_rev=self.rev) + if self.rev == "": + rev_q |= Q(doc_rev__isnull=True) return [ { "fmt": Path(object_name).parts[0], "name": object_name, } for object_name in StoredObject.objects.filter( - store="rfc", doc_name=self.name, doc_rev=self.rev + rev_q, + store="rfc", + doc_name=self.name, ).values_list("name", flat=True) ] From 1b306eb7f3b9b45b3162101c7fc216ac7da3eab1 Mon Sep 17 00:00:00 2001 From: Jennifer Richards Date: Thu, 12 Feb 2026 13:49:22 -0400 Subject: [PATCH 011/102] feat: bofreq timestamp fixup task (#10402) * feat: utility to fix up bofreq timestamps * fix: don't fix -00 (+ logging) * feat: task * chore: disable test coverage for one-off task --- ietf/doc/tasks.py | 8 ++- ietf/doc/utils_bofreq.py | 143 ++++++++++++++++++++++++++++++++++++++- 2 files changed, 147 insertions(+), 4 deletions(-) diff --git a/ietf/doc/tasks.py b/ietf/doc/tasks.py index 02b7c2a07d..b463b9cecf 100644 --- a/ietf/doc/tasks.py +++ b/ietf/doc/tasks.py @@ -1,4 +1,4 @@ -# Copyright The IETF Trust 2024-2025, All Rights Reserved +# Copyright The IETF Trust 2024-2026, All Rights Reserved # # Celery task definitions # @@ -34,6 +34,7 @@ ensure_draft_bibxml_path_exists, investigate_fragment, ) +from .utils_bofreq import fixup_bofreq_timestamps @shared_task @@ -149,3 +150,8 @@ def rebuild_reference_relations_task(doc_names: list[str]): rebuild_reference_relations(doc, filenames) else: log.log(f"Found no content for {stem}") + + +@shared_task +def fixup_bofreq_timestamps_task(): # pragma: nocover + fixup_bofreq_timestamps() diff --git a/ietf/doc/utils_bofreq.py b/ietf/doc/utils_bofreq.py index aec8f60ad6..d01b039b8e 100644 --- a/ietf/doc/utils_bofreq.py +++ b/ietf/doc/utils_bofreq.py @@ -1,12 +1,149 @@ -# Copyright The IETF Trust 2021 All Rights Reserved +# Copyright The IETF Trust 2021-2026 All Rights Reserved +import datetime +from pathlib import Path -from ietf.doc.models import BofreqEditorDocEvent, BofreqResponsibleDocEvent +from django.conf import settings + +from ietf.doc.models import ( + BofreqEditorDocEvent, + BofreqResponsibleDocEvent, + DocEvent, + DocHistory, + Document, +) from ietf.person.models import Person +from ietf.utils import log + def bofreq_editors(bofreq): e = bofreq.latest_event(BofreqEditorDocEvent) return e.editors.all() if e else Person.objects.none() + def bofreq_responsible(bofreq): e = bofreq.latest_event(BofreqResponsibleDocEvent) - return e.responsible.all() if e else Person.objects.none() \ No newline at end of file + return e.responsible.all() if e else Person.objects.none() + + +def fixup_bofreq_timestamps(): # pragma: nocover + """Fixes bofreq event / document timestamps + + Timestamp errors resulted from the bug fixed by + https://github.com/ietf-tools/datatracker/pull/10333 + + Does not fix up -00 revs because the timestamps on these were not affected by + the bug. Replacing their timestamps creates a confusing event history because the + filesystem timestamp is usually a fraction of a second later than other events + created upon the initial rev creation. This causes the "New revision available" + event to appear _after_ these events in the history. Better to leave them as is. 
+ """ + FIX_DEPLOYMENT_TIME = "2026-02-03T01:16:00+00:00" # 12.58.0 -> production + + def _get_doc_time(doc_name: str, rev: str): + path = Path(settings.BOFREQ_PATH) / f"{doc_name}-{rev}.md" + return datetime.datetime.fromtimestamp(path.stat().st_mtime, datetime.UTC) + + # Find affected DocEvents and DocHistories + new_bofreq_events = ( + DocEvent.objects.filter( + doc__type="bofreq", type="new_revision", time__lt=FIX_DEPLOYMENT_TIME + ) + .exclude(rev="00") # bug did not affect rev 00 events + .order_by("doc__name", "rev") + ) + log.log( + f"fixup_bofreq_timestamps: found {new_bofreq_events.count()} " + f"new_revision events before {FIX_DEPLOYMENT_TIME}" + ) + document_fixups = {} + for e in new_bofreq_events: + name = e.doc.name + rev = e.rev + filesystem_time = _get_doc_time(name, rev) + assert e.time < filesystem_time, ( + f"Rev {rev} event timestamp for {name} unexpectedly later than the " + "filesystem timestamp!" + ) + try: + dochistory = DocHistory.objects.filter( + name=name, time__lt=filesystem_time + ).get(rev=rev) + except DocHistory.MultipleObjectsReturned as err: + raise RuntimeError( + f"Multiple DocHistories for {name} rev {rev} exist earlier than the " + "filesystem timestamp!" + ) from err + except DocHistory.DoesNotExist as err: + if rev == "00": + # Unreachable because we don't adjust -00 revs, but could be needed + # if we did, in theory. In practice it's still not reached, but + # keeping the case for completeness. + dochistory = None + else: + raise RuntimeError( + f"No DocHistory for {name} rev {rev} exists earlier than the " + f"filesystem timestamp!" + ) from err + + if name not in document_fixups: + document_fixups[name] = [] + document_fixups[name].append( + { + "event": e, + "dochistory": dochistory, + "filesystem_time": filesystem_time, + } + ) + + # Now do the actual fixup + system_person = Person.objects.get(name="(System)") + for doc_name, fixups in document_fixups.items(): + bofreq = Document.objects.get(type="bofreq", name=doc_name) + log_msg_parts = [] + adjusted_revs = [] + for fixup in fixups: + event_to_fix = fixup["event"] + dh_to_fix = fixup["dochistory"] + new_time = fixup["filesystem_time"] + adjusted_revs.append(event_to_fix.rev) + + # Fix up the event + event_to_fix.time = new_time + event_to_fix.save() + log_msg_parts.append(f"rev {event_to_fix.rev} DocEvent") + + # Fix up the DocHistory + if dh_to_fix is not None: + dh_to_fix.time = new_time + dh_to_fix.save() + log_msg_parts.append(f"rev {dh_to_fix.rev} DocHistory") + + if event_to_fix.rev == bofreq.rev and bofreq.time < new_time: + # Update the Document without calling save(). Only update if + # the time has not changed so we don't inadvertently overwrite + # a concurrent update. 
+ Document.objects.filter(pk=bofreq.pk, time=bofreq.time).update( + time=new_time + ) + bofreq.refresh_from_db() + if bofreq.rev == event_to_fix.rev: + log_msg_parts.append(f"rev {bofreq.rev} Document") + else: + log.log( + "fixup_bofreq_timestamps: WARNING: bofreq Document rev " + f"changed for {bofreq.name}" + ) + log.log(f"fixup_bofreq_timestamps: {bofreq.name}: " + ", ".join(log_msg_parts)) + + # Fix up the Document, if necessary, and add a record of the adjustment + DocEvent.objects.create( + type="added_comment", + by=system_person, + doc=bofreq, + rev=bofreq.rev, + desc=( + "Corrected inaccurate document and new revision event timestamps for " + + ("version " if len(adjusted_revs) == 1 else "versions ") + + ", ".join(adjusted_revs) + ), + ) From 4945809b7804c1f15de1b0340be269dd7e200f95 Mon Sep 17 00:00:00 2001 From: Jennifer Richards Date: Thu, 12 Feb 2026 13:56:46 -0400 Subject: [PATCH 012/102] chore(dev): update beat in docker-compose.yml (#10330) Fixes commented-out config --- docker-compose.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker-compose.yml b/docker-compose.yml index 2440faf121..ebe53cf95a 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -129,7 +129,7 @@ services: # but can be enabled by uncommenting the following. # # beat: -# image: ghcr.io/ietf-tools/datatracker-celery:latest +# image: "${COMPOSE_PROJECT_NAME}-celery" # init: true # environment: # CELERY_APP: ietf From 8005a8baa6ffb72c47d6e35f44c0e5d78b456a2d Mon Sep 17 00:00:00 2001 From: Jennifer Richards Date: Fri, 13 Feb 2026 11:36:23 -0400 Subject: [PATCH 013/102] chore(dev): update docker-compose depends_on (#10410) * chore(dev): update docker-compose depends_on * chore(dev): another depends_on tweak app/celery don't actually use the blobstore container, but the Django config refers to it so we should probably depend on it anyway --- docker-compose.yml | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/docker-compose.yml b/docker-compose.yml index ebe53cf95a..4c3f2f6b8e 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -13,9 +13,10 @@ services: # network_mode: service:db depends_on: + - blobdb + - blobstore - db - mq - - blobstore ipc: host @@ -79,7 +80,10 @@ services: command: - '--loglevel=INFO' depends_on: + - blobdb + - blobstore - db + - mq restart: unless-stopped stop_grace_period: 1m volumes: @@ -102,7 +106,10 @@ services: - '--concurrency=1' depends_on: + - blobdb + - blobstore - db + - mq restart: unless-stopped stop_grace_period: 1m volumes: From 8d804f3427b4d4c40aa6bfadba92a433bd468b26 Mon Sep 17 00:00:00 2001 From: Jennifer Richards Date: Fri, 20 Feb 2026 15:22:24 -0400 Subject: [PATCH 014/102] feat: button to push slide decks to Meetecho (#10431) * refactor: eliminate inline script Partially removes jQuery from this corner * chore: indicate whether slide updates were sent * feat: admin button to push slide decks to Meetecho * test: new test * test: cover interim case --- ietf/meeting/tests_views.py | 63 +++++++++++++++- ietf/meeting/urls.py | 3 +- ietf/meeting/views.py | 46 ++++++++++++ ietf/static/js/session_details.js | 53 ++++++++++++++ ietf/templates/meeting/session_details.html | 81 ++++++--------------- ietf/utils/meetecho.py | 48 +++++++++--- ietf/utils/tests_meetecho.py | 47 ++++++++++-- package.json | 1 + 8 files changed, 268 insertions(+), 74 deletions(-) create mode 100644 ietf/static/js/session_details.js diff --git a/ietf/meeting/tests_views.py b/ietf/meeting/tests_views.py index b94229d969..168999d0aa 100644 --- 
a/ietf/meeting/tests_views.py +++ b/ietf/meeting/tests_views.py @@ -4754,7 +4754,7 @@ def _approval_url(slidesub): 0, "second session proposed slides should be linked for approval", ) - + class EditScheduleListTests(TestCase): def setUp(self): @@ -7345,6 +7345,67 @@ def test_submit_and_approve_multiple_versions(self, mock_slides_manager_cls): fd.close() self.assertIn('third version', contents) + @override_settings( + MEETECHO_API_CONFIG="fake settings" + ) # enough to trigger API calls + @patch("ietf.meeting.views.SlidesManager") + def test_notify_meetecho_of_all_slides(self, mock_slides_manager_cls): + for meeting_type in ["ietf", "interim"]: + # Reset for the sake of the second iteration + self.client.logout() + mock_slides_manager_cls.reset_mock() + + session = SessionFactory(meeting__type_id=meeting_type) + meeting = session.meeting + + # bad meeting + url = urlreverse( + "ietf.meeting.views.notify_meetecho_of_all_slides", + kwargs={"num": 9999, "acronym": session.group.acronym}, + ) + login_testing_unauthorized(self, "secretary", url) + r = self.client.get(url) + self.assertEqual(r.status_code, 404) + r = self.client.post(url) + self.assertEqual(r.status_code, 404) + self.assertFalse(mock_slides_manager_cls.called) + self.client.logout() + + # good meeting + url = urlreverse( + "ietf.meeting.views.notify_meetecho_of_all_slides", + kwargs={"num": meeting.number, "acronym": session.group.acronym}, + ) + login_testing_unauthorized(self, "secretary", url) + r = self.client.get(url) + self.assertEqual(r.status_code, 405) + self.assertFalse(mock_slides_manager_cls.called) + mock_slides_manager = mock_slides_manager_cls.return_value + mock_slides_manager.send_update.return_value = True + r = self.client.post(url) + self.assertEqual(r.status_code, 302) + self.assertEqual(mock_slides_manager.send_update.call_count, 1) + self.assertEqual(mock_slides_manager.send_update.call_args, call(session)) + r = self.client.get(r["Location"]) + messages = list(r.context["messages"]) + self.assertEqual(len(messages), 1) + self.assertEqual( + str(messages[0]), f"Notified Meetecho about slides for {session}" + ) + + mock_slides_manager.send_update.reset_mock() + mock_slides_manager.send_update.return_value = False + r = self.client.post(url) + self.assertEqual(r.status_code, 302) + self.assertEqual(mock_slides_manager.send_update.call_count, 1) + self.assertEqual(mock_slides_manager.send_update.call_args, call(session)) + r = self.client.get(r["Location"]) + messages = list(r.context["messages"]) + self.assertEqual(len(messages), 1) + self.assertIn( + "No sessions were eligible for Meetecho slides update.", str(messages[0]) + ) + @override_settings(IETF_NOTES_URL='https://notes.ietf.org/') class ImportNotesTests(TestCase): diff --git a/ietf/meeting/urls.py b/ietf/meeting/urls.py index af36a6656c..a038e1cfe6 100644 --- a/ietf/meeting/urls.py +++ b/ietf/meeting/urls.py @@ -15,6 +15,7 @@ def get_redirect_url(self, *args, **kwargs): safe_for_all_meeting_types = [ url(r'^session/(?P[-a-z0-9]+)/?$', views.session_details), + url(r'^session/(?P[-a-z0-9]+)/send_slide_notifications$', views.notify_meetecho_of_all_slides), url(r'^session/(?P\d+)/drafts$', views.add_session_drafts), url(r'^session/(?P\d+)/recordings$', views.add_session_recordings), url(r'^session/(?P\d+)/attendance$', views.session_attendance), @@ -30,7 +31,7 @@ def get_redirect_url(self, *args, **kwargs): url(r'^session/(?P\d+)/doc/%(name)s/remove$' % settings.URL_REGEXPS, views.remove_sessionpresentation), url(r'^session/(?P\d+)\.ics$', 
views.agenda_ical), url(r'^sessions/(?P[-a-z0-9]+)\.ics$', views.agenda_ical), - url(r'^slidesubmission/(?P\d+)$', views.approve_proposed_slides) + url(r'^slidesubmission/(?P\d+)$', views.approve_proposed_slides), ] diff --git a/ietf/meeting/views.py b/ietf/meeting/views.py index 8dccda9c87..731dfad88f 100644 --- a/ietf/meeting/views.py +++ b/ietf/meeting/views.py @@ -5710,6 +5710,52 @@ def approve_proposed_slides(request, slidesubmission_id, num): }) +@role_required("Secretariat") +def notify_meetecho_of_all_slides(request, num, acronym): + """Notify meetecho of state of all slides for the group + + Respects the usual notification window around each session. Meetecho will ignore + notices outside that window anyway, so no sense sending them. + """ + meeting = get_meeting(num=num, type_in=None) # raises 404 + if request.method != "POST": + return HttpResponseNotAllowed( + content="Method not allowed", + content_type=f"text/plain; charset={settings.DEFAULT_CHARSET}", + permitted_methods=("POST",), + ) + scheduled_sessions = [ + session + for session in get_sessions(meeting.number, acronym) + if session.current_status == "sched" + ] + sm = SlidesManager(api_config=settings.MEETECHO_API_CONFIG) + updated = [] + for session in scheduled_sessions: + if sm.send_update(session): + updated.append(session) + if len(updated) > 0: + messages.success( + request, + f"Notified Meetecho about slides for {','.join(str(s) for s in updated)}", + ) + elif sm.slides_notify_time is not None: + messages.warning( + request, + "No sessions were eligible for Meetecho slides update. Updates are " + f"only sent within {sm.slides_notify_time} before or after the session.", + ) + else: + messages.warning( + request, + "No sessions were eligible for Meetecho slides update. Updates are " + "currently disabled.", + ) + return redirect( + "ietf.meeting.views.session_details", num=meeting.number, acronym=acronym + ) + + def import_session_minutes(request, session_id, num): """Import session minutes from the ietf.notes.org site diff --git a/ietf/static/js/session_details.js b/ietf/static/js/session_details.js new file mode 100644 index 0000000000..03d1b2d3d9 --- /dev/null +++ b/ietf/static/js/session_details.js @@ -0,0 +1,53 @@ +// Copyright The IETF Trust 2026, All Rights Reserved +// Relies on other scripts being loaded, see usage in session_details.html +document.addEventListener('DOMContentLoaded', () => { + // Init with best guess at local timezone. 
+ ietf_timezone.set_tz_change_callback(timezone_changed) // cb is in upcoming.js + ietf_timezone.initialize('local') + + // Set up sortable elements if the user can manage materials + if (document.getElementById('can-manage-materials-flag')) { + const sortables = [] + const options = { + group: 'slides', + animation: 150, + handle: '.drag-handle', + onAdd: function (event) {onAdd(event)}, + onRemove: function (event) {onRemove(event)}, + onEnd: function (event) {onEnd(event)} + } + + function onAdd (event) { + const old_session = event.from.getAttribute('data-session') + const new_session = event.to.getAttribute('data-session') + $.post(event.to.getAttribute('data-add-to-session'), { + 'order': event.newIndex + 1, + 'name': event.item.getAttribute('name') + }) + $(event.item).find('td:eq(1)').find('a').each(function () { + $(this).attr('href', $(this).attr('href').replace(old_session, new_session)) + }) + } + + function onRemove (event) { + const old_session = event.from.getAttribute('data-session') + $.post(event.from.getAttribute('data-remove-from-session'), { + 'oldIndex': event.oldIndex + 1, + 'name': event.item.getAttribute('name') + }) + } + + function onEnd (event) { + if (event.to == event.from) { + $.post(event.from.getAttribute('data-reorder-in-session'), { + 'oldIndex': event.oldIndex + 1, + 'newIndex': event.newIndex + 1 + }) + } + } + + for (const elt of document.querySelectorAll('.slides tbody')) { + sortables.push(Sortable.create(elt, options)) + } + } +}) diff --git a/ietf/templates/meeting/session_details.html b/ietf/templates/meeting/session_details.html index 55fa3d3857..a4d9ba1090 100644 --- a/ietf/templates/meeting/session_details.html +++ b/ietf/templates/meeting/session_details.html @@ -1,5 +1,5 @@ {% extends "base.html" %} -{# Copyright The IETF Trust 2015, All Rights Reserved #} +{# Copyright The IETF Trust 2015-2026, All Rights Reserved #} {% load origin ietf_filters static %} {% block title %}{{ meeting }} : {{ group.acronym }}{% endblock %} {% block morecss %} @@ -53,69 +53,36 @@

Unscheduled Sessions

{% endif %} {% if forloop.last %}{% endif %} {% endfor %} + {% if user|has_role:"Secretariat" %} +
+
+ Secretariat Only +
+
+
+ {% csrf_token %} + +
+
+
+ {% endif %} + {% comment %} + The existence of an element with id canManageMaterialsFlag is checked in + session_details.js to determine whether it should init the sortable tables. + Not the most elegant approach, but it works. + {% endcomment %} + {% if can_manage_materials %}
{% endif %} {% endblock %} {% block js %} - {% if can_manage_materials %} - {% endif %} + {% endblock %} \ No newline at end of file diff --git a/ietf/utils/meetecho.py b/ietf/utils/meetecho.py index 7654f67cd1..943f3789ef 100644 --- a/ietf/utils/meetecho.py +++ b/ietf/utils/meetecho.py @@ -508,8 +508,13 @@ def _should_send_update(self, session): return (timeslot.time - self.slides_notify_time) < now < (timeslot.end_time() + self.slides_notify_time) def add(self, session: "Session", slides: "Document", order: int): + """Add a slide deck to the session + + Returns True if the update was sent, False if it was not sent because the + current time is outside the update window for the session. + """ if not self._should_send_update(session): - return + return False # Would like to confirm that session.presentations includes the slides Document, but we can't # (same problem regarding unsaved Documents discussed in the docstring) @@ -524,11 +529,16 @@ def add(self, session: "Session", slides: "Document", order: int): "order": order, } ) + return True def delete(self, session: "Session", slides: "Document"): - """Delete a slide deck from the session""" + """Delete a slide deck from the session + + Returns True if the update was sent, False if it was not sent because the + current time is outside the update window for the session. + """ if not self._should_send_update(session): - return + return False if session.presentations.filter(document=slides).exists(): # "order" problems are very likely to result if we delete slides that are actually still @@ -543,12 +553,17 @@ def delete(self, session: "Session", slides: "Document"): id=slides.pk, ) if session.presentations.filter(document__type_id="slides").exists(): - self.send_update(session) # adjust order to fill in the hole + self._send_update(session) # adjust order to fill in the hole + return True def revise(self, session: "Session", slides: "Document"): - """Replace existing deck with its current state""" + """Replace existing deck with its current state + + Returns True if the update was sent, False if it was not sent because the + current time is outside the update window for the session. + """ if not self._should_send_update(session): - return + return False sp = session.presentations.filter(document=slides).first() if sp is None: @@ -561,11 +576,13 @@ def revise(self, session: "Session", slides: "Document"): id=slides.pk, ) self.add(session, slides, order) # fill in the hole + return True - def send_update(self, session: "Session"): - if not self._should_send_update(session): - return - + def _send_update(self, session: "Session"): + """Notify of the current state of the session's slides (no time window check) + + This is a private helper - use send_update() (no leading underscore) instead. + """ self.api.update_slide_decks( wg_token=self.wg_token(session.group), session=str(session.pk), @@ -580,3 +597,14 @@ def send_update(self, session: "Session"): for deck in session.presentations.filter(document__type="slides") ] ) + + def send_update(self, session: "Session"): + """Notify of the current state of the session's slides + + Returns True if the update was sent, False if it was not sent because the + current time is outside the update window for the session. 
+ """ + if not self._should_send_update(session): + return False + self._send_update(session) + return True diff --git a/ietf/utils/tests_meetecho.py b/ietf/utils/tests_meetecho.py index 502e936483..c076a3df74 100644 --- a/ietf/utils/tests_meetecho.py +++ b/ietf/utils/tests_meetecho.py @@ -547,7 +547,8 @@ def test_add(self, mock_add, mock_wg_token): sm = SlidesManager(settings.MEETECHO_API_CONFIG) session = SessionFactory() slides_doc = DocumentFactory(type_id="slides") - sm.add(session, slides_doc, 13) + retval = sm.add(session, slides_doc, 13) + self.assertIs(retval, True) self.assertTrue(mock_wg_token.called) self.assertTrue(mock_add.called) self.assertEqual( @@ -565,6 +566,14 @@ def test_add(self, mock_add, mock_wg_token): ), ) + # Test return value when no update is sent. Really ought to do a more + # careful test of the _should_send_update() method. + sm = SlidesManager( + settings.MEETECHO_API_CONFIG | {"slides_notify_time": None} + ) + retval = sm.add(session, slides_doc, 14) + self.assertIs(retval, False) + @patch("ietf.utils.meetecho.MeetechoAPI.update_slide_decks") @patch("ietf.utils.meetecho.MeetechoAPI.delete_slide_deck") def test_delete(self, mock_delete, mock_update, mock_wg_token): @@ -580,7 +589,8 @@ def test_delete(self, mock_delete, mock_update, mock_wg_token): sm.delete(session, slides_doc) # can't remove slides still attached to the session self.assertFalse(any([mock_wg_token.called, mock_delete.called, mock_update.called])) - sm.delete(session, removed_slides_doc) + retval = sm.delete(session, removed_slides_doc) + self.assertIs(retval, True) self.assertTrue(mock_wg_token.called) self.assertTrue(mock_delete.called) self.assertEqual( @@ -609,9 +619,18 @@ def test_delete(self, mock_delete, mock_update, mock_wg_token): # Delete the other session and check that we don't make the update call slides.delete() - sm.delete(session, slides_doc) + retval = sm.delete(session, slides_doc) + self.assertIs(retval, True) self.assertTrue(mock_delete.called) self.assertFalse(mock_update.called) + + # Test return value when no update is sent. Really ought to do a more + # careful test of the _should_send_update() method. + sm = SlidesManager( + settings.MEETECHO_API_CONFIG | {"slides_notify_time": None} + ) + retval = sm.delete(session, slides_doc) + self.assertIs(retval, False) @patch("ietf.utils.meetecho.MeetechoAPI.delete_slide_deck") @patch("ietf.utils.meetecho.MeetechoAPI.add_slide_deck") @@ -619,7 +638,8 @@ def test_revise(self, mock_add, mock_delete, mock_wg_token): sm = SlidesManager(settings.MEETECHO_API_CONFIG) slides = SessionPresentationFactory(document__type_id="slides", order=23) slides_doc = slides.document - sm.revise(slides.session, slides.document) + retval = sm.revise(slides.session, slides_doc) + self.assertIs(retval, True) self.assertTrue(mock_wg_token.called) self.assertTrue(mock_delete.called) self.assertEqual( @@ -642,13 +662,22 @@ def test_revise(self, mock_add, mock_delete, mock_wg_token): ), ) + # Test return value when no update is sent. Really ought to do a more + # careful test of the _should_send_update() method. 
+ sm = SlidesManager( + settings.MEETECHO_API_CONFIG | {"slides_notify_time": None} + ) + retval = sm.revise(slides.session, slides_doc) + self.assertIs(retval, False) + @patch("ietf.utils.meetecho.MeetechoAPI.update_slide_decks") def test_send_update(self, mock_send_update, mock_wg_token): sm = SlidesManager(settings.MEETECHO_API_CONFIG) slides = SessionPresentationFactory(document__type_id="slides") SessionPresentationFactory(session=slides.session, document__type_id="agenda") - sm.send_update(slides.session) + retval = sm.send_update(slides.session) + self.assertIs(retval, True) self.assertTrue(mock_wg_token.called) self.assertTrue(mock_send_update.called) self.assertEqual( @@ -667,3 +696,11 @@ def test_send_update(self, mock_send_update, mock_wg_token): ] ) ) + + # Test return value when no update is sent. Really ought to do a more + # careful test of the _should_send_update() method. + sm = SlidesManager( + settings.MEETECHO_API_CONFIG | {"slides_notify_time": None} + ) + retval = sm.send_update(slides.session) + self.assertIs(retval, False) diff --git a/package.json b/package.json index e2e6fd7dab..fec29275b4 100644 --- a/package.json +++ b/package.json @@ -148,6 +148,7 @@ "ietf/static/js/moment.js", "ietf/static/js/password_strength.js", "ietf/static/js/select2.js", + "ietf/static/js/session_details.js", "ietf/static/js/session_details_form.js", "ietf/static/js/session_form.js", "ietf/static/js/session_request.js", From 619b2aee0f3b4acbc95a44f6fd3e8785163f6a93 Mon Sep 17 00:00:00 2001 From: Robert Sparks Date: Fri, 20 Feb 2026 13:22:49 -0600 Subject: [PATCH 015/102] fix: adjust draft-stream-ietf state descriptions per IESG (#10437) --- ...ge_draft_stream_ietf_state_descriptions.py | 57 +++++++++++++++++++ 1 file changed, 57 insertions(+) create mode 100644 ietf/doc/migrations/0031_change_draft_stream_ietf_state_descriptions.py diff --git a/ietf/doc/migrations/0031_change_draft_stream_ietf_state_descriptions.py b/ietf/doc/migrations/0031_change_draft_stream_ietf_state_descriptions.py new file mode 100644 index 0000000000..c664126da3 --- /dev/null +++ b/ietf/doc/migrations/0031_change_draft_stream_ietf_state_descriptions.py @@ -0,0 +1,57 @@ +# Copyright The IETF Trust 2026, All Rights Reserved + +from django.db import migrations + + +def forward(apps, schema_editor): + State = apps.get_model("doc", "State") + for name, desc in [ + ( + "Adopted by a WG", + "The individual submission document has been adopted by the Working Group (WG), but some administrative matter still needs to be completed (e.g., a WG document replacing this document with the typical naming convention of 'draft-ietf-wgname-topic-nn' has not yet been submitted).", + ), + ( + "WG Document", + "The document has been identified as a Working Group (WG) document and is under development per Section 7.2 of RFC2418.", + ), + ( + "Waiting for WG Chair Go-Ahead", + "The Working Group (WG) document has completed Working Group Last Call (WGLC), but the WG chairs are not yet ready to call consensus on the document. The reasons for this may include comments from the WGLC need to be responded to, or a revision to the document is needed.", + ), + ( + "Submitted to IESG for Publication", + "The Working Group (WG) document has been submitted to the Internet Engineering Steering Group (IESG) for evaluation and publication per Section 7.4 of RFC2418. 
See the “IESG State” or “RFC Editor State” for further details on the state of the document.", + ), + ]: + State.objects.filter(name=name).update(desc=desc, type="draft-stream-ietf") + + +def reverse(apps, schema_editor): + State = apps.get_model("doc", "State") + for name, desc in [ + ( + "Adopted by a WG", + "The individual submission document has been adopted by the Working Group (WG), but a WG document replacing this document with the typical naming convention of 'draft- ietf-wgname-topic-nn' has not yet been submitted.", + ), + ( + "WG Document", + "The document has been adopted by the Working Group (WG) and is under development. A document can only be adopted by one WG at a time. However, a document may be transferred between WGs.", + ), + ( + "Waiting for WG Chair Go-Ahead", + "The Working Group (WG) document has completed Working Group Last Call (WGLC), but the WG chair(s) are not yet ready to call consensus on the document. The reasons for this may include comments from the WGLC need to be responded to, or a revision to the document is needed", + ), + ( + "Submitted to IESG for Publication", + "The Working Group (WG) document has left the WG and been submitted to the Internet Engineering Steering Group (IESG) for evaluation and publication. See the “IESG State” or “RFC Editor State” for further details on the state of the document.", + ), + ]: + State.objects.filter(name=name).update(desc=desc, type="draft-stream-ietf") + + +class Migration(migrations.Migration): + dependencies = [ + ("doc", "0030_alter_dochistory_title_alter_document_title"), + ] + + operations = [migrations.RunPython(forward, reverse)] From c4be6318f73cbf896b5cc1f3416040e12b4611f4 Mon Sep 17 00:00:00 2001 From: Jennifer Richards Date: Fri, 20 Feb 2026 15:25:57 -0400 Subject: [PATCH 016/102] feat: more API fields+filtering; drop RfcAuthor.email field (#10432) * feat: RfcAuthorSerializer.email is current email * refactor: RfcAuthor email field -> property * feat: more RfcMetadataSerializer fields * shepherd email (with a fallback to the draft) * doc ad email * area ad emails * group list email * fix: filter RFCs by any group type * feat: filter by RFC numbers * fix: shepherd -> draft object in response JSON * fix: consistent filter naming * chore: migration * test: update test_notify_rfc_published * fix: RfcAuthor.email() -> Email, not str * fix: update RfcAuthorFactory * fix: consistent blank value in email() * fix: guard against non-prefetched queryset * test: fix nomcom test * refactor: name-addr -> addr for ad/shepherd Also falls back to current primary email for ad/shepherd if the email on record is inactive. 
--- ietf/api/tests_views_rpc.py | 25 +++++----- ietf/doc/admin.py | 4 +- ietf/doc/api.py | 10 +++- ietf/doc/factories.py | 1 - .../migrations/0031_remove_rfcauthor_email.py | 16 ++++++ ietf/doc/models.py | 6 ++- ietf/doc/serializers.py | 50 +++++++++++++++---- ietf/doc/views_doc.py | 2 +- ietf/group/serializers.py | 32 ++++++++++-- ietf/nomcom/tests.py | 1 - 10 files changed, 115 insertions(+), 32 deletions(-) create mode 100644 ietf/doc/migrations/0031_remove_rfcauthor_email.py diff --git a/ietf/api/tests_views_rpc.py b/ietf/api/tests_views_rpc.py index 09fb40bf6e..1fbb4c3f02 100644 --- a/ietf/api/tests_views_rpc.py +++ b/ietf/api/tests_views_rpc.py @@ -143,22 +143,23 @@ def test_notify_rfc_published(self): self.assertEqual(rfc.title, "RFC " + draft.title) self.assertEqual(rfc.documentauthor_set.count(), 0) self.assertEqual( - list( - rfc.rfcauthor_set.values( - "titlepage_name", - "is_editor", - "person", - "email", - "affiliation", - "country", - ) - ), + [ + { + "titlepage_name": ra.titlepage_name, + "is_editor": ra.is_editor, + "person": ra.person, + "email": ra.email, + "affiliation": ra.affiliation, + "country": ra.country, + } + for ra in rfc.rfcauthor_set.all() + ], [ { "titlepage_name": f"titlepage {author.name}", "is_editor": False, - "person": author.pk, - "email": author.email_address(), + "person": author, + "email": author.email(), "affiliation": "Some Affiliation", "country": "CA", } diff --git a/ietf/doc/admin.py b/ietf/doc/admin.py index f082418935..b604d4f096 100644 --- a/ietf/doc/admin.py +++ b/ietf/doc/admin.py @@ -242,6 +242,6 @@ def is_deleted(self, instance): class RfcAuthorAdmin(admin.ModelAdmin): list_display = ['id', 'document', 'titlepage_name', 'person', 'email', 'affiliation', 'country', 'order'] - search_fields = ['document__name', 'titlepage_name', 'person__name', 'email__address', 'affiliation', 'country'] - raw_id_fields = ["document", "person", "email"] + search_fields = ['document__name', 'titlepage_name', 'person__name', 'email', 'affiliation', 'country'] + raw_id_fields = ["document", "person"] admin.site.register(RfcAuthor, RfcAuthorAdmin) diff --git a/ietf/doc/api.py b/ietf/doc/api.py index 6a4c0c9fd5..75993f463e 100644 --- a/ietf/doc/api.py +++ b/ietf/doc/api.py @@ -42,13 +42,21 @@ class RfcLimitOffsetPagination(LimitOffsetPagination): max_limit = 500 +class NumberInFilter(filters.BaseInFilter, filters.NumberFilter): + """Filter against a comma-separated list of numbers""" + pass + + class RfcFilter(filters.FilterSet): published = filters.DateFromToRangeFilter() stream = filters.ModelMultipleChoiceFilter( queryset=StreamName.objects.filter(used=True) ) + number = NumberInFilter( + field_name="rfc_number" + ) group = filters.ModelMultipleChoiceFilter( - queryset=Group.objects.wgs(), + queryset=Group.objects.all(), field_name="group__acronym", to_field_name="acronym", ) diff --git a/ietf/doc/factories.py b/ietf/doc/factories.py index aad01be04f..bc38765446 100644 --- a/ietf/doc/factories.py +++ b/ietf/doc/factories.py @@ -391,7 +391,6 @@ class Meta: lambda obj: " ".join([obj.person.initials(), obj.person.last_name()]) ) person = factory.SubFactory('ietf.person.factories.PersonFactory') - email = factory.LazyAttribute(lambda obj: obj.person.email()) affiliation = factory.Faker('company') order = factory.LazyAttribute(lambda o: o.document.rfcauthor_set.count() + 1) diff --git a/ietf/doc/migrations/0031_remove_rfcauthor_email.py b/ietf/doc/migrations/0031_remove_rfcauthor_email.py new file mode 100644 index 0000000000..c4c1911bfe --- /dev/null +++ 
b/ietf/doc/migrations/0031_remove_rfcauthor_email.py @@ -0,0 +1,16 @@ +# Copyright The IETF Trust 2026, All Rights Reserved + +from django.db import migrations + + +class Migration(migrations.Migration): + dependencies = [ + ("doc", "0030_alter_dochistory_title_alter_document_title"), + ] + + operations = [ + migrations.RemoveField( + model_name="rfcauthor", + name="email", + ), + ] diff --git a/ietf/doc/models.py b/ietf/doc/models.py index 8f700bf496..cc28951be0 100644 --- a/ietf/doc/models.py +++ b/ietf/doc/models.py @@ -937,7 +937,6 @@ class RfcAuthor(models.Model): titlepage_name = models.CharField(max_length=128, blank=False) is_editor = models.BooleanField(default=False) person = ForeignKey(Person, null=True, blank=True, on_delete=models.PROTECT) - email = ForeignKey(Email, help_text="Email address used by author for submission", blank=True, null=True, on_delete=models.PROTECT) affiliation = models.CharField(max_length=100, blank=True, help_text="Organization/company used by author for submission") country = models.CharField(max_length=255, blank=True, help_text="Country used by author for submission") order = models.IntegerField(default=1) @@ -951,6 +950,11 @@ class Meta: models.Index(fields=["document", "order"]) ] + @property + def email(self) -> Email | None: + return self.person.email() if self.person else None + + class DocumentAuthorInfo(models.Model): person = ForeignKey(Person) # email should only be null for some historic documents diff --git a/ietf/doc/serializers.py b/ietf/doc/serializers.py index e8d373164b..b054b074d7 100644 --- a/ietf/doc/serializers.py +++ b/ietf/doc/serializers.py @@ -9,14 +9,20 @@ from drf_spectacular.utils import extend_schema_field from rest_framework import serializers -from ietf.group.serializers import GroupSerializer +from ietf.group.serializers import ( + AreaDirectorSerializer, + AreaSerializer, + GroupSerializer, +) from ietf.name.serializers import StreamNameSerializer +from ietf.utils import log from .models import Document, DocumentAuthor, RfcAuthor class RfcAuthorSerializer(serializers.ModelSerializer): """Serializer for an RfcAuthor / DocumentAuthor in a response""" + email = serializers.EmailField(source="email.address", read_only=True) datatracker_person_path = serializers.URLField( source="person.get_absolute_url", required=False, @@ -29,7 +35,7 @@ class Meta: "titlepage_name", "is_editor", "person", - "email", # relies on email.pk being email.address + "email", "affiliation", "country", "datatracker_person_path", @@ -48,7 +54,6 @@ def to_representation(self, instance): titlepage_name=document_author.person.plain_name(), is_editor=False, person=document_author.person, - email=document_author.email, affiliation=document_author.affiliation, country=document_author.country, order=document_author.order, @@ -174,10 +179,16 @@ def to_representation(self, instance: Document): return super().to_representation(instance=RfcStatus.from_document(instance)) +class ShepherdSerializer(serializers.Serializer): + email = serializers.EmailField(source="email_address") + + class RelatedDraftSerializer(serializers.Serializer): id = serializers.IntegerField(source="source.id") name = serializers.CharField(source="source.name") title = serializers.CharField(source="source.title") + shepherd = ShepherdSerializer(source="source.shepherd") + ad = AreaDirectorSerializer(source="source.ad") class RelatedRfcSerializer(serializers.Serializer): @@ -205,15 +216,23 @@ class RfcFormatSerializer(serializers.Serializer): class 
RfcMetadataSerializer(serializers.ModelSerializer): - """Serialize metadata of an RFC""" + """Serialize metadata of an RFC + + This needs to be called with a Document queryset that has been processed with + api.augment_rfc_queryset() or it very likely will not work. Some of the typing + refers to Document, but this should really be WithAnnotations[Document, ...]. + However, have not been able to make that work yet. + """ number = serializers.IntegerField(source="rfc_number") published = serializers.DateField() status = RfcStatusSerializer(source="*") authors = serializers.SerializerMethodField() group = GroupSerializer() - area = GroupSerializer(source="group.area", required=False) + area = AreaSerializer(source="group.area", required=False) stream = StreamNameSerializer() + ad = AreaDirectorSerializer(read_only=True) + group_list_email = serializers.EmailField(source="group.list_email", read_only=True) identifiers = serializers.SerializerMethodField() draft = serializers.SerializerMethodField() obsoletes = RelatedRfcSerializer(many=True, read_only=True) @@ -239,6 +258,8 @@ class Meta: "group", "area", "stream", + "ad", + "group_list_email", "identifiers", "obsoletes", "obsoleted_by", @@ -276,11 +297,20 @@ def get_identifiers(self, doc: Document): return DocIdentifierSerializer(instance=identifiers, many=True).data @extend_schema_field(RelatedDraftSerializer) - def get_draft(self, object): - try: - related_doc = object.drafts[0] - except IndexError: - return None + def get_draft(self, doc: Document): + if hasattr(doc, "drafts"): + # This is the expected case - drafts is added by a Prefetch in + # the augment_rfc_queryset() method. + try: + related_doc = doc.drafts[0] + except IndexError: + return None + else: + # Fallback in case augment_rfc_queryset() was not called + log.log( + f"Warning: {self.__class__}.get_draft() called without prefetched draft" + ) + related_doc = doc.came_from_draft() return RelatedDraftSerializer(related_doc).data diff --git a/ietf/doc/views_doc.py b/ietf/doc/views_doc.py index 0578da1b77..0ae7520681 100644 --- a/ietf/doc/views_doc.py +++ b/ietf/doc/views_doc.py @@ -1657,7 +1657,7 @@ def extract_name(s): doc.rfcauthor_set if doc.type_id == "rfc" and doc.rfcauthor_set.exists() else doc.documentauthor_set - ).select_related("person", "email").order_by("order") + ).select_related("person").prefetch_related("person__email_set").order_by("order") data["authors"] = [ { "name": author.titlepage_name if hasattr(author, "titlepage_name") else author.person.name, diff --git a/ietf/group/serializers.py b/ietf/group/serializers.py index 08e6bba81a..85f209019c 100644 --- a/ietf/group/serializers.py +++ b/ietf/group/serializers.py @@ -1,11 +1,37 @@ -# Copyright The IETF Trust 2024, All Rights Reserved +# Copyright The IETF Trust 2024-2026, All Rights Reserved """django-rest-framework serializers""" + +from drf_spectacular.utils import extend_schema_field from rest_framework import serializers -from .models import Group +from ietf.person.models import Email +from .models import Group, Role class GroupSerializer(serializers.ModelSerializer): class Meta: model = Group - fields = ["acronym", "name", "type"] + fields = ["acronym", "name", "type", "list_email"] + + +class AreaDirectorSerializer(serializers.Serializer): + """Serialize an area director + + Works with Email or Role + """ + + email = serializers.SerializerMethodField() + + @extend_schema_field(serializers.EmailField) + def get_email(self, instance: Email | Role): + if isinstance(instance, Role): + return 
instance.email.email_address() + return instance.email_address() + + +class AreaSerializer(serializers.ModelSerializer): + ads = AreaDirectorSerializer(many=True, read_only=True) + + class Meta: + model = Group + fields = ["acronym", "name", "type", "ads"] diff --git a/ietf/nomcom/tests.py b/ietf/nomcom/tests.py index b6e8c57da7..210788ce07 100644 --- a/ietf/nomcom/tests.py +++ b/ietf/nomcom/tests.py @@ -2528,7 +2528,6 @@ def test_get_qualified_author_queryset(self): document=rfc, person=people[0], titlepage_name="P. Zero", - email=people[0].email_set.first(), ) self.assertCountEqual( get_qualified_author_queryset(base_qs, now - 5 * one_year, now), From d7319030f3d3a38bf0c048e713bc1f068ca228ed Mon Sep 17 00:00:00 2001 From: Jennifer Richards Date: Fri, 20 Feb 2026 16:12:43 -0400 Subject: [PATCH 017/102] chore: renumber migrations (#10441) --- ...remove_rfcauthor_email.py => 0032_remove_rfcauthor_email.py} | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) rename ietf/doc/migrations/{0031_remove_rfcauthor_email.py => 0032_remove_rfcauthor_email.py} (80%) diff --git a/ietf/doc/migrations/0031_remove_rfcauthor_email.py b/ietf/doc/migrations/0032_remove_rfcauthor_email.py similarity index 80% rename from ietf/doc/migrations/0031_remove_rfcauthor_email.py rename to ietf/doc/migrations/0032_remove_rfcauthor_email.py index c4c1911bfe..a0e147da59 100644 --- a/ietf/doc/migrations/0031_remove_rfcauthor_email.py +++ b/ietf/doc/migrations/0032_remove_rfcauthor_email.py @@ -5,7 +5,7 @@ class Migration(migrations.Migration): dependencies = [ - ("doc", "0030_alter_dochistory_title_alter_document_title"), + ("doc", "0031_change_draft_stream_ietf_state_descriptions"), ] operations = [ From abf5e0d97ca38ede129b218d3a895f9bb5ab441b Mon Sep 17 00:00:00 2001 From: Jennifer Richards Date: Fri, 20 Feb 2026 17:45:31 -0400 Subject: [PATCH 018/102] fix: allow null for shepherd/ad (#10443) --- ietf/doc/serializers.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/ietf/doc/serializers.py b/ietf/doc/serializers.py index b054b074d7..e42a6a0293 100644 --- a/ietf/doc/serializers.py +++ b/ietf/doc/serializers.py @@ -187,8 +187,8 @@ class RelatedDraftSerializer(serializers.Serializer): id = serializers.IntegerField(source="source.id") name = serializers.CharField(source="source.name") title = serializers.CharField(source="source.title") - shepherd = ShepherdSerializer(source="source.shepherd") - ad = AreaDirectorSerializer(source="source.ad") + shepherd = ShepherdSerializer(source="source.shepherd", allow_null=True) + ad = AreaDirectorSerializer(source="source.ad", allow_null=True) class RelatedRfcSerializer(serializers.Serializer): @@ -231,7 +231,7 @@ class RfcMetadataSerializer(serializers.ModelSerializer): group = GroupSerializer() area = AreaSerializer(source="group.area", required=False) stream = StreamNameSerializer() - ad = AreaDirectorSerializer(read_only=True) + ad = AreaDirectorSerializer(read_only=True, allow_null=True) group_list_email = serializers.EmailField(source="group.list_email", read_only=True) identifiers = serializers.SerializerMethodField() draft = serializers.SerializerMethodField() From 18902ff1be5746cafc958561e563f00be5f05176 Mon Sep 17 00:00:00 2001 From: Jennifer Richards Date: Tue, 24 Feb 2026 13:10:42 -0400 Subject: [PATCH 019/102] fix: more accurate group areas (red API) (#10462) * fix: RFC area only for ietf stream * fix: no ADs for inactive areas --- ietf/doc/serializers.py | 19 ++++++++++++++++++- ietf/group/serializers.py | 9 ++++++++- 2 files changed, 26 
insertions(+), 2 deletions(-) diff --git a/ietf/doc/serializers.py b/ietf/doc/serializers.py index e42a6a0293..36076c30be 100644 --- a/ietf/doc/serializers.py +++ b/ietf/doc/serializers.py @@ -229,7 +229,7 @@ class RfcMetadataSerializer(serializers.ModelSerializer): status = RfcStatusSerializer(source="*") authors = serializers.SerializerMethodField() group = GroupSerializer() - area = AreaSerializer(source="group.area", required=False) + area = serializers.SerializerMethodField() stream = StreamNameSerializer() ad = AreaDirectorSerializer(read_only=True, allow_null=True) group_list_email = serializers.EmailField(source="group.list_email", read_only=True) @@ -287,6 +287,23 @@ def get_authors(self, doc: Document): many=True, ).data + @extend_schema_field(AreaSerializer(required=False)) + def get_area(self, doc: Document): + """Get area for the RFC + + This logic might be better moved to Document or a combination of Document + and Group. The current (2026-02-24) Group.area() method is not strict enough: + it does not limit to WG groups or IETF-stream documents. + """ + if doc.stream_id != "ietf": + return None + if doc.group is None: + return None + parent = doc.group.parent + if parent.type_id == "area": + return AreaSerializer(parent).data + return None + @extend_schema_field(DocIdentifierSerializer(many=True)) def get_identifiers(self, doc: Document): identifiers = [] diff --git a/ietf/group/serializers.py b/ietf/group/serializers.py index 85f209019c..4ba92232c5 100644 --- a/ietf/group/serializers.py +++ b/ietf/group/serializers.py @@ -30,8 +30,15 @@ def get_email(self, instance: Email | Role): class AreaSerializer(serializers.ModelSerializer): - ads = AreaDirectorSerializer(many=True, read_only=True) + ads = serializers.SerializerMethodField() class Meta: model = Group fields = ["acronym", "name", "type", "ads"] + + @extend_schema_field(AreaDirectorSerializer(many=True)) + def get_ads(self, area: Group): + return AreaDirectorSerializer( + area.ads() if area.is_active else Role.objects.none(), + many=True, + ).data From 07efd2b078a461da2eb7e197fc91f2ae0b45ac40 Mon Sep 17 00:00:00 2001 From: Jennifer Richards Date: Wed, 25 Feb 2026 12:58:57 -0400 Subject: [PATCH 020/102] fix: unbreak red API + group serializer tests (#10467) * test: group serializer tests * fix: Group.ads is a property * fix: no need for type in AreaSerializer --- ietf/group/serializers.py | 4 +- ietf/group/tests_serializers.py | 90 +++++++++++++++++++++++++++++++++ 2 files changed, 92 insertions(+), 2 deletions(-) create mode 100644 ietf/group/tests_serializers.py diff --git a/ietf/group/serializers.py b/ietf/group/serializers.py index 4ba92232c5..db3b37af48 100644 --- a/ietf/group/serializers.py +++ b/ietf/group/serializers.py @@ -34,11 +34,11 @@ class AreaSerializer(serializers.ModelSerializer): class Meta: model = Group - fields = ["acronym", "name", "type", "ads"] + fields = ["acronym", "name", "ads"] @extend_schema_field(AreaDirectorSerializer(many=True)) def get_ads(self, area: Group): return AreaDirectorSerializer( - area.ads() if area.is_active else Role.objects.none(), + area.ads if area.is_active else Role.objects.none(), many=True, ).data diff --git a/ietf/group/tests_serializers.py b/ietf/group/tests_serializers.py new file mode 100644 index 0000000000..bf29e6c8fd --- /dev/null +++ b/ietf/group/tests_serializers.py @@ -0,0 +1,90 @@ +# Copyright The IETF Trust 2026, All Rights Reserved +from ietf.group.factories import RoleFactory, GroupFactory +from ietf.group.serializers import ( + AreaDirectorSerializer, + 
AreaSerializer, + GroupSerializer, +) +from ietf.person.factories import EmailFactory +from ietf.utils.test_utils import TestCase + + +class GroupSerializerTests(TestCase): + def test_serializes(self): + wg = GroupFactory() + serialized = GroupSerializer(wg).data + self.assertEqual( + serialized, + { + "acronym": wg.acronym, + "name": wg.name, + "type": "wg", + "list_email": wg.list_email, + }, + ) + + +class AreaDirectorSerializerTests(TestCase): + def test_serializes_role(self): + """Should serialize a Role correctly""" + role = RoleFactory(group__type_id="area", name_id="ad") + serialized = AreaDirectorSerializer(role).data + self.assertEqual( + serialized, + {"email": role.email.email_address()}, + ) + + def test_serializes_email(self): + """Should serialize an Email correctly""" + email = EmailFactory() + serialized = AreaDirectorSerializer(email).data + self.assertEqual( + serialized, + {"email": email.email_address()}, + ) + + +class AreaSerializerTests(TestCase): + def test_serializes_active_area(self): + """Should serialize an active area correctly""" + area = GroupFactory(type_id="area", state_id="active") + serialized = AreaSerializer(area).data + self.assertEqual( + serialized, + { + "acronym": area.acronym, + "name": area.name, + "ads": [], + }, + ) + ad_roles = RoleFactory.create_batch(2, group=area, name_id="ad") + serialized = AreaSerializer(area).data + self.assertEqual(serialized["acronym"], area.acronym) + self.assertEqual(serialized["name"], area.name) + self.assertCountEqual( + serialized["ads"], + [{"email": ad.email.email_address()} for ad in ad_roles], + ) + + def test_serializes_inactive_area(self): + """Should serialize an inactive area correctly""" + area = GroupFactory(type_id="area", state_id="conclude") + serialized = AreaSerializer(area).data + self.assertEqual( + serialized, + { + "acronym": area.acronym, + "name": area.name, + "ads": [], + }, + ) + RoleFactory.create_batch(2, group=area, name_id="ad") + serialized = AreaSerializer(area).data + self.assertEqual( + serialized, + { + "acronym": area.acronym, + "name": area.name, + "ads": [], + }, + ) From b81249884877e20c6e311478fe25b472c869c555 Mon Sep 17 00:00:00 2001 From: Robert Sparks Date: Wed, 25 Feb 2026 13:24:30 -0600 Subject: [PATCH 021/102] feat: notify errata system of new rfc (#10465) * feat: notify errata system of new rfc * chore: ruff --- docker/configs/settings_local.py | 4 ++++ ietf/api/tests_views_rpc.py | 28 +++++++++++++++++++------ ietf/api/views_rpc.py | 9 +++++++- ietf/doc/tasks.py | 5 +++++ ietf/doc/utils_errata.py | 35 ++++++++++++++++++++++++++++++++ ietf/settings.py | 5 +++++ 6 files changed, 79 insertions(+), 7 deletions(-) create mode 100644 ietf/doc/utils_errata.py diff --git a/docker/configs/settings_local.py b/docker/configs/settings_local.py index e357ce3f73..1d4e6916b9 100644 --- a/docker/configs/settings_local.py +++ b/docker/configs/settings_local.py @@ -105,3 +105,7 @@ "ietf.api.red_api" : ["devtoken", "redtoken"], # Not a real secret "ietf.api.views_rpc" : ["devtoken"], # Not a real secret } + +# Errata system api configuration +ERRATA_METADATA_NOTIFICATION_URL = "http://host.docker.internal:8808/api/rfc_metadata_update/" +ERRATA_METADATA_NOTIFICATION_API_KEY = "not a real secret" diff --git a/ietf/api/tests_views_rpc.py b/ietf/api/tests_views_rpc.py index 1fbb4c3f02..6a5a5c9b88 100644 --- a/ietf/api/tests_views_rpc.py +++ b/ietf/api/tests_views_rpc.py @@ -9,9 +9,10 @@ from django.db.models.functions import Coalesce from django.test.utils import override_settings from 
django.urls import reverse as urlreverse +import mock from ietf.blobdb.models import Blob -from ietf.doc.factories import IndividualDraftFactory, WgDraftFactory, WgRfcFactory +from ietf.doc.factories import IndividualDraftFactory, RfcFactory, WgDraftFactory, WgRfcFactory from ietf.doc.models import RelatedDocument, Document from ietf.group.factories import RoleFactory, GroupFactory from ietf.person.factories import PersonFactory @@ -77,7 +78,8 @@ def test_draftviewset_references(self): self.assertEqual(refs[0]["name"], draft_bar.name) @override_settings(APP_API_TOKENS={"ietf.api.views_rpc": ["valid-token"]}) - def test_notify_rfc_published(self): + @mock.patch("ietf.doc.tasks.signal_update_rfc_metadata_task.delay") + def test_notify_rfc_published(self, mock_task_delay): url = urlreverse("ietf.api.purple_api.notify_rfc_published") area = GroupFactory(type_id="area") rfc_group = GroupFactory(type_id="wg") @@ -90,6 +92,8 @@ def test_notify_rfc_published(self): ) rfc_stream_id = "ise" assert isinstance(draft, Document), "WgDraftFactory should generate a Document" + updates = RfcFactory.create_batch(2) + obsoletes = RfcFactory.create_batch(2) unused_rfc_number = ( Document.objects.filter(rfc_number__isnull=False).aggregate( unused_rfc_number=Max("rfc_number") + 1 @@ -120,8 +124,8 @@ def test_notify_rfc_published(self): "pages": draft.pages + 10, "std_level": "ps", "ad": rfc_ad.pk, - "obsoletes": [], - "updates": [], + "obsoletes": [o.rfc_number for o in obsoletes], + "updates": [o.rfc_number for o in updates], "subseries": [], } r = self.client.post(url, data=post_data, format="json") @@ -172,13 +176,25 @@ def test_notify_rfc_published(self): self.assertEqual(rfc.pages, draft.pages + 10) self.assertEqual(rfc.std_level_id, "ps") self.assertEqual(rfc.ad, rfc_ad) - self.assertEqual(rfc.related_that_doc("obs"), []) - self.assertEqual(rfc.related_that_doc("updates"), []) + self.assertEqual(set(rfc.related_that_doc("obs")), set([o for o in obsoletes])) + self.assertEqual( + set(rfc.related_that_doc("updates")), set([o for o in updates]) + ) self.assertEqual(rfc.part_of(), []) self.assertEqual(draft.get_state().slug, "rfc") # todo test non-empty relationships # todo test references (when updating that is part of the handling) + self.assertTrue(mock_task_delay.called) + mock_args, mock_kwargs = mock_task_delay.call_args + self.assertIn("rfc_number_list", mock_kwargs) + expected_rfc_number_list = [rfc.rfc_number] + expected_rfc_number_list.extend( + [d.rfc_number for d in updates + obsoletes] + ) + expected_rfc_number_list = sorted(set(expected_rfc_number_list)) + self.assertEqual(mock_kwargs["rfc_number_list"], expected_rfc_number_list) + @override_settings(APP_API_TOKENS={"ietf.api.views_rpc": ["valid-token"]}) def test_upload_rfc_files(self): def _valid_post_data(): diff --git a/ietf/api/views_rpc.py b/ietf/api/views_rpc.py index 2bf16480f2..9273590b28 100644 --- a/ietf/api/views_rpc.py +++ b/ietf/api/views_rpc.py @@ -38,6 +38,7 @@ from ietf.doc.models import Document, DocHistory, RfcAuthor from ietf.doc.serializers import RfcAuthorSerializer from ietf.doc.storage_utils import remove_from_storage, store_file, exists_in_storage +from ietf.doc.tasks import signal_update_rfc_metadata_task from ietf.person.models import Email, Person @@ -362,7 +363,7 @@ def post(self, request): serializer.is_valid(raise_exception=True) # Create RFC try: - serializer.save() + rfc = serializer.save() except IntegrityError as err: if Document.objects.filter( rfc_number=serializer.validated_data["rfc_number"] @@ -375,6 
+376,12 @@ def post(self, request): f"Unable to publish: {err}", code="unknown-integrity-error", ) + rfc_number_list = [rfc.rfc_number] + rfc_number_list.extend( + [d.rfc_number for d in rfc.related_that_doc(("updates", "obs"))] + ) + rfc_number_list = sorted(set(rfc_number_list)) + signal_update_rfc_metadata_task.delay(rfc_number_list=rfc_number_list) return Response(NotificationAckSerializer().data) diff --git a/ietf/doc/tasks.py b/ietf/doc/tasks.py index b463b9cecf..90f4c80af5 100644 --- a/ietf/doc/tasks.py +++ b/ietf/doc/tasks.py @@ -35,6 +35,7 @@ investigate_fragment, ) from .utils_bofreq import fixup_bofreq_timestamps +from .utils_errata import signal_update_rfc_metadata @shared_task @@ -155,3 +156,7 @@ def rebuild_reference_relations_task(doc_names: list[str]): @shared_task def fixup_bofreq_timestamps_task(): # pragma: nocover fixup_bofreq_timestamps() + +@shared_task +def signal_update_rfc_metadata_task(rfc_number_list=()): + signal_update_rfc_metadata(rfc_number_list) diff --git a/ietf/doc/utils_errata.py b/ietf/doc/utils_errata.py new file mode 100644 index 0000000000..539262151f --- /dev/null +++ b/ietf/doc/utils_errata.py @@ -0,0 +1,35 @@ +# Copyright The IETF Trust 2026, All Rights Reserved + +import requests + +from django.conf import settings + +from ietf.utils.log import log + + +def signal_update_rfc_metadata(rfc_number_list=()): + key = getattr(settings, "ERRATA_METADATA_NOTIFICATION_API_KEY", None) + if key is not None: + headers = {"X-Api-Key": settings.ERRATA_METADATA_NOTIFICATION_API_KEY} + post_dict = { + "rfc_number_list": list(rfc_number_list), + } + try: + response = requests.post( + settings.ERRATA_METADATA_NOTIFICATION_URL, + headers=headers, + json=post_dict, + timeout=settings.DEFAULT_REQUESTS_TIMEOUT, + ) + except requests.Timeout as e: + log( + f"POST request timed out for {settings.ERRATA_METADATA_NOTIFICATION_URL} ]: {e}" + ) + # raise RuntimeError(f'POST request timed out for {settings.ERRATA_METADATA_NOTIFICATION_URL}') from e + return + if response.status_code != 200: + log( + f"POST request failed for {settings.ERRATA_METADATA_NOTIFICATION_URL} ]: {response.status_code} {response.text}" + ) + else: + log("No API key configured for errata metadata notification, skipping") diff --git a/ietf/settings.py b/ietf/settings.py index 565e8825a9..71b110d762 100644 --- a/ietf/settings.py +++ b/ietf/settings.py @@ -1368,6 +1368,11 @@ def skip_unreadable_post(record): MEETECHO_AUDIO_STREAM_URL = "https://mp3.conf.meetecho.com/ietf{session.meeting.number}/{session.pk}.m3u" MEETECHO_SESSION_RECORDING_URL = "https://meetecho-player.ietf.org/playout/?session={session_label}" +# Errata system api configuration +# settings should provide +# ERRATA_METADATA_NOTIFICATION_URL +# ERRATA_METADATA_NOTIFICATION_API_KEY + # Put the production SECRET_KEY in settings_local.py, and also any other # sensitive or site-specific changes. DO NOT commit settings_local.py to svn. 
from ietf.settings_local import * # pyflakes:ignore pylint: disable=wildcard-import From da5614c4963c3dc4ff8e901c1edd888296219a0d Mon Sep 17 00:00:00 2001 From: Jennifer Richards Date: Wed, 25 Feb 2026 18:14:47 -0400 Subject: [PATCH 022/102] test: avoid random fail in test_rfc_index (#10469) --- ietf/sync/tests.py | 1 + 1 file changed, 1 insertion(+) diff --git a/ietf/sync/tests.py b/ietf/sync/tests.py index bcc87a43aa..21d6cb5cd5 100644 --- a/ietf/sync/tests.py +++ b/ietf/sync/tests.py @@ -301,6 +301,7 @@ def test_rfc_index(self): ad=Person.objects.get(user__username='ad'), external_url="http://my-external-url.example.com", note="this is a note", + pages=54, # make sure this is not 42 ) DocumentAuthorFactory.create_batch(2, document=draft_doc) draft_doc.action_holders.add(draft_doc.ad) # not normally set, but add to be sure it's cleared From c1c24d012d23135725f0206dbe1a6be1e2a7fef4 Mon Sep 17 00:00:00 2001 From: Jennifer Richards Date: Fri, 27 Feb 2026 14:31:40 -0400 Subject: [PATCH 023/102] feat: RFC metadata update API (#10476) * feat: more editable RFC fields for API (WIP) Checkpoint commit! * chore: avoid requiring prefetch Makes some fields write-only to achieve this. * refactor: replace EditableRfcSerializer * fix: mark read-only field properly * refactor: SubseriesNameField * test: EditableRfcSerializer * refactor: DocEvent adjustment * feat: record person ids for authors * chore: adjust history message * fix: always save!! * fix: better msg formatting * fix: _almost_ always save!! * fix: lint * refactor: rename var --- ietf/api/serializers_rpc.py | 227 ++++++++++++++++++++++++++---- ietf/api/tests_serializers_rpc.py | 139 ++++++++++++++++++ ietf/api/views_rpc.py | 12 +- ietf/doc/serializers.py | 1 + ietf/doc/utils.py | 16 ++- 5 files changed, 361 insertions(+), 34 deletions(-) create mode 100644 ietf/api/tests_serializers_rpc.py diff --git a/ietf/api/serializers_rpc.py b/ietf/api/serializers_rpc.py index 34e2c791c0..d5f5363990 100644 --- a/ietf/api/serializers_rpc.py +++ b/ietf/api/serializers_rpc.py @@ -216,32 +216,24 @@ class Meta: read_only_fields = ["id", "name"] -class EditableRfcSerializer(serializers.ModelSerializer): - # Would be nice to reconcile this with ietf.doc.serializers.RfcSerializer. - # The purposes of that serializer (representing data for Red) and this one - # (accepting updates from Purple) are different enough that separate formats - # may be needed, but if not it'd be nice to have a single RfcSerializer that - # can serve both. 
- # - # For now, only handles authors - authors = RfcAuthorSerializer(many=True, min_length=1, source="rfcauthor_set") +def _update_authors(rfc, authors_data): + # Construct unsaved instances from validated author data + new_authors = [RfcAuthor(**authdata) for authdata in authors_data] + # Update the RFC with the new author set + with transaction.atomic(): + change_events = update_rfcauthors(rfc, new_authors) + for event in change_events: + event.save() + return change_events - class Meta: - model = Document - fields = ["id", "authors"] - def update(self, instance, validated_data): - assert isinstance(instance, Document) - authors_data = validated_data.pop("rfcauthor_set", None) - if authors_data is not None: - # Construct unsaved instances from validated author data - new_authors = [RfcAuthor(**ad) for ad in authors_data] - # Update the RFC with the new author set - with transaction.atomic(): - change_events = update_rfcauthors(instance, new_authors) - for event in change_events: - event.save() - return instance +class SubseriesNameField(serializers.RegexField): + + def __init__(self, **kwargs): + # pattern: no leading 0, finite length (arbitrarily set to 5 digits) + regex = r"^(bcp|std|fyi)[1-9][0-9]{0,4}$" + super().__init__(regex, **kwargs) + class RfcPubSerializer(serializers.ModelSerializer): @@ -283,13 +275,7 @@ class RfcPubSerializer(serializers.ModelSerializer): slug_field="rfc_number", queryset=Document.objects.filter(type_id="rfc"), ) - subseries = serializers.ListField( - child=serializers.RegexField( - required=False, - # pattern: no leading 0, finite length (arbitrarily set to 5 digits) - regex=r"^(bcp|std|fyi)[1-9][0-9]{0,4}$", - ) - ) + subseries = serializers.ListField(child=SubseriesNameField(required=False)) # N.b., authors is _not_ a field on Document! authors = RfcAuthorSerializer(many=True) @@ -327,6 +313,9 @@ def validate(self, data): ) return data + def update(self, instance, validated_data): + raise RuntimeError("Cannot update with this serializer") + def create(self, validated_data): """Publish an RFC""" published = validated_data.pop("published") @@ -515,6 +504,182 @@ def _create_rfc(self, validated_data): return rfc +class EditableRfcSerializer(serializers.ModelSerializer): + # Would be nice to reconcile this with ietf.doc.serializers.RfcSerializer. + # The purposes of that serializer (representing data for Red) and this one + # (accepting updates from Purple) are different enough that separate formats + # may be needed, but if not it'd be nice to have a single RfcSerializer that + # can serve both. + # + # Should also consider whether this and RfcPubSerializer should merge. + # + # Treats published and subseries fields as write-only. This isn't quite correct, + # but makes it easier and we don't currently use the serialized value except for + # debugging. 
+ published = serializers.DateTimeField( + default_timezone=datetime.timezone.utc, + write_only=True, + ) + authors = RfcAuthorSerializer(many=True, min_length=1, source="rfcauthor_set") + subseries = serializers.ListField( + child=SubseriesNameField(required=False), + write_only=True, + ) + + class Meta: + model = Document + fields = [ + "published", + "title", + "authors", + "stream", + "abstract", + "pages", + "std_level", + "subseries", + ] + + def create(self, validated_data): + raise RuntimeError("Cannot create with this serializer") + + def update(self, instance, validated_data): + assert isinstance(instance, Document) + assert instance.type_id == "rfc" + rfc = instance # get better name + + system_person = Person.objects.get(name="(System)") + + # Remove data that needs special handling. Use a singleton object to detect + # missing values in case we ever support a value that needs None as an option. + omitted = object() + published = validated_data.pop("published", omitted) + subseries = validated_data.pop("subseries", omitted) + authors_data = validated_data.pop("rfcauthor_set", omitted) + + # Transaction to clean up if something fails + with transaction.atomic(): + # update the rfc Document itself + rfc_changes = [] + rfc_events = [] + + for attr, new_value in validated_data.items(): + old_value = getattr(rfc, attr) + if new_value != old_value: + rfc_changes.append( + f"changed {attr} to '{new_value}' from '{old_value}'" + ) + setattr(rfc, attr, new_value) + if len(rfc_changes) > 0: + rfc_change_summary = f"{', '.join(rfc_changes)}" + rfc_events.append( + DocEvent.objects.create( + doc=rfc, + rev=rfc.rev, + by=system_person, + type="sync_from_rfc_editor", + desc=f"Changed metadata: {rfc_change_summary}", + ) + ) + if authors_data is not omitted: + rfc_events.extend(_update_authors(instance, authors_data)) + + if published is not omitted: + published_event = rfc.latest_event(type="published_rfc") + if published_event is None: + # unexpected, but possible in theory + rfc_events.append( + DocEvent.objects.create( + doc=rfc, + rev=rfc.rev, + type="published_rfc", + time=published, + by=system_person, + desc="RFC published", + ) + ) + rfc_events.append( + DocEvent.objects.create( + doc=rfc, + rev=rfc.rev, + type="sync_from_rfc_editor", + by=system_person, + desc=( + f"Set publication timestamp to {published.isoformat()}" + ), + ) + ) + else: + original_pub_time = published_event.time + if published != original_pub_time: + published_event.time = published + published_event.save() + rfc_events.append( + DocEvent.objects.create( + doc=rfc, + rev=rfc.rev, + type="sync_from_rfc_editor", + by=system_person, + desc=( + f"Changed publication time to " + f"{published.isoformat()} from " + f"{original_pub_time.isoformat()}" + ) + ) + ) + + # update subseries relations + if subseries is not omitted: + for subseries_doc_name in subseries: + ss_slug = subseries_doc_name[:3] + subseries_doc, ss_doc_created = Document.objects.get_or_create( + type_id=ss_slug, name=subseries_doc_name + ) + if ss_doc_created: + subseries_doc.docevent_set.create( + type=f"{ss_slug}_doc_created", + by=system_person, + desc=f"Created {subseries_doc_name} via update of {rfc.name}", + ) + _, ss_rel_created = subseries_doc.relateddocument_set.get_or_create( + relationship_id="contains", target=rfc + ) + if ss_rel_created: + subseries_doc.docevent_set.create( + type="sync_from_rfc_editor", + by=system_person, + desc=f"Added {rfc.name} to {subseries_doc.name}", + ) + rfc_events.append( + rfc.docevent_set.create( + 
type="sync_from_rfc_editor", + by=system_person, + desc=f"Added {rfc.name} to {subseries_doc.name}", + ) + ) + # Delete subseries relations that are no longer current + stale_subseries_relations = rfc.relations_that("contains").exclude( + source__name__in=subseries + ) + for stale_relation in stale_subseries_relations: + stale_subseries_doc = stale_relation.source + rfc_events.append( + rfc.docevent_set.create( + type="sync_from_rfc_editor", + by=system_person, + desc=f"Removed {rfc.name} from {stale_subseries_doc.name}", + ) + ) + stale_subseries_doc.docevent_set.create( + type="sync_from_rfc_editor", + by=system_person, + desc=f"Removed {rfc.name} from {stale_subseries_doc.name}", + ) + stale_subseries_relations.delete() + if len(rfc_events) > 0: + rfc.save_with_history(rfc_events) + return rfc + + class RfcFileSerializer(serializers.Serializer): # The structure of this serializer is constrained by what openapi-generator-cli's # python generator can correctly serialize as multipart/form-data. It does not diff --git a/ietf/api/tests_serializers_rpc.py b/ietf/api/tests_serializers_rpc.py new file mode 100644 index 0000000000..1babb4c30f --- /dev/null +++ b/ietf/api/tests_serializers_rpc.py @@ -0,0 +1,139 @@ +# Copyright The IETF Trust 2026, All Rights Reserved +from django.utils import timezone + +from ietf.utils.test_utils import TestCase +from ietf.doc.models import Document +from ietf.doc.factories import WgRfcFactory +from .serializers_rpc import EditableRfcSerializer + + +class EditableRfcSerializerTests(TestCase): + def test_create(self): + serializer = EditableRfcSerializer( + data={ + "published": timezone.now(), + "title": "Yadda yadda yadda", + "authors": [ + { + "titlepage_name": "B. Fett", + "is_editor": False, + "affiliation": "DBA Galactic Empire", + "country": "", + }, + ], + "stream": "ietf", + "abstract": "A long time ago in a galaxy far, far away...", + "pages": 3, + "std_level": "inf", + "subseries": ["fyi999"], + } + ) + self.assertTrue(serializer.is_valid()) + with self.assertRaises(RuntimeError, msg="serializer does not allow create()"): + serializer.save() + + def test_update(self): + rfc = WgRfcFactory(pages=10) + serializer = EditableRfcSerializer( + instance=rfc, + data={ + "published": timezone.now(), + "title": "Yadda yadda yadda", + "authors": [ + { + "titlepage_name": "B. Fett", + "is_editor": False, + "affiliation": "DBA Galactic Empire", + "country": "", + }, + ], + "stream": "ise", + "abstract": "A long time ago in a galaxy far, far away...", + "pages": 3, + "std_level": "inf", + "subseries": ["fyi999"], + }, + ) + self.assertTrue(serializer.is_valid()) + result = serializer.save() + result.refresh_from_db() + self.assertEqual(result.title, "Yadda yadda yadda") + self.assertEqual( + list( + result.rfcauthor_set.values( + "titlepage_name", "is_editor", "affiliation", "country" + ) + ), + [ + { + "titlepage_name": "B. Fett", + "is_editor": False, + "affiliation": "DBA Galactic Empire", + "country": "", + }, + ], + ) + self.assertEqual(result.stream_id, "ise") + self.assertEqual( + result.abstract, "A long time ago in a galaxy far, far away..." + ) + self.assertEqual(result.pages, 3) + self.assertEqual(result.std_level_id, "inf") + self.assertEqual( + result.part_of(), + [Document.objects.get(name="fyi999")], + ) + + def test_partial_update(self): + # We could test other permutations of fields, but authors is a partial update + # we know we are going to use, so verifying that one in particular. 
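+        # With partial=True, fields omitted from the payload (title, abstract,
+        # stream, pages, std_level, subseries) should be left untouched; the
+        # assertions below check that only the author set changes.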
+ rfc = WgRfcFactory(pages=10, abstract="do or do not", title="padawan") + serializer = EditableRfcSerializer( + partial=True, + instance=rfc, + data={ + "authors": [ + { + "titlepage_name": "B. Fett", + "is_editor": False, + "affiliation": "DBA Galactic Empire", + "country": "", + }, + ], + }, + ) + self.assertTrue(serializer.is_valid()) + result = serializer.save() + result.refresh_from_db() + self.assertEqual(rfc.title, "padawan") + self.assertEqual( + list( + result.rfcauthor_set.values( + "titlepage_name", "is_editor", "affiliation", "country" + ) + ), + [ + { + "titlepage_name": "B. Fett", + "is_editor": False, + "affiliation": "DBA Galactic Empire", + "country": "", + }, + ], + ) + self.assertEqual(result.stream_id, "ietf") + self.assertEqual(result.abstract, "do or do not") + self.assertEqual(result.pages, 10) + self.assertEqual(result.std_level_id, "ps") + self.assertEqual(result.part_of(), []) + + # Test only a field on the Document itself to be sure that it works + serializer = EditableRfcSerializer( + partial=True, + instance=rfc, + data={"title": "jedi master"}, + ) + self.assertTrue(serializer.is_valid()) + result = serializer.save() + result.refresh_from_db() + self.assertEqual(rfc.title, "jedi master") diff --git a/ietf/api/views_rpc.py b/ietf/api/views_rpc.py index 9273590b28..8862bbf866 100644 --- a/ietf/api/views_rpc.py +++ b/ietf/api/views_rpc.py @@ -35,7 +35,7 @@ NotificationAckSerializer, RfcPubSerializer, RfcFileSerializer, EditableRfcSerializer, ) -from ietf.doc.models import Document, DocHistory, RfcAuthor +from ietf.doc.models import Document, DocHistory, RfcAuthor, DocEvent from ietf.doc.serializers import RfcAuthorSerializer from ietf.doc.storage_utils import remove_from_storage, store_file, exists_in_storage from ietf.doc.tasks import signal_update_rfc_metadata_task @@ -279,6 +279,16 @@ class RfcViewSet(mixins.UpdateModelMixin, viewsets.GenericViewSet): lookup_field = "rfc_number" serializer_class = EditableRfcSerializer + def perform_update(self, serializer): + DocEvent.objects.create( + doc=serializer.instance, + rev=serializer.instance.rev, + by=Person.objects.get(name="(System)"), + type="sync_from_rfc_editor", + desc="Metadata update from RFC Editor", + ) + super().perform_update(serializer) + @action(detail=False, serializer_class=OriginalStreamSerializer) def rfc_original_stream(self, request): rfcs = self.get_queryset().annotate( diff --git a/ietf/doc/serializers.py b/ietf/doc/serializers.py index 36076c30be..a7ea640be8 100644 --- a/ietf/doc/serializers.py +++ b/ietf/doc/serializers.py @@ -27,6 +27,7 @@ class RfcAuthorSerializer(serializers.ModelSerializer): source="person.get_absolute_url", required=False, help_text="URL for person link (relative to datatracker base URL)", + read_only=True, ) class Meta: diff --git a/ietf/doc/utils.py b/ietf/doc/utils.py index 42fab7d472..396b3fcfa4 100644 --- a/ietf/doc/utils.py +++ b/ietf/doc/utils.py @@ -740,14 +740,26 @@ def _rfcauthor_from_documentauthor(docauthor: DocumentAuthor) -> RfcAuthor: new_author.document = rfc new_author.order = order + 1 new_author.save() - changes.append(f'Added "{new_author.titlepage_name}" as author') + if new_author.person_id is not None: + person_desc = f"Person {new_author.person_id}" + else: + person_desc = "no Person linked" + changes.append( + f'Added "{new_author.titlepage_name}" ({person_desc}) as author' + ) # Any authors left in original_authors are no longer in the list, so remove them for removed_author in original_authors: # Skip actual removal of old authors if we are 
converting from the # DocumentAuthor models - the original_authors were just stand-ins anyway. if not converting_from_docauthors: removed_author.delete() - changes.append(f'Removed "{removed_author.titlepage_name}" as author') + if removed_author.person_id is not None: + person_desc = f"Person {removed_author.person_id}" + else: + person_desc = "no Person linked" + changes.append( + f'Removed "{removed_author.titlepage_name}" ({person_desc}) as author' + ) # Create DocEvents, but leave it up to caller to save if by is None: by = Person.objects.get(name="(System)") From 481054511b9f07a47c41f854105e00616e61d3e2 Mon Sep 17 00:00:00 2001 From: Jennifer Richards Date: Tue, 3 Mar 2026 11:36:45 -0400 Subject: [PATCH 024/102] feat: add area to FullDraftSerializer (#10487) * refactor: Document.area() + serializer * feat: add area to FullDraftSerializer --- ietf/api/serializers_rpc.py | 3 +++ ietf/doc/models.py | 16 ++++++++++++++++ ietf/doc/serializers.py | 19 +------------------ 3 files changed, 20 insertions(+), 18 deletions(-) diff --git a/ietf/api/serializers_rpc.py b/ietf/api/serializers_rpc.py index d5f5363990..e51b917be4 100644 --- a/ietf/api/serializers_rpc.py +++ b/ietf/api/serializers_rpc.py @@ -27,6 +27,7 @@ update_rfcauthors, ) from ietf.group.models import Group +from ietf.group.serializers import AreaSerializer from ietf.name.models import StreamName, StdLevelName from ietf.person.models import Person from ietf.utils import log @@ -115,6 +116,7 @@ class FullDraftSerializer(serializers.ModelSerializer): name = serializers.CharField(max_length=255) title = serializers.CharField(max_length=255) group = serializers.SlugRelatedField(slug_field="acronym", read_only=True) + area = AreaSerializer(read_only=True) # Other fields we need to add / adjust source_format = serializers.SerializerMethodField() @@ -133,6 +135,7 @@ class Meta: "stream", "title", "group", + "area", "abstract", "pages", "source_format", diff --git a/ietf/doc/models.py b/ietf/doc/models.py index cc28951be0..f1b319367e 100644 --- a/ietf/doc/models.py +++ b/ietf/doc/models.py @@ -1147,6 +1147,22 @@ def request_closed_time(self, review_req): e = self.latest_event(ReviewRequestDocEvent, type="closed_review_request", review_request=review_req) return e.time if e and e.time else None + @property + def area(self) -> Group | None: + """Get area for document, if one exists + + None for non-IETF-stream documents. N.b., this is stricter than Group.area() and + uses different logic from Document.area_acronym(). 
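+        For example, an IETF-stream document whose group's parent is an area
+        Group returns that parent; other streams, documents with no group, or
+        groups whose parent is not an area all yield None.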
+ """ + if self.stream_id != "ietf": + return None + if self.group is None: + return None + parent = self.group.parent + if parent.type_id == "area": + return parent + return None + def area_acronym(self): g = self.group if g: diff --git a/ietf/doc/serializers.py b/ietf/doc/serializers.py index a7ea640be8..139ae9aa7e 100644 --- a/ietf/doc/serializers.py +++ b/ietf/doc/serializers.py @@ -230,7 +230,7 @@ class RfcMetadataSerializer(serializers.ModelSerializer): status = RfcStatusSerializer(source="*") authors = serializers.SerializerMethodField() group = GroupSerializer() - area = serializers.SerializerMethodField() + area = AreaSerializer(read_only=True) stream = StreamNameSerializer() ad = AreaDirectorSerializer(read_only=True, allow_null=True) group_list_email = serializers.EmailField(source="group.list_email", read_only=True) @@ -288,23 +288,6 @@ def get_authors(self, doc: Document): many=True, ).data - @extend_schema_field(AreaSerializer(required=False)) - def get_area(self, doc: Document): - """Get area for the RFC - - This logic might be better moved to Document or a combination of Document - and Group. The current (2026-02-24) Group.area() method is not strict enough: - it does not limit to WG groups or IETF-stream documents. - """ - if doc.stream_id != "ietf": - return None - if doc.group is None: - return None - parent = doc.group.parent - if parent.type_id == "area": - return AreaSerializer(parent).data - return None - @extend_schema_field(DocIdentifierSerializer(many=True)) def get_identifiers(self, doc: Document): identifiers = [] From 47d3734955071d1ccc54787698e751c74ce4d303 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 4 Mar 2026 12:32:36 -0400 Subject: [PATCH 025/102] chore(deps): bump types-pytz from 2025.2.0.20250809 to 2025.2.0.20251108 (#10424) Bumps [types-pytz](https://github.com/typeshed-internal/stub_uploader) from 2025.2.0.20250809 to 2025.2.0.20251108. - [Commits](https://github.com/typeshed-internal/stub_uploader/commits) --- updated-dependencies: - dependency-name: types-pytz dependency-version: 2025.2.0.20251108 dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index cb583d5dc9..3d54b104ee 100644 --- a/requirements.txt +++ b/requirements.txt @@ -74,7 +74,7 @@ python-magic==0.4.18 # Versions beyond the yanked .19 and .20 introduce form pymemcache>=4.0.0 # for django.core.cache.backends.memcached.PyMemcacheCache python-mimeparse>=2.0.0 # from TastyPie pytz==2025.2 # Pinned as changes need to be vetted for their effect on Meeting fields -types-pytz==2025.2.0.20250809 # match pytz version +types-pytz==2025.2.0.20251108 # match pytz version requests>=2.32.4 types-requests>=2.32.4 requests-mock>=1.12.1 From 1799245dc6ce82301b0790412957ccfa19910dc1 Mon Sep 17 00:00:00 2001 From: jennifer-richards <19472766+jennifer-richards@users.noreply.github.com> Date: Wed, 4 Mar 2026 16:45:46 +0000 Subject: [PATCH 026/102] ci: update base image target version to 20260304T1633 --- dev/build/Dockerfile | 2 +- dev/build/TARGET_BASE | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/dev/build/Dockerfile b/dev/build/Dockerfile index 71370fabee..ce1828052e 100644 --- a/dev/build/Dockerfile +++ b/dev/build/Dockerfile @@ -1,4 +1,4 @@ -FROM ghcr.io/ietf-tools/datatracker-app-base:20260211T1901 +FROM ghcr.io/ietf-tools/datatracker-app-base:20260304T1633 LABEL maintainer="IETF Tools Team " ENV DEBIAN_FRONTEND=noninteractive diff --git a/dev/build/TARGET_BASE b/dev/build/TARGET_BASE index 947f3790e4..6be54fb6b0 100644 --- a/dev/build/TARGET_BASE +++ b/dev/build/TARGET_BASE @@ -1 +1 @@ -20260211T1901 +20260304T1633 From 7f28542c82e2c51210daf77ca10f9682c0ea709d Mon Sep 17 00:00:00 2001 From: Jennifer Richards Date: Mon, 9 Mar 2026 14:02:57 -0300 Subject: [PATCH 027/102] fix: finish dropping email as RfcAuthor field (#10512) * fix: fix admin / Document.author_list() * fix: update RfcAuthorResource email is still accessible, but read only * fix: admin search by RfcAuthor email --- ietf/doc/admin.py | 4 +++- ietf/doc/models.py | 16 +++++++++++----- ietf/doc/resources.py | 2 +- 3 files changed, 15 insertions(+), 7 deletions(-) diff --git a/ietf/doc/admin.py b/ietf/doc/admin.py index b604d4f096..0d04e8db3a 100644 --- a/ietf/doc/admin.py +++ b/ietf/doc/admin.py @@ -241,7 +241,9 @@ def is_deleted(self, instance): admin.site.register(StoredObject, StoredObjectAdmin) class RfcAuthorAdmin(admin.ModelAdmin): + # the email field in the list_display/readonly_fields works through a @property list_display = ['id', 'document', 'titlepage_name', 'person', 'email', 'affiliation', 'country', 'order'] - search_fields = ['document__name', 'titlepage_name', 'person__name', 'email', 'affiliation', 'country'] + search_fields = ['document__name', 'titlepage_name', 'person__name', 'person__email__address', 'affiliation', 'country'] raw_id_fields = ["document", "person"] + readonly_fields = ["email"] admin.site.register(RfcAuthor, RfcAuthorAdmin) diff --git a/ietf/doc/models.py b/ietf/doc/models.py index f1b319367e..868bc4ac47 100644 --- a/ietf/doc/models.py +++ b/ietf/doc/models.py @@ -466,11 +466,12 @@ def author_persons(self): def author_list(self): """List of author emails""" - author_qs = ( - self.rfcauthor_set - if self.type_id == "rfc" and self.rfcauthor_set.exists() - else self.documentauthor_set - ).select_related("email").order_by("order") + if self.type_id == "rfc" and self.rfcauthor_set.exists(): + author_qs = 
self.rfcauthor_set.select_related("person").order_by("order") + else: + author_qs = self.documentauthor_set.select_related("email").order_by( + "order" + ) best_addresses = [] for author in author_qs: if author.email: @@ -953,6 +954,11 @@ class Meta: @property def email(self) -> Email | None: return self.person.email() if self.person else None + + def format_for_titlepage(self): + if self.is_editor: + return f"{self.titlepage_name}, Ed." + return self.titlepage_name class DocumentAuthorInfo(models.Model): diff --git a/ietf/doc/resources.py b/ietf/doc/resources.py index 556465a522..1d86df78d0 100644 --- a/ietf/doc/resources.py +++ b/ietf/doc/resources.py @@ -897,7 +897,7 @@ class Meta: class RfcAuthorResource(ModelResource): document = ToOneField(DocumentResource, 'document') person = ToOneField(PersonResource, 'person', null=True) - email = ToOneField(EmailResource, 'email', null=True) + email = ToOneField(EmailResource, 'email', null=True, readonly=True) class Meta: queryset = RfcAuthor.objects.all() serializer = api.Serializer() From 809e7682db30279cb715f47c89ae546e320c9c76 Mon Sep 17 00:00:00 2001 From: Robert Sparks Date: Wed, 11 Mar 2026 13:07:27 -0500 Subject: [PATCH 028/102] chore: remove task explorer from devcontainer (#10532) --- .devcontainer/devcontainer.json | 1 - 1 file changed, 1 deletion(-) diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index 2cfff78853..e4964e8909 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -32,7 +32,6 @@ "mutantdino.resourcemonitor", "oderwat.indent-rainbow", "redhat.vscode-yaml", - "spmeesseman.vscode-taskexplorer", "ms-python.pylint", "charliermarsh.ruff" ], From d4a594ddd4a9dd0bd575465748627f7fea68aac3 Mon Sep 17 00:00:00 2001 From: Jennifer Richards Date: Wed, 11 Mar 2026 15:12:22 -0300 Subject: [PATCH 029/102] feat: rfc-index generation (#10526) * chore: stand-in red_bucket STORAGE for dev * feat: rfc index text generation (WIP) Text generation works. Also includes XML generation that is not yet converted. Based on Kesara's implementations in the purple repo. * feat: rfc index XML generation (WIP) * feat: Document.keywords + migration * feat: keywords API * feat: keywords in rfc-index.xml * fix: better stream/area/wg_acronym Still some disagreements, not sure if that's data or logic driven * fix: NON WORKING GROUP logic May need more attention * fix: add rev to draft name * fix: interleave unpublished RFC records * fix: lint * refactor: use lxml * fix: multi-paragraph abstracts * feat: RFCINDEX_MATCH_LEGACY_XML option * fix: zero pad DOIs * fix: better NON WORKING GROUP id * fix: reorder elements * refactor: extract repeated code * refactor: unify DOI generation * fix: modern DOI proxy URL for ATOM feed * refactor: settings.RFC_EDITOR_ERRATA_BASE_URL Drop unused settings.RFC_EDITOR_ERRATA_URL * chore: real red_bucket storage cfg * fix: handle missing json for prod/dev/test * chore: straighten out S3 saving * chore(dev): FileSystemStorage for red_bucket dev (commented out) * chore: configurable bucket path for JSON inputs * test: tests_rfcindex.py Not great coverage, but exercises the generators a bit. 
* fix: lint + consistent var naming * test: improve test coverage / testability * fix: lint --- docker/configs/settings_local.py | 11 + ietf/api/serializers_rpc.py | 2 + ietf/doc/api.py | 6 - ietf/doc/factories.py | 6 + ietf/doc/feeds.py | 9 +- ...3_dochistory_keywords_document_keywords.py | 31 ++ ietf/doc/models.py | 21 + ietf/doc/serializers.py | 4 +- ietf/doc/views_doc.py | 4 +- ietf/settings.py | 10 +- ietf/settings_test.py | 8 +- ietf/sync/rfcindex.py | 480 ++++++++++++++++++ ietf/sync/tests_rfcindex.py | 230 +++++++++ ietf/templates/sync/rfc-index.txt | 69 +++ k8s/settings_local.py | 33 ++ 15 files changed, 907 insertions(+), 17 deletions(-) create mode 100644 ietf/doc/migrations/0033_dochistory_keywords_document_keywords.py create mode 100644 ietf/sync/rfcindex.py create mode 100644 ietf/sync/tests_rfcindex.py create mode 100644 ietf/templates/sync/rfc-index.txt diff --git a/docker/configs/settings_local.py b/docker/configs/settings_local.py index 1d4e6916b9..94adc516a4 100644 --- a/docker/configs/settings_local.py +++ b/docker/configs/settings_local.py @@ -101,6 +101,17 @@ ), } +# For dev on rfc-index generation, create a red_bucket/ directory in the project root +# and uncomment these settings. Generated files will appear in this directory. To +# generate an accurate index, put up-to-date copies of unusable-rfc-numbers.json, +# april-first-rfc-numbers.json, and publication-std-levels.json in this directory +# before generating the index. +# +# STORAGES["red_bucket"] = { +# "BACKEND": "django.core.files.storage.FileSystemStorage", +# "OPTIONS": {"location": "red_bucket"}, +# } + APP_API_TOKENS = { "ietf.api.red_api" : ["devtoken", "redtoken"], # Not a real secret "ietf.api.views_rpc" : ["devtoken"], # Not a real secret diff --git a/ietf/api/serializers_rpc.py b/ietf/api/serializers_rpc.py index e51b917be4..c17cbc64ce 100644 --- a/ietf/api/serializers_rpc.py +++ b/ietf/api/serializers_rpc.py @@ -300,6 +300,7 @@ class Meta: "obsoletes", "updates", "subseries", + "keywords", ] def validate(self, data): @@ -540,6 +541,7 @@ class Meta: "pages", "std_level", "subseries", + "keywords", ] def create(self, validated_data): diff --git a/ietf/doc/api.py b/ietf/doc/api.py index 75993f463e..73fff6b27f 100644 --- a/ietf/doc/api.py +++ b/ietf/doc/api.py @@ -4,13 +4,11 @@ from django.db.models import ( BooleanField, Count, - JSONField, OuterRef, Prefetch, Q, QuerySet, Subquery, - Value, ) from django.db.models.functions import TruncDate from django_filters import rest_framework as filters @@ -160,10 +158,6 @@ def augment_rfc_queryset(queryset: QuerySet[Document]): output_field=BooleanField(), ) ) - .annotate( - # TODO implement this fake field for real - keywords=Value(["keyword"], output_field=JSONField()), - ) ) diff --git a/ietf/doc/factories.py b/ietf/doc/factories.py index bc38765446..1a178c6f31 100644 --- a/ietf/doc/factories.py +++ b/ietf/doc/factories.py @@ -311,6 +311,12 @@ class Meta: def desc(self): return 'New version available %s-%s'%(self.doc.name,self.rev) +class PublishedRfcDocEventFactory(DocEventFactory): + class Meta: + model = DocEvent + type = "published_rfc" + doc = factory.SubFactory(WgRfcFactory) + class StateDocEventFactory(DocEventFactory): class Meta: model = StateDocEvent diff --git a/ietf/doc/feeds.py b/ietf/doc/feeds.py index 500ed3cb18..afe96cf0df 100644 --- a/ietf/doc/feeds.py +++ b/ietf/doc/feeds.py @@ -1,5 +1,4 @@ -# Copyright The IETF Trust 2007-2020, All Rights Reserved -# -*- coding: utf-8 -*- +# Copyright The IETF Trust 2007-2026, All Rights Reserved import 
debug # pyflakes:ignore @@ -263,9 +262,11 @@ def item_extra_kwargs(self, item): ) extra.update({"media_contents": media_contents}) - extra.update({"doi": "10.17487/%s" % item.name.upper()}) extra.update( - {"doiuri": "http://dx.doi.org/10.17487/%s" % item.name.upper()} + { + "doi": item.doi, + "doiuri": f"https://doi.org/{item.doi}", + } ) # R104 Publisher (Mandatory - but we need a string from them first) diff --git a/ietf/doc/migrations/0033_dochistory_keywords_document_keywords.py b/ietf/doc/migrations/0033_dochistory_keywords_document_keywords.py new file mode 100644 index 0000000000..5e2513e15a --- /dev/null +++ b/ietf/doc/migrations/0033_dochistory_keywords_document_keywords.py @@ -0,0 +1,31 @@ +# Copyright The IETF Trust 2026, All Rights Reserved + +from django.db import migrations, models +import ietf.doc.models + + +class Migration(migrations.Migration): + dependencies = [ + ("doc", "0032_remove_rfcauthor_email"), + ] + + operations = [ + migrations.AddField( + model_name="dochistory", + name="keywords", + field=models.JSONField( + default=list, + max_length=1000, + validators=[ietf.doc.models.validate_doc_keywords], + ), + ), + migrations.AddField( + model_name="document", + name="keywords", + field=models.JSONField( + default=list, + max_length=1000, + validators=[ietf.doc.models.validate_doc_keywords], + ), + ), + ] diff --git a/ietf/doc/models.py b/ietf/doc/models.py index 868bc4ac47..7b23a62c45 100644 --- a/ietf/doc/models.py +++ b/ietf/doc/models.py @@ -13,6 +13,7 @@ from io import BufferedReader from pathlib import Path +from django.core.exceptions import ValidationError from django.db.models import Q from lxml import etree from typing import Optional, Protocol, TYPE_CHECKING, Union @@ -109,6 +110,15 @@ class Meta: IESG_STATCHG_CONFLREV_ACTIVE_STATES = ("iesgeval", "defer") IESG_SUBSTATE_TAGS = ('ad-f-up', 'need-rev', 'extpty') + +def validate_doc_keywords(value): + if ( + not isinstance(value, list | tuple | set) + or not all(isinstance(elt, str) for elt in value) + ): + raise ValidationError("Value must be an array of strings") + + class DocumentInfo(models.Model): """Any kind of document. 
Draft, RFC, Charter, IPR Statement, Liaison Statement""" time = models.DateTimeField(default=timezone.now) # should probably have auto_now=True @@ -142,6 +152,17 @@ class DocumentInfo(models.Model): uploaded_filename = models.TextField(blank=True) note = models.TextField(blank=True) rfc_number = models.PositiveIntegerField(blank=True, null=True) # only valid for type="rfc" + keywords = models.JSONField( + default=list, + max_length=1000, + validators=[validate_doc_keywords], + ) + + @property + def doi(self) -> str | None: + if self.type_id == "rfc" and self.rfc_number is not None: + return f"{settings.IETF_DOI_PREFIX}/RFC{self.rfc_number:04d}" + return None def file_extension(self): if not hasattr(self, '_cached_extension'): diff --git a/ietf/doc/serializers.py b/ietf/doc/serializers.py index 139ae9aa7e..3651670962 100644 --- a/ietf/doc/serializers.py +++ b/ietf/doc/serializers.py @@ -291,9 +291,9 @@ def get_authors(self, doc: Document): @extend_schema_field(DocIdentifierSerializer(many=True)) def get_identifiers(self, doc: Document): identifiers = [] - if doc.rfc_number: + if doc.doi: identifiers.append( - DocIdentifier(type="doi", value=f"10.17487/RFC{doc.rfc_number:04d}") + DocIdentifier(type="doi", value=doc.doi) ) return DocIdentifierSerializer(instance=identifiers, many=True).data diff --git a/ietf/doc/views_doc.py b/ietf/doc/views_doc.py index 0ae7520681..c1f6352ac3 100644 --- a/ietf/doc/views_doc.py +++ b/ietf/doc/views_doc.py @@ -1285,9 +1285,7 @@ def document_bibtex(request, name, rev=None): break elif doc.type_id == "rfc": - # This needs to be replaced with a lookup, as the mapping may change - # over time. - doi = f"10.17487/RFC{doc.rfc_number:04d}" + doi = doc.doi if doc.is_dochistory(): latest_event = doc.latest_event(type='new_revision', rev=rev) diff --git a/ietf/settings.py b/ietf/settings.py index 71b110d762..e0b4f20118 100644 --- a/ietf/settings.py +++ b/ietf/settings.py @@ -838,6 +838,11 @@ def skip_unreadable_post(record): "slides", ] +# Other storages +STORAGES["red_bucket"] = { + "BACKEND": "django.core.files.storage.InMemoryStorage", + "OPTIONS": {"location": "red_bucket"}, +} # Override this in settings_local.py if needed # *_PATH variables ends with a slash/ . 
@@ -932,10 +937,11 @@ def skip_unreadable_post(record): RFC_EDITOR_QUEUE_URL = "https://www.rfc-editor.org/queue2.xml" RFC_EDITOR_INDEX_URL = "https://www.rfc-editor.org/rfc/rfc-index.xml" RFC_EDITOR_ERRATA_JSON_URL = "https://www.rfc-editor.org/errata.json" -RFC_EDITOR_ERRATA_URL = "https://www.rfc-editor.org/errata_search.php?rfc={rfc_number}" RFC_EDITOR_INLINE_ERRATA_URL = "https://www.rfc-editor.org/rfc/inline-errata/rfc{rfc_number}.html" +RFC_EDITOR_ERRATA_BASE_URL = "https://www.rfc-editor.org/errata/" RFC_EDITOR_INFO_BASE_URL = "https://www.rfc-editor.org/info/" + # NomCom Tool settings ROLODEX_URL = "" NOMCOM_PUBLIC_KEYS_DIR = '/a/www/nomcom/public_keys/' @@ -1570,3 +1576,5 @@ def skip_unreadable_post(record): YOUTUBE_DOMAINS = ['www.youtube.com', 'youtube.com', 'youtu.be', 'm.youtube.com', 'youtube-nocookie.com', 'www.youtube-nocookie.com'] + +IETF_DOI_PREFIX = "10.17487" diff --git a/ietf/settings_test.py b/ietf/settings_test.py index 6479069db0..1f5a7e8ddc 100755 --- a/ietf/settings_test.py +++ b/ietf/settings_test.py @@ -14,7 +14,7 @@ import shutil import tempfile from ietf.settings import * # pyflakes:ignore -from ietf.settings import ORIG_AUTH_PASSWORD_VALIDATORS +from ietf.settings import ORIG_AUTH_PASSWORD_VALIDATORS, STORAGES import debug # pyflakes:ignore debug.debug = True @@ -114,3 +114,9 @@ def tempdir_with_cleanup(**kwargs): AUTH_PASSWORD_VALIDATORS = ORIG_AUTH_PASSWORD_VALIDATORS except NameError: pass + +# Use InMemoryStorage for red bucket storage +STORAGES["red_bucket"] = { + "BACKEND": "django.core.files.storage.InMemoryStorage", + "OPTIONS": {"location": "red_bucket"}, +} diff --git a/ietf/sync/rfcindex.py b/ietf/sync/rfcindex.py new file mode 100644 index 0000000000..b15846094f --- /dev/null +++ b/ietf/sync/rfcindex.py @@ -0,0 +1,480 @@ +# Copyright The IETF Trust 2026, All Rights Reserved +import json +from collections import defaultdict +from collections.abc import Container +from dataclasses import dataclass +from io import StringIO, BytesIO +from itertools import chain +from operator import attrgetter, itemgetter +from pathlib import Path +from textwrap import fill +from urllib.parse import urljoin + +from django.conf import settings +from lxml import etree + +from django.core.files.storage import storages +from django.db import models +from django.db.models.functions import Substr, Cast +from django.template.loader import render_to_string +from django.utils import timezone + +from ietf.doc.models import Document +from ietf.name.models import StdLevelName +from ietf.utils.log import log + +FORMATS_FOR_INDEX = ["txt", "html", "pdf", "xml", "ps"] + + +def format_rfc_number(n): + """Format an RFC number (or subseries doc number) + + Set settings.RFCINDEX_MATCH_LEGACY_XML=True for the legacy (leading-zero) format. + That is for debugging only - tests will fail. + """ + if getattr(settings, "RFCINDEX_MATCH_LEGACY_XML", False): + return format(n, "04") + else: + return format(n) + + +def errata_url(rfc: Document): + return urljoin(settings.RFC_EDITOR_ERRATA_BASE_URL + "/", f"rfc{rfc.rfc_number}") + + +def save_to_red_bucket(filename: str, content: BytesIO | StringIO): + red_bucket = storages["red_bucket"] + bucket_path = str(Path(getattr(settings, "RFCINDEX_OUTPUT_PATH", "")) / filename) + if getattr(settings, "RFCINDEX_DELETE_THEN_WRITE", True): + # Django 4.2's FileSystemStorage does not support allow_overwrite. 
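+            # Delete any existing object before saving. S3-backed deployments set
+            # RFCINDEX_DELETE_THEN_WRITE=False (see k8s/settings_local.py) because
+            # S3Storage overwrites in place.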
+ red_bucket.delete(bucket_path) + red_bucket.save(bucket_path, content) + log(f"Saved {bucket_path} in red_bucket storage") + + +@dataclass +class UnusableRfcNumber: + rfc_number: int + comment: str + + +def get_unusable_rfc_numbers() -> list[UnusableRfcNumber]: + FILENAME = "unusable-rfc-numbers.json" + bucket_path = str(Path(getattr(settings, "RFCINDEX_INPUT_PATH", "")) / FILENAME) + try: + with storages["red_bucket"].open(bucket_path) as urn_file: + records = json.load(urn_file) + except FileNotFoundError: + if settings.SERVER_MODE == "development": + log( + f"Unable to open {bucket_path} in red_bucket storage. This is okay in dev " + "but generated rfc-index will not agree with RFC Editor values." + ) # pragma: no cover + return [] # pragma: no cover + log(f"Error: unable to open {bucket_path} in red_bucket storage") + raise + except json.JSONDecodeError: + log(f"Error: unable to parse {bucket_path} in red_bucket storage") + if settings.SERVER_MODE == "development": + return [] # pragma: no cover + raise + assert all(isinstance(record["number"], int) for record in records) + assert all(isinstance(record["comment"], str) for record in records) + return [ + UnusableRfcNumber(rfc_number=record["number"], comment=record["comment"]) + for record in sorted(records, key=itemgetter("number")) + ] + + +def get_april1_rfc_numbers() -> Container[int]: + FILENAME = "april-first-rfc-numbers.json" + bucket_path = str(Path(getattr(settings, "RFCINDEX_INPUT_PATH", "")) / FILENAME) + try: + with storages["red_bucket"].open(bucket_path) as urn_file: + records = json.load(urn_file) + except FileNotFoundError: + if settings.SERVER_MODE == "development": + log( + f"Unable to open {bucket_path} in red_bucket storage. This is okay in dev " + "but generated rfc-index will not agree with RFC Editor values." + ) # pragma: no cover + return [] # pragma: no cover + log(f"Error: unable to open {bucket_path} in red_bucket storage") + raise + except json.JSONDecodeError: + log(f"Error: unable to parse {bucket_path} in red_bucket storage") + if settings.SERVER_MODE == "development": + return [] # pragma: no cover + raise + assert all(isinstance(record, int) for record in records) + return records + + +def get_publication_std_levels() -> dict[int, StdLevelName]: + FILENAME = "publication-std-levels.json" + bucket_path = str(Path(getattr(settings, "RFCINDEX_INPUT_PATH", "")) / FILENAME) + values: dict[int, StdLevelName] = {} + try: + with storages["red_bucket"].open(bucket_path) as urn_file: + records = json.load(urn_file) + except FileNotFoundError: + if settings.SERVER_MODE == "development": + log( + f"Unable to open {bucket_path} in red_bucket storage. This is okay in dev " + "but generated rfc-index will not agree with RFC Editor values." 
+ ) # pragma: no cover + # intentionally fall through instead of return here + else: + log(f"Error: unable to open {bucket_path} in red_bucket storage") + raise + except json.JSONDecodeError: + log(f"Error: unable to parse {bucket_path} in red_bucket storage") + if settings.SERVER_MODE != "development": + raise + else: + assert all(isinstance(record["number"], int) for record in records) + values = { + record["number"]: StdLevelName.objects.get( + slug=record["publication_std_level"] + ) + for record in records + } + # defaultdict to return "unknown" for any missing values + unknown_std_level = StdLevelName.objects.get(slug="unkn") + return defaultdict(lambda: unknown_std_level, values) + + +def format_ordering(rfc_number): + if rfc_number < 8650: + ordering = ["txt", "ps", "pdf", "html", "xml"] + else: + ordering = ["html", "txt", "ps", "pdf", "xml"] + return ordering.index # return the method + + +def get_rfc_text_index_entries(): + """Returns RFC entries for rfc-index.txt""" + entries = [] + april1_rfc_numbers = get_april1_rfc_numbers() + published_rfcs = Document.objects.filter(type_id="rfc").order_by("rfc_number") + rfcs = sorted( + chain(published_rfcs, get_unusable_rfc_numbers()), key=attrgetter("rfc_number") + ) + for rfc in rfcs: + if isinstance(rfc, UnusableRfcNumber): + entries.append(f"{format_rfc_number(rfc.rfc_number)} Not Issued.") + else: + assert isinstance(rfc, Document) + authors = ", ".join( + author.format_for_titlepage() for author in rfc.rfcauthor_set.all() + ) + published_at = rfc.pub_date() + date = ( + published_at.strftime("1 %B %Y") + if rfc.rfc_number in april1_rfc_numbers + else published_at.strftime("%B %Y") + ) + + # formats + formats = ", ".join( + sorted( + [ + format["fmt"] + for format in rfc.formats() + if format["fmt"] in FORMATS_FOR_INDEX + ], + key=format_ordering(rfc.rfc_number), + ) + ).upper() + + # obsoletes + obsoletes = "" + obsoletes_documents = sorted( + rfc.related_that_doc("obs"), + key=attrgetter("rfc_number"), + ) + if len(obsoletes_documents) > 0: + obsoletes_names = ", ".join( + f"RFC{format_rfc_number(doc.rfc_number)}" + for doc in obsoletes_documents + ) + obsoletes = f" (Obsoletes {obsoletes_names})" + + # obsoleted by + obsoleted_by = "" + obsoleted_by_documents = sorted( + rfc.related_that("obs"), + key=attrgetter("rfc_number"), + ) + if len(obsoleted_by_documents) > 0: + obsoleted_by_names = ", ".join( + f"RFC{format_rfc_number(doc.rfc_number)}" + for doc in obsoleted_by_documents + ) + obsoleted_by = f" (Obsoleted by {obsoleted_by_names})" + + # updates + updates = "" + updates_documents = sorted( + rfc.related_that_doc("updates"), + key=attrgetter("rfc_number"), + ) + if len(updates_documents) > 0: + updates_names = ", ".join( + f"RFC{format_rfc_number(doc.rfc_number)}" + for doc in updates_documents + ) + updates = f" (Updates {updates_names})" + + # updated by + updated_by = "" + updated_by_documents = sorted( + rfc.related_that("updates"), + key=attrgetter("rfc_number"), + ) + if len(updated_by_documents) > 0: + updated_by_names = ", ".join( + f"RFC{format_rfc_number(doc.rfc_number)}" + for doc in updated_by_documents + ) + updated_by = f" (Updated by {updated_by_names})" + + doc_relations = f"{obsoletes}{obsoleted_by}{updates}{updated_by} " + + # subseries + subseries = ",".join( + f"{container.type.slug}{format_rfc_number(int(container.name[3:]))}" + for container in rfc.part_of() + ).upper() + if subseries: + subseries = f"(Also {subseries}) " + + entry = fill( + ( + f"{format_rfc_number(rfc.rfc_number)} {rfc.title}. 
{authors}. {date}. " + f"(Format: {formats}){doc_relations}{subseries}" + f"(Status: {str(rfc.std_level).upper()}) " + f"(DOI: {rfc.doi})" + ), + width=73, + subsequent_indent=" " * 5, + ) + entries.append(entry) + + return entries + + +def add_subseries_xml_index_entries(rfc_index, ss_type, include_all=False): + """Add subseries entries for rfc-index.xml""" + # subseries docs annotated with numeric number + ss_docs = list( + Document.objects.filter(type_id=ss_type) + .annotate( + number=Cast( + Substr("name", 4, None), + output_field=models.IntegerField(), + ) + ) + .order_by("-number") + ) + if len(ss_docs) == 0: + return # very much not expected + highest_number = ss_docs[0].number + for ss_number in range(1, highest_number + 1): + if ss_docs[-1].number == ss_number: + this_ss_doc = ss_docs.pop() + contained_rfcs = this_ss_doc.contains() + else: + contained_rfcs = [] + if len(contained_rfcs) == 0 and not include_all: + continue + entry = etree.SubElement(rfc_index, f"{ss_type}-entry") + etree.SubElement( + entry, "doc-id" + ).text = f"{ss_type.upper()}{format_rfc_number(ss_number)}" + if len(contained_rfcs) > 0: + is_also = etree.SubElement(entry, "is-also") + for rfc in sorted(contained_rfcs, key=attrgetter("rfc_number")): + etree.SubElement( + is_also, "doc-id" + ).text = f"RFC{format_rfc_number(rfc.rfc_number)}" + + +def add_related_xml_index_entries(root: etree.Element, rfc: Document, tag: str): + relation_getter = { + "obsoletes": lambda doc: doc.related_that_doc("obs"), + "obsoleted-by": lambda doc: doc.related_that("obs"), + "updates": lambda doc: doc.related_that_doc("updates"), + "updated-by": lambda doc: doc.related_that("updates"), + } + related_docs = sorted( + relation_getter[tag](rfc), + key=attrgetter("rfc_number"), + ) + if len(related_docs) > 0: + element = etree.SubElement(root, tag) + for doc in related_docs: + etree.SubElement( + element, "doc-id" + ).text = f"RFC{format_rfc_number(doc.rfc_number)}" + + +def add_rfc_xml_index_entries(rfc_index): + """Add RFC entries for rfc-index.xml""" + entries = [] + april1_rfc_numbers = get_april1_rfc_numbers() + publication_statuses = get_publication_std_levels() + + published_rfcs = Document.objects.filter(type_id="rfc").order_by("rfc_number") + + # Iterators for unpublished and published, both sorted by number + unpublished_iter = iter(get_unusable_rfc_numbers()) + published_iter = iter(published_rfcs) + + # Prime the next_* values + next_unpublished = next(unpublished_iter, None) + next_published = next(published_iter, None) + + while next_published is not None or next_unpublished is not None: + if next_unpublished is not None and ( + next_published is None + or next_unpublished.rfc_number < next_published.rfc_number + ): + entry = etree.SubElement(rfc_index, "rfc-not-issued-entry") + etree.SubElement( + entry, "doc-id" + ).text = f"RFC{format_rfc_number(next_unpublished.rfc_number)}" + entries.append(entry) + next_unpublished = next(unpublished_iter, None) + continue + + rfc = next_published # hang on to this + next_published = next(published_iter, None) # prep for next iteration + entry = etree.SubElement(rfc_index, "rfc-entry") + + etree.SubElement( + entry, "doc-id" + ).text = f"RFC{format_rfc_number(rfc.rfc_number)}" + etree.SubElement(entry, "title").text = rfc.title + + for author in rfc.rfcauthor_set.all(): + author_element = etree.SubElement(entry, "author") + etree.SubElement(author_element, "name").text = author.titlepage_name + if author.is_editor: + etree.SubElement(author_element, "title").text = "Editor" + + 
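+        # Each author renders as, e.g.,
+        # <author><name>J. Doe</name><title>Editor</title></author> (example name),
+        # with <title> present only for editors.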
date = etree.SubElement(entry, "date") + published_at = rfc.pub_date() + etree.SubElement(date, "month").text = published_at.strftime("%B") + if rfc.rfc_number in april1_rfc_numbers: + etree.SubElement(date, "day").text = str(published_at.day) + etree.SubElement(date, "year").text = str(published_at.year) + + format_ = etree.SubElement(entry, "format") + fmts = [ff["fmt"] for ff in rfc.formats() if ff["fmt"] in FORMATS_FOR_INDEX] + for fmt in sorted(fmts, key=format_ordering(rfc.rfc_number)): + match_legacy = getattr(settings, "RFCINDEX_MATCH_LEGACY_XML", False) + etree.SubElement(format_, "file-format").text = ( + "ASCII" if match_legacy and fmt == "txt" else fmt.upper() + ) + + etree.SubElement(entry, "page-count").text = str(rfc.pages) + + if len(rfc.keywords) > 0: + keywords = etree.SubElement(entry, "keywords") + for keyword in rfc.keywords: + etree.SubElement(keywords, "kw").text = keyword.strip() + + if rfc.abstract: + abstract = etree.SubElement(entry, "abstract") + for paragraph in rfc.abstract.split("\n\n"): + etree.SubElement(abstract, "p").text = paragraph.strip() + + draft = rfc.came_from_draft() + if draft is not None: + etree.SubElement(entry, "draft").text = f"{draft.name}-{draft.rev}" + + part_of_documents = rfc.part_of() + if len(part_of_documents) > 0: + is_also = etree.SubElement(entry, "is-also") + for doc in part_of_documents: + etree.SubElement(is_also, "doc-id").text = doc.name.upper() + + add_related_xml_index_entries(entry, rfc, "obsoletes") + add_related_xml_index_entries(entry, rfc, "obsoleted-by") + add_related_xml_index_entries(entry, rfc, "updates") + add_related_xml_index_entries(entry, rfc, "updated-by") + + etree.SubElement(entry, "current-status").text = rfc.std_level.name.upper() + etree.SubElement(entry, "publication-status").text = publication_statuses[ + rfc.rfc_number + ].name.upper() + etree.SubElement(entry, "stream").text = ( + "INDEPENDENT" if rfc.stream_id == "ise" else rfc.stream.name + ) + + # Add area / wg_acronym + if rfc.stream_id == "ietf": + if rfc.group.type_id in ["individ", "area"]: + etree.SubElement(entry, "wg_acronym").text = "NON WORKING GROUP" + else: + if rfc.area is not None: + etree.SubElement(entry, "area").text = rfc.area.acronym + if rfc.group: + etree.SubElement(entry, "wg_acronym").text = rfc.group.acronym + + if rfc.tags.filter(slug="errata").exists(): + etree.SubElement(entry, "errata-url").text = errata_url(rfc) + etree.SubElement(entry, "doi").text = rfc.doi + entries.append(entry) + + +def create_rfc_txt_index(): + """Create text index of published documents""" + DATE_FMT = "%m/%d/%Y" + created_on = timezone.now().strftime(DATE_FMT) + log("Creating rfc-index.txt") + index = render_to_string( + "sync/rfc-index.txt", + { + "created_on": created_on, + "rfcs": get_rfc_text_index_entries(), + }, + ) + save_to_red_bucket("rfc-index.txt", StringIO(index)) + + +def create_rfc_xml_index(): + """Create XML index of published documents""" + XSI_NAMESPACE = "http://www.w3.org/2001/XMLSchema-instance" + XSI = "{" + XSI_NAMESPACE + "}" + + log("Creating rfc-index.xml") + rfc_index = etree.Element( + "rfc-index", + nsmap={ + None: "https://www.rfc-editor.org/rfc-index", + "xsi": XSI_NAMESPACE, + }, + attrib={ + XSI + "schemaLocation": ( + "https://www.rfc-editor.org/rfc-index " + "https://www.rfc-editor.org/rfc-index.xsd" + ), + }, + ) + + # add data + add_subseries_xml_index_entries(rfc_index, "bcp", include_all=True) + add_subseries_xml_index_entries(rfc_index, "fyi") + add_rfc_xml_index_entries(rfc_index) + 
add_subseries_xml_index_entries(rfc_index, "std") + + # make it pretty + pretty_index = etree.tostring( + rfc_index, + encoding="utf-8", + xml_declaration=True, + pretty_print=4, + ) + save_to_red_bucket("rfc-index.xml", BytesIO(pretty_index)) diff --git a/ietf/sync/tests_rfcindex.py b/ietf/sync/tests_rfcindex.py new file mode 100644 index 0000000000..b0a8712fe1 --- /dev/null +++ b/ietf/sync/tests_rfcindex.py @@ -0,0 +1,230 @@ +# Copyright The IETF Trust 2026, All Rights Reserved +import json +from io import BytesIO, StringIO +from unittest import mock + +from django.core.files.storage import storages +from django.test.utils import override_settings +from lxml import etree + +from ietf.doc.factories import PublishedRfcDocEventFactory, IndividualRfcFactory +from ietf.name.models import DocTagName +from ietf.sync.rfcindex import ( + create_rfc_txt_index, + create_rfc_xml_index, + format_rfc_number, + save_to_red_bucket, get_unusable_rfc_numbers, get_april1_rfc_numbers, + get_publication_std_levels, +) +from ietf.utils.test_utils import TestCase + + +class RfcIndexTests(TestCase): + """Tests of rfc-index generation + + Tests are limited and should cover more cases. Needs: + * test of subseries docs + * test of related docs (obsoletes/updates + reverse directions) + * more thorough validation of index contents + + Be careful when calling create_rfc_txt_index() or create_rfc_xml_index(). These + will save to a storage by default, which can introduce cross-talk between tests. + Best to patch that method with a mock. + """ + + def setUp(self): + super().setUp() + red_bucket = storages["red_bucket"] + + # Create an unused RFC number + red_bucket.save( + "input/unusable-rfc-numbers.json", + StringIO(json.dumps([{"number": 123, "comment": ""}])), + ) + + # actual April 1 RFC + self.april_fools_rfc = PublishedRfcDocEventFactory( + time="2020-04-01T12:00:00Z", + doc=IndividualRfcFactory( + name="rfc4560", + rfc_number=4560, + stream_id="ise", + std_level_id="inf", + ), + ).doc + # Set up a JSON file to flag the April 1 RFC + red_bucket.save( + "input/april-first-rfc-numbers.json", + StringIO(json.dumps([self.april_fools_rfc.rfc_number])), + ) + + # non-April Fools RFC that happens to have been published on April 1 + self.rfc = PublishedRfcDocEventFactory( + time="2021-04-01T12:00:00Z", + doc__name="rfc10000", + doc__rfc_number=10000, + doc__std_level_id="std", + ).doc + self.rfc.tags.add(DocTagName.objects.get(slug="errata")) + + # Set up a publication-std-levels.json file to indicate the publication + # standard of self.rfc as different from its current value + red_bucket.save( + "input/publication-std-levels.json", + StringIO( + json.dumps( + [{"number": self.rfc.rfc_number, "publication_std_level": "ps"}] + ) + ), + ) + + def tearDown(self): + red_bucket = storages["red_bucket"] + red_bucket.delete("input/unusable-rfc-numbers.json") + red_bucket.delete("input/april-first-rfc-numbers.json") + red_bucket.delete("input/publication-std-levels.json") + super().tearDown() + + @override_settings(RFCINDEX_INPUT_PATH="input/") + @mock.patch("ietf.sync.rfcindex.save_to_red_bucket") + def test_create_rfc_txt_index(self, mock_save): + create_rfc_txt_index() + self.assertEqual(mock_save.call_count, 1) + self.assertEqual(mock_save.call_args[0][0], "rfc-index.txt") + contents = mock_save.call_args[0][1].read() + self.assertIn( + "123 Not Issued.", + contents, + ) + # No zero prefix! 
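+        # (format_rfc_number() only zero-pads when RFCINDEX_MATCH_LEGACY_XML is set)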
+ self.assertNotIn( + "0123 Not Issued.", + contents, + ) + self.assertIn( + f"{self.april_fools_rfc.rfc_number} {self.april_fools_rfc.title}", + contents, + ) + self.assertIn("1 April 2020", contents) # from the April 1 RFC + self.assertIn( + f"{self.rfc.rfc_number} {self.rfc.title}", + contents, + ) + self.assertIn("April 2021", contents) # from the non-April 1 RFC + self.assertNotIn("1 April 2021", contents) + + @override_settings(RFCINDEX_INPUT_PATH="input/") + @mock.patch("ietf.sync.rfcindex.save_to_red_bucket") + def test_create_rfc_xml_index(self, mock_save): + create_rfc_xml_index() + self.assertEqual(mock_save.call_count, 1) + self.assertEqual(mock_save.call_args[0][0], "rfc-index.xml") + contents = mock_save.call_args[0][1].read() + ns = "{https://www.rfc-editor.org/rfc-index}" # NOT an f-string + index = etree.fromstring(contents) + + # We can aspire to validating the schema - currently does not conform because + # XSD expects 4-digit RFC numbers (etc). + # + # xmlschema = etree.XMLSchema(etree.fromstring( + # Path(__file__).with_name("rfc-index.xsd").read_bytes()) + # ) + # xmlschema.assertValid(index) + + children = list(index) # elements as list + # Should be one rfc-not-issued-entry + self.assertEqual(len(children), 3) + self.assertEqual( + [ + c.find(f"{ns}doc-id").text + for c in children + if c.tag == f"{ns}rfc-not-issued-entry" + ], + ["RFC123"], + ) + # Should be two rfc-entries + rfc_entries = { + c.find(f"{ns}doc-id").text: c for c in children if c.tag == f"{ns}rfc-entry" + } + + # Check the April Fool's entry + april_fools_entry = rfc_entries[self.april_fools_rfc.name.upper()] + self.assertEqual( + april_fools_entry.find(f"{ns}title").text, + self.april_fools_rfc.title, + ) + self.assertEqual( + [(c.tag, c.text) for c in april_fools_entry.find(f"{ns}date")], + [(f"{ns}month", "April"), (f"{ns}day", "1"), (f"{ns}year", "2020")], + ) + self.assertEqual( + april_fools_entry.find(f"{ns}current-status").text, + "INFORMATIONAL", + ) + self.assertEqual( + april_fools_entry.find(f"{ns}publication-status").text, + "UNKNOWN", + ) + + # Check the Regular entry + rfc_entry = rfc_entries[self.rfc.name.upper()] + self.assertEqual(rfc_entry.find(f"{ns}title").text, self.rfc.title) + self.assertEqual( + rfc_entry.find(f"{ns}current-status").text, "INTERNET STANDARD" + ) + self.assertEqual( + rfc_entry.find(f"{ns}publication-status").text, "PROPOSED STANDARD" + ) + self.assertEqual( + [(c.tag, c.text) for c in rfc_entry.find(f"{ns}date")], + [(f"{ns}month", "April"), (f"{ns}year", "2021")], + ) + + +class HelperTests(TestCase): + def test_format_rfc_number(self): + self.assertEqual(format_rfc_number(10), "10") + with override_settings(RFCINDEX_MATCH_LEGACY_XML=True): + self.assertEqual(format_rfc_number(10), "0010") + + def test_save_to_red_bucket(self): + red_bucket = storages["red_bucket"] + with override_settings(RFCINDEX_DELETE_THEN_WRITE=False): + save_to_red_bucket("test", StringIO("contents")) + with red_bucket.open("test", "r") as f: + self.assertEqual(f.read(), "contents") + with override_settings(RFCINDEX_DELETE_THEN_WRITE=True): + save_to_red_bucket("test", BytesIO(b"new contents")) + with red_bucket.open("test", "r") as f: + self.assertEqual(f.read(), "new contents") + red_bucket.delete("test") # clean up like a good child + + def test_get_unusable_rfc_numbers_raises(self): + """get_unusable_rfc_numbers should bail on errors""" + with self.assertRaises(FileNotFoundError): + get_unusable_rfc_numbers() + red_bucket = storages["red_bucket"] + 
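+        # RFCINDEX_INPUT_PATH is not overridden here, so the loader looks for
+        # the file at the root of the red_bucket storage.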
red_bucket.save("unusable-rfc-numbers.json", StringIO("not json")) + with self.assertRaises(json.JSONDecodeError): + get_unusable_rfc_numbers() + red_bucket.delete("unusable-rfc-numbers.json") + + def test_get_april1_rfc_numbers_raises(self): + """get_april1_rfc_numbers should bail on errors""" + with self.assertRaises(FileNotFoundError): + get_april1_rfc_numbers() + red_bucket = storages["red_bucket"] + red_bucket.save("april-first-rfc-numbers.json", StringIO("not json")) + with self.assertRaises(json.JSONDecodeError): + get_april1_rfc_numbers() + red_bucket.delete("april-first-rfc-numbers.json") + + def test_get_publication_std_levels_raises(self): + """get_publication_std_levels should bail on errors""" + with self.assertRaises(FileNotFoundError): + get_publication_std_levels() + red_bucket = storages["red_bucket"] + red_bucket.save("publication-std-levels.json", StringIO("not json")) + with self.assertRaises(json.JSONDecodeError): + get_publication_std_levels() + red_bucket.delete("publication-std-levels.json") diff --git a/ietf/templates/sync/rfc-index.txt b/ietf/templates/sync/rfc-index.txt new file mode 100644 index 0000000000..0f01ddfa90 --- /dev/null +++ b/ietf/templates/sync/rfc-index.txt @@ -0,0 +1,69 @@ + + +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + RFC INDEX + ------------- + +(CREATED ON: {{created_on}}.) + +This file contains citations for all RFCs in numeric order. + +RFC citations appear in this format: + + #### Title of RFC. Author 1, Author 2, Author 3. Issue date. + (Format: ASCII) (Obsoletes xxx) (Obsoleted by xxx) (Updates xxx) + (Updated by xxx) (Also FYI ####) (Status: ssssss) (DOI: ddd) + +or + + #### Not Issued. + +For example: + + 1129 Internet Time Synchronization: The Network Time Protocol. D.L. + Mills. October 1989. (Format: TXT, PS, PDF, HTML) (Also RFC1119) + (Status: INFORMATIONAL) (DOI: 10.17487/RFC1129) + +Key to citations: + +#### is the RFC number. + +Following the RFC number are the title, the author(s), and the +publication date of the RFC. Each of these is terminated by a period. + +Following the number are the title (terminated with a period), the +author, or list of authors (terminated with a period), and the date +(terminated with a period). + +The format follows in parentheses. One or more of the following formats +are listed: text (TXT), PostScript (PS), Portable Document Format +(PDF), HTML, XML. + +Obsoletes xxxx refers to other RFCs that this one replaces; +Obsoleted by xxxx refers to RFCs that have replaced this one. +Updates xxxx refers to other RFCs that this one merely updates (but +does not replace); Updated by xxxx refers to RFCs that have updated +(but not replaced) this one. Generally, only immediately succeeding +and/or preceding RFCs are indicated, not the entire history of each +related earlier or later RFC in a related series. + +The (Also FYI ##) or (Also STD ##) or (Also BCP ##) phrase gives the +equivalent FYI, STD, or BCP number if the RFC is also in those +document sub-series. The Status field gives the document's +current status (see RFC 2026). The (DOI ddd) field gives the +Digital Object Identifier. + +RFCs may be obtained in a number of ways, using HTTP, FTP, or email. 
+See the RFC Editor Web page http://www.rfc-editor.org + +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + RFC INDEX + --------- + + + +{% for rfc in rfcs %}{{rfc|safe}} + +{% endfor %} diff --git a/k8s/settings_local.py b/k8s/settings_local.py index 0386dbbdf9..5ca4ba5cd9 100644 --- a/k8s/settings_local.py +++ b/k8s/settings_local.py @@ -417,6 +417,39 @@ def _multiline_to_list(s): ), } +# Configure storage for the red bucket - assume it uses the same credentials as +# other blobs +_red_bucket_name = os.environ.get("DATATRACKER_BLOB_STORE_RED_BUCKET_NAME", "").strip() +if _red_bucket_name == "": + raise RuntimeError("DATATRACKER_BLOB_STORE_RED_BUCKET_NAME must be set") + +STORAGES["red_bucket"] = { + "BACKEND": "storages.backends.s3.S3Storage", + "OPTIONS": dict( + endpoint_url=_blob_store_endpoint_url, + access_key=_blob_store_access_key, + secret_key=_blob_store_secret_key, + security_token=None, + client_config=botocore.config.Config( + request_checksum_calculation="when_required", + response_checksum_validation="when_required", + signature_version="s3v4", + connect_timeout=_blob_store_connect_timeout, + read_timeout=_blob_store_read_timeout, + retries={"total_max_attempts": _blob_store_max_attempts}, + ), + verify=False, + bucket_name=_red_bucket_name, + ), +} +RFCINDEX_DELETE_THEN_WRITE = False # S3Storage allows file_overwrite by default +RFCINDEX_OUTPUT_PATH = os.environ.get( + "DATATRACKER_RFCINDEX_OUTPUT_PATH", "other/" +) +RFCINDEX_INPUT_PATH = os.environ.get( + "DATATRACKR_RFCINDEX_INPUT_PATH", "" +) + # Configure the blobdb app for artifact storage _blobdb_replication_enabled = ( os.environ.get("DATATRACKER_BLOBDB_REPLICATION_ENABLED", "true").lower() == "true" From c226749c301fbecfd5503ebd66ba3692187d2946 Mon Sep 17 00:00:00 2001 From: Jennifer Richards Date: Thu, 12 Mar 2026 16:31:56 -0300 Subject: [PATCH 030/102] feat: task + API for rfc-index creation (#10537) * chore: fix typo in k8s/settings_local.py * feat: refresh_rfc_index() API * fix: use ContentFile, manually encode str Works better with S3Storage * chore(dev): expose blobstore on fixed ports Simplifies connecting purple to the blob store * chore(dev): typo * test: fix + test encoding more carefully * test: cover the new url --- .devcontainer/docker-compose.extend.yml | 4 +-- docker/docker-compose.extend.yml | 4 +-- ietf/api/tests_views_rpc.py | 15 ++++++++++ ietf/api/urls_rpc.py | 5 ++++ ietf/api/views_rpc.py | 16 +++++++++++ ietf/sync/rfcindex.py | 17 ++++++----- ietf/sync/tasks.py | 10 ++++++- ietf/sync/tests_rfcindex.py | 38 ++++++++++++++----------- k8s/settings_local.py | 2 +- 9 files changed, 82 insertions(+), 29 deletions(-) diff --git a/.devcontainer/docker-compose.extend.yml b/.devcontainer/docker-compose.extend.yml index a92f42bc6d..ce1ce259fd 100644 --- a/.devcontainer/docker-compose.extend.yml +++ b/.devcontainer/docker-compose.extend.yml @@ -14,8 +14,8 @@ services: network_mode: service:db blobstore: ports: - - '9000' - - '9001' + - '9000:9000' + - '9001:9001' volumes: datatracker-vscode-ext: diff --git a/docker/docker-compose.extend.yml b/docker/docker-compose.extend.yml index a69a453110..12ebe447d5 100644 --- a/docker/docker-compose.extend.yml +++ b/docker/docker-compose.extend.yml @@ -18,8 +18,8 @@ services: - '5433' blobstore: ports: - - '9000' - - '9001' + - '9000:9000' + - '9001:9001' celery: volumes: - .:/workspace diff --git a/ietf/api/tests_views_rpc.py b/ietf/api/tests_views_rpc.py index 6a5a5c9b88..7ab8778d28 100644 --- a/ietf/api/tests_views_rpc.py +++ 
b/ietf/api/tests_views_rpc.py @@ -363,3 +363,18 @@ def _valid_post_data(): headers={"X-Api-Key": "valid-token"}, ) self.assertEqual(r.status_code, 200) # conflict + + @override_settings(APP_API_TOKENS={"ietf.api.views_rpc": ["valid-token"]}) + @mock.patch("ietf.api.views_rpc.create_rfc_index_task") + def test_refresh_rfc_index(self, mock_task): + url = urlreverse("ietf.api.purple_api.refresh_rfc_index") + response = self.client.get(url) + self.assertEqual(response.status_code, 403) + response = self.client.get(url, headers={"X-Api-Key": "invalid-token"}) + self.assertEqual(response.status_code, 403) + response = self.client.get(url, headers={"X-Api-Key": "valid-token"}) + self.assertEqual(response.status_code, 405) + self.assertFalse(mock_task.delay.called) + response = self.client.post(url, headers={"X-Api-Key": "valid-token"}) + self.assertEqual(response.status_code, 202) + self.assertTrue(mock_task.delay.called) diff --git a/ietf/api/urls_rpc.py b/ietf/api/urls_rpc.py index 9d41ac137f..8555610dc3 100644 --- a/ietf/api/urls_rpc.py +++ b/ietf/api/urls_rpc.py @@ -30,6 +30,11 @@ views_rpc.RfcPubFilesView.as_view(), name="ietf.api.purple_api.upload_rfc_files", ), + path( + r"rfc_index/refresh/", + views_rpc.RfcIndexView.as_view(), + name="ietf.api.purple_api.refresh_rfc_index", + ), path(r"subject//person/", views_rpc.SubjectPersonView.as_view()), ] diff --git a/ietf/api/views_rpc.py b/ietf/api/views_rpc.py index 8862bbf866..c7ae699005 100644 --- a/ietf/api/views_rpc.py +++ b/ietf/api/views_rpc.py @@ -40,6 +40,7 @@ from ietf.doc.storage_utils import remove_from_storage, store_file, exists_in_storage from ietf.doc.tasks import signal_update_rfc_metadata_task from ietf.person.models import Email, Person +from ietf.sync.tasks import create_rfc_index_task class Conflict(APIException): @@ -516,3 +517,18 @@ def post(self, request): shutil.move(ftm, destination) return Response(NotificationAckSerializer().data) + + +class RfcIndexView(APIView): + api_key_endpoint = "ietf.api.views_rpc" + + @extend_schema( + operation_id="refresh_rfc_index", + summary="Refresh rfc-index files", + description="Requests creation of rfc-index.xml and rfc-index.txt files", + responses={202: None}, + request=None, + ) + def post(self, request): + create_rfc_index_task.delay() + return Response(status=202) diff --git a/ietf/sync/rfcindex.py b/ietf/sync/rfcindex.py index b15846094f..63c2044931 100644 --- a/ietf/sync/rfcindex.py +++ b/ietf/sync/rfcindex.py @@ -3,7 +3,6 @@ from collections import defaultdict from collections.abc import Container from dataclasses import dataclass -from io import StringIO, BytesIO from itertools import chain from operator import attrgetter, itemgetter from pathlib import Path @@ -11,6 +10,7 @@ from urllib.parse import urljoin from django.conf import settings +from django.core.files.base import ContentFile from lxml import etree from django.core.files.storage import storages @@ -28,7 +28,7 @@ def format_rfc_number(n): """Format an RFC number (or subseries doc number) - + Set settings.RFCINDEX_MATCH_LEGACY_XML=True for the legacy (leading-zero) format. That is for debugging only - tests will fail. 
""" @@ -42,13 +42,16 @@ def errata_url(rfc: Document): return urljoin(settings.RFC_EDITOR_ERRATA_BASE_URL + "/", f"rfc{rfc.rfc_number}") -def save_to_red_bucket(filename: str, content: BytesIO | StringIO): +def save_to_red_bucket(filename: str, content: str | bytes): red_bucket = storages["red_bucket"] bucket_path = str(Path(getattr(settings, "RFCINDEX_OUTPUT_PATH", "")) / filename) if getattr(settings, "RFCINDEX_DELETE_THEN_WRITE", True): # Django 4.2's FileSystemStorage does not support allow_overwrite. red_bucket.delete(bucket_path) - red_bucket.save(bucket_path, content) + red_bucket.save( + bucket_path, + ContentFile(content if isinstance(content, bytes) else content.encode("utf-8")), + ) log(f"Saved {bucket_path} in red_bucket storage") @@ -76,7 +79,7 @@ def get_unusable_rfc_numbers() -> list[UnusableRfcNumber]: except json.JSONDecodeError: log(f"Error: unable to parse {bucket_path} in red_bucket storage") if settings.SERVER_MODE == "development": - return [] # pragma: no cover + return [] # pragma: no cover raise assert all(isinstance(record["number"], int) for record in records) assert all(isinstance(record["comment"], str) for record in records) @@ -441,7 +444,7 @@ def create_rfc_txt_index(): "rfcs": get_rfc_text_index_entries(), }, ) - save_to_red_bucket("rfc-index.txt", StringIO(index)) + save_to_red_bucket("rfc-index.txt", index) def create_rfc_xml_index(): @@ -477,4 +480,4 @@ def create_rfc_xml_index(): xml_declaration=True, pretty_print=4, ) - save_to_red_bucket("rfc-index.xml", BytesIO(pretty_index)) + save_to_red_bucket("rfc-index.xml", pretty_index) diff --git a/ietf/sync/tasks.py b/ietf/sync/tasks.py index fc75a056ed..4c84dc581e 100644 --- a/ietf/sync/tasks.py +++ b/ietf/sync/tasks.py @@ -1,4 +1,4 @@ -# Copyright The IETF Trust 2024, All Rights Reserved +# Copyright The IETF Trust 2024-2026, All Rights Reserved # # Celery task definitions # @@ -18,6 +18,7 @@ from ietf.sync import iana from ietf.sync import rfceditor from ietf.sync.rfceditor import MIN_QUEUE_RESULTS, parse_queue, update_drafts_from_queue +from ietf.sync.rfcindex import create_rfc_txt_index, create_rfc_xml_index from ietf.sync.utils import build_from_file_content, load_rfcs_into_blobdb, rsync_helper from ietf.utils import log from ietf.utils.timezone import date_today @@ -272,3 +273,10 @@ def load_rfcs_into_blobdb_task(start: int, end: int): if end > 11000: # Arbitrarily chosen end = 11000 load_rfcs_into_blobdb(list(range(start, end + 1))) + + +@shared_task +def create_rfc_index_task(): + create_rfc_txt_index() + create_rfc_xml_index() + diff --git a/ietf/sync/tests_rfcindex.py b/ietf/sync/tests_rfcindex.py index b0a8712fe1..e682c016f5 100644 --- a/ietf/sync/tests_rfcindex.py +++ b/ietf/sync/tests_rfcindex.py @@ -1,8 +1,8 @@ # Copyright The IETF Trust 2026, All Rights Reserved import json -from io import BytesIO, StringIO from unittest import mock +from django.core.files.base import ContentFile from django.core.files.storage import storages from django.test.utils import override_settings from lxml import etree @@ -13,7 +13,9 @@ create_rfc_txt_index, create_rfc_xml_index, format_rfc_number, - save_to_red_bucket, get_unusable_rfc_numbers, get_april1_rfc_numbers, + save_to_red_bucket, + get_unusable_rfc_numbers, + get_april1_rfc_numbers, get_publication_std_levels, ) from ietf.utils.test_utils import TestCase @@ -39,7 +41,7 @@ def setUp(self): # Create an unused RFC number red_bucket.save( "input/unusable-rfc-numbers.json", - StringIO(json.dumps([{"number": 123, "comment": ""}])), + 
ContentFile(json.dumps([{"number": 123, "comment": ""}])), ) # actual April 1 RFC @@ -55,7 +57,7 @@ def setUp(self): # Set up a JSON file to flag the April 1 RFC red_bucket.save( "input/april-first-rfc-numbers.json", - StringIO(json.dumps([self.april_fools_rfc.rfc_number])), + ContentFile(json.dumps([self.april_fools_rfc.rfc_number])), ) # non-April Fools RFC that happens to have been published on April 1 @@ -71,7 +73,7 @@ def setUp(self): # standard of self.rfc as different from its current value red_bucket.save( "input/publication-std-levels.json", - StringIO( + ContentFile( json.dumps( [{"number": self.rfc.rfc_number, "publication_std_level": "ps"}] ) @@ -91,7 +93,8 @@ def test_create_rfc_txt_index(self, mock_save): create_rfc_txt_index() self.assertEqual(mock_save.call_count, 1) self.assertEqual(mock_save.call_args[0][0], "rfc-index.txt") - contents = mock_save.call_args[0][1].read() + contents = mock_save.call_args[0][1] + self.assertTrue(isinstance(contents, str)) self.assertIn( "123 Not Issued.", contents, @@ -119,7 +122,8 @@ def test_create_rfc_xml_index(self, mock_save): create_rfc_xml_index() self.assertEqual(mock_save.call_count, 1) self.assertEqual(mock_save.call_args[0][0], "rfc-index.xml") - contents = mock_save.call_args[0][1].read() + contents = mock_save.call_args[0][1] + self.assertTrue(isinstance(contents, bytes)) ns = "{https://www.rfc-editor.org/rfc-index}" # NOT an f-string index = etree.fromstring(contents) @@ -190,13 +194,15 @@ def test_format_rfc_number(self): def test_save_to_red_bucket(self): red_bucket = storages["red_bucket"] with override_settings(RFCINDEX_DELETE_THEN_WRITE=False): - save_to_red_bucket("test", StringIO("contents")) - with red_bucket.open("test", "r") as f: - self.assertEqual(f.read(), "contents") + save_to_red_bucket("test", "contents \U0001f600") + # Read as binary and explicitly decode to confirm encoding + with red_bucket.open("test", "rb") as f: + self.assertEqual(f.read().decode("utf-8"), "contents \U0001f600") with override_settings(RFCINDEX_DELETE_THEN_WRITE=True): - save_to_red_bucket("test", BytesIO(b"new contents")) - with red_bucket.open("test", "r") as f: - self.assertEqual(f.read(), "new contents") + save_to_red_bucket("test", "new contents \U0001fae0".encode("utf-8")) + # Read as binary and explicitly decode to confirm encoding + with red_bucket.open("test", "rb") as f: + self.assertEqual(f.read().decode("utf-8"), "new contents \U0001fae0") red_bucket.delete("test") # clean up like a good child def test_get_unusable_rfc_numbers_raises(self): @@ -204,7 +210,7 @@ def test_get_unusable_rfc_numbers_raises(self): with self.assertRaises(FileNotFoundError): get_unusable_rfc_numbers() red_bucket = storages["red_bucket"] - red_bucket.save("unusable-rfc-numbers.json", StringIO("not json")) + red_bucket.save("unusable-rfc-numbers.json", ContentFile("not json")) with self.assertRaises(json.JSONDecodeError): get_unusable_rfc_numbers() red_bucket.delete("unusable-rfc-numbers.json") @@ -214,7 +220,7 @@ def test_get_april1_rfc_numbers_raises(self): with self.assertRaises(FileNotFoundError): get_april1_rfc_numbers() red_bucket = storages["red_bucket"] - red_bucket.save("april-first-rfc-numbers.json", StringIO("not json")) + red_bucket.save("april-first-rfc-numbers.json", ContentFile("not json")) with self.assertRaises(json.JSONDecodeError): get_april1_rfc_numbers() red_bucket.delete("april-first-rfc-numbers.json") @@ -224,7 +230,7 @@ def test_get_publication_std_levels_raises(self): with self.assertRaises(FileNotFoundError): 
get_publication_std_levels() red_bucket = storages["red_bucket"] - red_bucket.save("publication-std-levels.json", StringIO("not json")) + red_bucket.save("publication-std-levels.json", ContentFile("not json")) with self.assertRaises(json.JSONDecodeError): get_publication_std_levels() red_bucket.delete("publication-std-levels.json") diff --git a/k8s/settings_local.py b/k8s/settings_local.py index 5ca4ba5cd9..56e395c5ac 100644 --- a/k8s/settings_local.py +++ b/k8s/settings_local.py @@ -447,7 +447,7 @@ def _multiline_to_list(s): "DATATRACKER_RFCINDEX_OUTPUT_PATH", "other/" ) RFCINDEX_INPUT_PATH = os.environ.get( - "DATATRACKR_RFCINDEX_INPUT_PATH", "" + "DATATRACKER_RFCINDEX_INPUT_PATH", "" ) # Configure the blobdb app for artifact storage From 2c59afe783216285b9695e99ee64547fe4e66469 Mon Sep 17 00:00:00 2001 From: Jennifer Richards Date: Fri, 13 Mar 2026 16:14:38 -0300 Subject: [PATCH 031/102] fix: drop stale obs/updates in rfced sync (#10543) * fix: drop stale obs/updates in rfced sync * refactor: partial revert, orig was safer --- ietf/sync/rfceditor.py | 73 +++++++++++++++++++++++++++++------------- 1 file changed, 50 insertions(+), 23 deletions(-) diff --git a/ietf/sync/rfceditor.py b/ietf/sync/rfceditor.py index cdcdeb5989..aa0e643b20 100644 --- a/ietf/sync/rfceditor.py +++ b/ietf/sync/rfceditor.py @@ -636,43 +636,70 @@ def update_docs_from_rfc_index( ) rfc_published = True - def parse_relation_list(l): - res = [] - for x in l: - for a in Document.objects.filter(name=x.lower(), type_id="rfc"): - if a not in res: - res.append(a) - return res - - for x in parse_relation_list(obsoletes): - if not RelatedDocument.objects.filter( - source=doc, target=x, relationship=relationship_obsoletes + def parse_relation_list(rel_list: list[str]) -> list[Document]: + return list( + Document.objects.filter( + name__in=[name.strip().lower() for name in rel_list], + type_id="rfc" + ) + ) + + # Create missing obsoletes relations + docs_this_obsoletes = parse_relation_list(obsoletes) + for obs_doc in docs_this_obsoletes: + if not doc.relateddocument_set.filter( + target=obs_doc, relationship=relationship_obsoletes ): - r = RelatedDocument.objects.create( - source=doc, target=x, relationship=relationship_obsoletes + r = doc.relateddocument_set.create( + target=obs_doc, relationship=relationship_obsoletes ) rfc_changes.append( - "created {rel_name} relation between {src_name} and {tgt_name}".format( + "created {rel_name} relation between {src} and {tgt}".format( rel_name=r.relationship.name.lower(), - src_name=prettify_std_name(r.source.name), - tgt_name=prettify_std_name(r.target.name), + src=prettify_std_name(r.source.name), + tgt=prettify_std_name(r.target.name), ) ) + # Remove stale obsoletes relations + for r in doc.relateddocument_set.filter( + relationship=relationship_obsoletes + ).exclude(target_id__in=[d.pk for d in docs_this_obsoletes]): + r.delete() + rfc_changes.append( + "removed {rel_name} relation between {src} and {tgt}".format( + rel_name=r.relationship.name.lower(), + src=prettify_std_name(r.source.name), + tgt=prettify_std_name(r.target.name), + ) + ) - for x in parse_relation_list(updates): + docs_this_updates = parse_relation_list(updates) + for upd_doc in docs_this_updates: if not RelatedDocument.objects.filter( - source=doc, target=x, relationship=relationship_updates + source=doc, target=upd_doc, relationship=relationship_updates ): - r = RelatedDocument.objects.create( - source=doc, target=x, relationship=relationship_updates + r = doc.relateddocument_set.create( + target=upd_doc, 
relationship=relationship_updates ) rfc_changes.append( - "created {rel_name} relation between {src_name} and {tgt_name}".format( + "created {rel_name} relation between {src} and {tgt}".format( rel_name=r.relationship.name.lower(), - src_name=prettify_std_name(r.source.name), - tgt_name=prettify_std_name(r.target.name), + src=prettify_std_name(r.source.name), + tgt=prettify_std_name(r.target.name), ) ) + # Remove stale updates relations + for r in doc.relateddocument_set.filter( + relationship=relationship_updates + ).exclude(target_id__in=[d.pk for d in docs_this_updates]): + r.delete() + rfc_changes.append( + "removed {rel_name} relation between {src} and {tgt}".format( + rel_name=r.relationship.name.lower(), + src=prettify_std_name(r.source.name), + tgt=prettify_std_name(r.target.name), + ) + ) if also: # recondition also to have proper subseries document names: From 76fd25a1f39093a214be8ac2e0a9ed452beb7a47 Mon Sep 17 00:00:00 2001 From: Tianyi Gao Date: Sat, 14 Mar 2026 12:19:51 +0800 Subject: [PATCH 032/102] fix: wording in id_expired_email template (#10154) --- ietf/templates/doc/draft/id_expired_email.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ietf/templates/doc/draft/id_expired_email.txt b/ietf/templates/doc/draft/id_expired_email.txt index afbf253ee2..161146a301 100644 --- a/ietf/templates/doc/draft/id_expired_email.txt +++ b/ietf/templates/doc/draft/id_expired_email.txt @@ -1,4 +1,4 @@ -{% autoescape off %}{{ doc.file_tag|safe }} was just expired. +{% autoescape off %}{{ doc.file_tag|safe }} just expired. This Internet-Draft is in the state "{{ state }}" in the Datatracker. From 9646edc20378e101ab48ff24253861fc5ea78fe9 Mon Sep 17 00:00:00 2001 From: Rudi Matz Date: Sun, 15 Mar 2026 02:17:40 +0800 Subject: [PATCH 033/102] feat: add author affiliation in serializer (#10549) --- ietf/api/serializers_rpc.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ietf/api/serializers_rpc.py b/ietf/api/serializers_rpc.py index c17cbc64ce..a18dc588c4 100644 --- a/ietf/api/serializers_rpc.py +++ b/ietf/api/serializers_rpc.py @@ -103,7 +103,7 @@ class DocumentAuthorSerializer(serializers.ModelSerializer): class Meta: model = DocumentAuthor - fields = ["person", "plain_name"] + fields = ["person", "plain_name", "affiliation"] def get_plain_name(self, document_author: DocumentAuthor) -> str: return document_author.person.plain_name() From 36fa518ec387f425b1f11f6f9040a73e8f61df30 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 16 Mar 2026 10:02:47 -0400 Subject: [PATCH 034/102] chore(deps): bump the npm group across /dev/deploy-to-container with 5 updates (#10560) Bumps the npm group with 5 updates in the /dev/deploy-to-container directory: | Package | From | To | | --- | --- | --- | | [dockerode](https://github.com/apocas/dockerode) | `4.0.6` | `4.0.9` | | [fs-extra](https://github.com/jprichardson/node-fs-extra) | `11.3.0` | `11.3.4` | | [nanoid](https://github.com/ai/nanoid) | `5.1.5` | `5.1.7` | | [slugify](https://github.com/simov/slugify) | `1.6.6` | `1.6.8` | | [tar](https://github.com/isaacs/node-tar) | `7.4.3` | `7.5.11` | Updates `dockerode` from 4.0.6 to 4.0.9 - [Release notes](https://github.com/apocas/dockerode/releases) - [Commits](https://github.com/apocas/dockerode/compare/v4.0.6...v4.0.9) Updates `fs-extra` from 11.3.0 to 11.3.4 - [Changelog](https://github.com/jprichardson/node-fs-extra/blob/master/CHANGELOG.md) - 
[Commits](https://github.com/jprichardson/node-fs-extra/compare/11.3.0...11.3.4) Updates `nanoid` from 5.1.5 to 5.1.7 - [Release notes](https://github.com/ai/nanoid/releases) - [Changelog](https://github.com/ai/nanoid/blob/main/CHANGELOG.md) - [Commits](https://github.com/ai/nanoid/compare/5.1.5...5.1.7) Updates `slugify` from 1.6.6 to 1.6.8 - [Changelog](https://github.com/simov/slugify/blob/master/CHANGELOG.md) - [Commits](https://github.com/simov/slugify/compare/v1.6.6...v1.6.8) Updates `tar` from 7.4.3 to 7.5.11 - [Release notes](https://github.com/isaacs/node-tar/releases) - [Changelog](https://github.com/isaacs/node-tar/blob/main/CHANGELOG.md) - [Commits](https://github.com/isaacs/node-tar/compare/v7.4.3...v7.5.11) --- updated-dependencies: - dependency-name: dockerode dependency-version: 4.0.9 dependency-type: direct:production update-type: version-update:semver-patch dependency-group: npm - dependency-name: fs-extra dependency-version: 11.3.4 dependency-type: direct:production update-type: version-update:semver-patch dependency-group: npm - dependency-name: nanoid dependency-version: 5.1.7 dependency-type: direct:production update-type: version-update:semver-patch dependency-group: npm - dependency-name: slugify dependency-version: 1.6.8 dependency-type: direct:production update-type: version-update:semver-patch dependency-group: npm - dependency-name: tar dependency-version: 7.5.11 dependency-type: direct:production update-type: version-update:semver-minor dependency-group: npm ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- dev/deploy-to-container/package-lock.json | 733 +++------------------- dev/deploy-to-container/package.json | 10 +- 2 files changed, 76 insertions(+), 667 deletions(-) diff --git a/dev/deploy-to-container/package-lock.json b/dev/deploy-to-container/package-lock.json index 0954ec9af4..b62109f0e2 100644 --- a/dev/deploy-to-container/package-lock.json +++ b/dev/deploy-to-container/package-lock.json @@ -6,12 +6,12 @@ "": { "name": "deploy-to-container", "dependencies": { - "dockerode": "^4.0.6", - "fs-extra": "^11.3.0", - "nanoid": "5.1.5", + "dockerode": "^4.0.9", + "fs-extra": "^11.3.4", + "nanoid": "5.1.7", "nanoid-dictionary": "5.0.0", - "slugify": "1.6.6", - "tar": "^7.4.3", + "slugify": "1.6.8", + "tar": "^7.5.11", "yargs": "^17.7.2" }, "engines": { @@ -52,95 +52,6 @@ "node": ">=6" } }, - "node_modules/@isaacs/cliui": { - "version": "8.0.2", - "resolved": "https://registry.npmjs.org/@isaacs/cliui/-/cliui-8.0.2.tgz", - "integrity": "sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==", - "dependencies": { - "string-width": "^5.1.2", - "string-width-cjs": "npm:string-width@^4.2.0", - "strip-ansi": "^7.0.1", - "strip-ansi-cjs": "npm:strip-ansi@^6.0.1", - "wrap-ansi": "^8.1.0", - "wrap-ansi-cjs": "npm:wrap-ansi@^7.0.0" - }, - "engines": { - "node": ">=12" - } - }, - "node_modules/@isaacs/cliui/node_modules/ansi-regex": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.0.1.tgz", - "integrity": "sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA==", - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/chalk/ansi-regex?sponsor=1" - } - }, - "node_modules/@isaacs/cliui/node_modules/ansi-styles": { - "version": "6.2.1", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.1.tgz", - "integrity": 
"sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug==", - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/chalk/ansi-styles?sponsor=1" - } - }, - "node_modules/@isaacs/cliui/node_modules/emoji-regex": { - "version": "9.2.2", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz", - "integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==" - }, - "node_modules/@isaacs/cliui/node_modules/string-width": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-5.1.2.tgz", - "integrity": "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==", - "dependencies": { - "eastasianwidth": "^0.2.0", - "emoji-regex": "^9.2.2", - "strip-ansi": "^7.0.1" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/@isaacs/cliui/node_modules/strip-ansi": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.0.tgz", - "integrity": "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==", - "dependencies": { - "ansi-regex": "^6.0.1" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/chalk/strip-ansi?sponsor=1" - } - }, - "node_modules/@isaacs/cliui/node_modules/wrap-ansi": { - "version": "8.1.0", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-8.1.0.tgz", - "integrity": "sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ==", - "dependencies": { - "ansi-styles": "^6.1.0", - "string-width": "^5.0.1", - "strip-ansi": "^7.0.1" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/chalk/wrap-ansi?sponsor=1" - } - }, "node_modules/@isaacs/fs-minipass": { "version": "4.0.1", "resolved": "https://registry.npmjs.org/@isaacs/fs-minipass/-/fs-minipass-4.0.1.tgz", @@ -161,15 +72,6 @@ "url": "https://opencollective.com/js-sdsl" } }, - "node_modules/@pkgjs/parseargs": { - "version": "0.11.0", - "resolved": "https://registry.npmjs.org/@pkgjs/parseargs/-/parseargs-0.11.0.tgz", - "integrity": "sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==", - "optional": true, - "engines": { - "node": ">=14" - } - }, "node_modules/@protobufjs/aspromise": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/@protobufjs/aspromise/-/aspromise-1.1.2.tgz", @@ -263,11 +165,6 @@ "safer-buffer": "~2.1.0" } }, - "node_modules/balanced-match": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", - "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==" - }, "node_modules/base64-js": { "version": "1.5.1", "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz", @@ -285,8 +182,7 @@ "type": "consulting", "url": "https://feross.org/support" } - ], - "license": "MIT" + ] }, "node_modules/bcrypt-pbkdf": { "version": "1.0.2", @@ -301,21 +197,12 @@ "version": "4.1.0", "resolved": "https://registry.npmjs.org/bl/-/bl-4.1.0.tgz", "integrity": "sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w==", - "license": "MIT", "dependencies": { "buffer": "^5.5.0", "inherits": "^2.0.4", "readable-stream": "^3.4.0" } }, - 
"node_modules/brace-expansion": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", - "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", - "dependencies": { - "balanced-match": "^1.0.0" - } - }, "node_modules/buffer": { "version": "5.7.1", "resolved": "https://registry.npmjs.org/buffer/-/buffer-5.7.1.tgz", @@ -334,7 +221,6 @@ "url": "https://feross.org/support" } ], - "license": "MIT", "dependencies": { "base64-js": "^1.3.1", "ieee754": "^1.1.13" @@ -352,8 +238,7 @@ "node_modules/chownr": { "version": "1.1.4", "resolved": "https://registry.npmjs.org/chownr/-/chownr-1.1.4.tgz", - "integrity": "sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg==", - "license": "ISC" + "integrity": "sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg==" }, "node_modules/cliui": { "version": "8.0.1", @@ -398,19 +283,6 @@ "node": ">=10.0.0" } }, - "node_modules/cross-spawn": { - "version": "7.0.3", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", - "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", - "dependencies": { - "path-key": "^3.1.0", - "shebang-command": "^2.0.0", - "which": "^2.0.1" - }, - "engines": { - "node": ">= 8" - } - }, "node_modules/debug": { "version": "4.4.0", "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.0.tgz", @@ -444,38 +316,31 @@ } }, "node_modules/dockerode": { - "version": "4.0.6", - "resolved": "https://registry.npmjs.org/dockerode/-/dockerode-4.0.6.tgz", - "integrity": "sha512-FbVf3Z8fY/kALB9s+P9epCpWhfi/r0N2DgYYcYpsAUlaTxPjdsitsFobnltb+lyCgAIvf9C+4PSWlTnHlJMf1w==", - "license": "Apache-2.0", + "version": "4.0.9", + "resolved": "https://registry.npmjs.org/dockerode/-/dockerode-4.0.9.tgz", + "integrity": "sha512-iND4mcOWhPaCNh54WmK/KoSb35AFqPAUWFMffTQcp52uQt36b5uNwEJTSXntJZBbeGad72Crbi/hvDIv6us/6Q==", "dependencies": { "@balena/dockerignore": "^1.0.2", "@grpc/grpc-js": "^1.11.1", "@grpc/proto-loader": "^0.7.13", "docker-modem": "^5.0.6", "protobufjs": "^7.3.2", - "tar-fs": "~2.1.2", + "tar-fs": "^2.1.4", "uuid": "^10.0.0" }, "engines": { "node": ">= 8.0" } }, - "node_modules/eastasianwidth": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/eastasianwidth/-/eastasianwidth-0.2.0.tgz", - "integrity": "sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==" - }, "node_modules/emoji-regex": { "version": "8.0.0", "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==" }, "node_modules/end-of-stream": { - "version": "1.4.4", - "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.4.tgz", - "integrity": "sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q==", - "license": "MIT", + "version": "1.4.5", + "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.5.tgz", + "integrity": "sha512-ooEGc6HP26xXq/N+GCGOT0JKCLDGrq2bQUZrQ7gyrJiZANJ/8YDTxTpQBXGMn+WbIQXNVpyWymm7KYVICQnyOg==", "dependencies": { "once": "^1.4.0" } @@ -488,32 +353,15 @@ "node": ">=6" } }, - "node_modules/foreground-child": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-3.1.1.tgz", - "integrity": 
"sha512-TMKDUnIte6bfb5nWv7V/caI169OHgvwjb7V4WkeUvbQQdjr5rWKqHFiKWb/fcOwB+CzBT+qbWjvj+DVwRskpIg==", - "dependencies": { - "cross-spawn": "^7.0.0", - "signal-exit": "^4.0.1" - }, - "engines": { - "node": ">=14" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, "node_modules/fs-constants": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/fs-constants/-/fs-constants-1.0.0.tgz", - "integrity": "sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow==", - "license": "MIT" + "integrity": "sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow==" }, "node_modules/fs-extra": { - "version": "11.3.0", - "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-11.3.0.tgz", - "integrity": "sha512-Z4XaCL6dUDHfP/jT25jJKMmtxvuwbkrD1vNSMFlo9lNLY2c5FHYSQgHPRZUjAB26TpDEoW9HCOgplrdbaPV/ew==", - "license": "MIT", + "version": "11.3.4", + "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-11.3.4.tgz", + "integrity": "sha512-CTXd6rk/M3/ULNQj8FBqBWHYBVYybQ3VPBw0xGKFe3tuH7ytT6ACnvzpIQ3UZtB8yvUKC2cXn1a+x+5EVQLovA==", "dependencies": { "graceful-fs": "^4.2.0", "jsonfile": "^6.0.1", @@ -531,27 +379,6 @@ "node": "6.* || 8.* || >= 10.*" } }, - "node_modules/glob": { - "version": "10.3.12", - "resolved": "https://registry.npmjs.org/glob/-/glob-10.3.12.tgz", - "integrity": "sha512-TCNv8vJ+xz4QiqTpfOJA7HvYv+tNIRHKfUWw/q+v2jdgN4ebz+KY9tGx5J4rHP0o84mNP+ApH66HRX8us3Khqg==", - "dependencies": { - "foreground-child": "^3.1.0", - "jackspeak": "^2.3.6", - "minimatch": "^9.0.1", - "minipass": "^7.0.4", - "path-scurry": "^1.10.2" - }, - "bin": { - "glob": "dist/esm/bin.mjs" - }, - "engines": { - "node": ">=16 || 14 >=14.17" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, "node_modules/graceful-fs": { "version": "4.2.10", "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.10.tgz", @@ -574,8 +401,7 @@ "type": "consulting", "url": "https://feross.org/support" } - ], - "license": "BSD-3-Clause" + ] }, "node_modules/inherits": { "version": "2.0.4", @@ -590,28 +416,6 @@ "node": ">=8" } }, - "node_modules/isexe": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", - "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==" - }, - "node_modules/jackspeak": { - "version": "2.3.6", - "resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-2.3.6.tgz", - "integrity": "sha512-N3yCS/NegsOBokc8GAdM8UcmfsKiSS8cipheD/nivzr700H+nsMOxJjQnvwOcRYVuFkdH0wGUvW2WbXGmrZGbQ==", - "dependencies": { - "@isaacs/cliui": "^8.0.2" - }, - "engines": { - "node": ">=14" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - }, - "optionalDependencies": { - "@pkgjs/parseargs": "^0.11.0" - } - }, "node_modules/jsonfile": { "version": "6.1.0", "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-6.1.0.tgz", @@ -633,28 +437,6 @@ "resolved": "https://registry.npmjs.org/long/-/long-5.2.4.tgz", "integrity": "sha512-qtzLbJE8hq7VabR3mISmVGtoXP8KGc2Z/AT8OuqlYD7JTR3oqrgwdjnk07wpj1twXxYmgDXgoKVWUG/fReSzHg==" }, - "node_modules/lru-cache": { - "version": "10.2.2", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.2.2.tgz", - "integrity": "sha512-9hp3Vp2/hFQUiIwKo8XCeFVnrg8Pk3TYNPIR7tJADKi5YfcF7vEaK7avFHTlSy3kOKYaJQaalfEo6YuXdceBOQ==", - "engines": { - "node": "14 || >=16.14" - } - }, - "node_modules/minimatch": { - "version": "9.0.4", - "resolved": 
"https://registry.npmjs.org/minimatch/-/minimatch-9.0.4.tgz", - "integrity": "sha512-KqWh+VchfxcMNRAJjj2tnsSJdNbHsVgnkBhTNrW7AjVo6OvLtxw8zfT9oLw1JSohlFzJ8jCoTgaoXvJ+kHt6fw==", - "dependencies": { - "brace-expansion": "^2.0.1" - }, - "engines": { - "node": ">=16 || 14 >=14.17" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, "node_modules/minipass": { "version": "7.1.2", "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.1.2.tgz", @@ -664,36 +446,20 @@ } }, "node_modules/minizlib": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-3.0.1.tgz", - "integrity": "sha512-umcy022ILvb5/3Djuu8LWeqUa8D68JaBzlttKeMWen48SjabqS3iY5w/vzeMzMUNhLDifyhbOwKDSznB1vvrwg==", + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-3.1.0.tgz", + "integrity": "sha512-KZxYo1BUkWD2TVFLr0MQoM8vUUigWD3LlD83a/75BqC+4qE0Hb1Vo5v1FgcfaNXvfXzr+5EhQ6ing/CaBijTlw==", "dependencies": { - "minipass": "^7.0.4", - "rimraf": "^5.0.5" + "minipass": "^7.1.2" }, "engines": { "node": ">= 18" } }, - "node_modules/mkdirp": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-3.0.1.tgz", - "integrity": "sha512-+NsyUUAZDmo6YVHzL/stxSu3t9YS1iljliy3BSDrXJ/dkn1KYdmtZODGGjLcc9XLgVVpH4KshHB8XmZgMhaBXg==", - "bin": { - "mkdirp": "dist/cjs/src/bin.js" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, "node_modules/mkdirp-classic": { "version": "0.5.3", "resolved": "https://registry.npmjs.org/mkdirp-classic/-/mkdirp-classic-0.5.3.tgz", - "integrity": "sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A==", - "license": "MIT" + "integrity": "sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A==" }, "node_modules/ms": { "version": "2.1.3", @@ -709,16 +475,15 @@ "optional": true }, "node_modules/nanoid": { - "version": "5.1.5", - "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-5.1.5.tgz", - "integrity": "sha512-Ir/+ZpE9fDsNH0hQ3C68uyThDXzYcim2EqcZ8zn8Chtt1iylPT9xXJB0kPCnqzgcEGikO9RxSrh63MsmVCU7Fw==", + "version": "5.1.7", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-5.1.7.tgz", + "integrity": "sha512-ua3NDgISf6jdwezAheMOk4mbE1LXjm1DfMUDMuJf4AqxLFK3ccGpgWizwa5YV7Yz9EpXwEaWoRXSb/BnV0t5dQ==", "funding": [ { "type": "github", "url": "https://github.com/sponsors/ai" } ], - "license": "MIT", "bin": { "nanoid": "bin/nanoid.js" }, @@ -736,34 +501,10 @@ "version": "1.4.0", "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", - "license": "ISC", "dependencies": { "wrappy": "1" } }, - "node_modules/path-key": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", - "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", - "engines": { - "node": ">=8" - } - }, - "node_modules/path-scurry": { - "version": "1.10.2", - "resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-1.10.2.tgz", - "integrity": "sha512-7xTavNy5RQXnsjANvVvMkEjvloOinkAjv/Z6Ildz9v2RinZ4SBKTWFOVRbaF8p0vpHnyjV/UwNDdKuUv6M5qcA==", - "dependencies": { - "lru-cache": "^10.2.0", - "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0" - }, - "engines": { - "node": ">=16 || 14 >=14.17" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, "node_modules/protobufjs": { 
"version": "7.4.0", "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-7.4.0.tgz", @@ -788,10 +529,9 @@ } }, "node_modules/pump": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/pump/-/pump-3.0.2.tgz", - "integrity": "sha512-tUPXtzlGM8FE3P0ZL6DVs/3P58k9nk8/jZeQCurTJylQA8qFYzHFfhBJkuqyE0FifOsQ0uKWekiZ5g8wtr28cw==", - "license": "MIT", + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/pump/-/pump-3.0.4.tgz", + "integrity": "sha512-VS7sjc6KR7e1ukRFhQSY5LM2uBWAUPiOPa/A3mkKmiMwSmRFUITt0xuj+/lesgnCv+dPIEYlkzrcyXgquIHMcA==", "dependencies": { "end-of-stream": "^1.1.0", "once": "^1.3.1" @@ -818,23 +558,6 @@ "node": ">=0.10.0" } }, - "node_modules/rimraf": { - "version": "5.0.5", - "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-5.0.5.tgz", - "integrity": "sha512-CqDakW+hMe/Bz202FPEymy68P+G50RfMQK+Qo5YUqc9SPipvbGjCGKd0RSKEelbsfQuw3g5NZDSrlZZAJurH1A==", - "dependencies": { - "glob": "^10.3.7" - }, - "bin": { - "rimraf": "dist/esm/bin.mjs" - }, - "engines": { - "node": ">=14" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, "node_modules/safe-buffer": { "version": "5.2.1", "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", @@ -860,40 +583,10 @@ "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==", "license": "MIT" }, - "node_modules/shebang-command": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", - "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", - "dependencies": { - "shebang-regex": "^3.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/shebang-regex": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", - "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", - "engines": { - "node": ">=8" - } - }, - "node_modules/signal-exit": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz", - "integrity": "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==", - "engines": { - "node": ">=14" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, "node_modules/slugify": { - "version": "1.6.6", - "resolved": "https://registry.npmjs.org/slugify/-/slugify-1.6.6.tgz", - "integrity": "sha512-h+z7HKHYXj6wJU+AnS/+IH8Uh9fdcX1Lrhg1/VMdf9PwoBQXFcXiAdsy2tSK0P6gKwJLXp02r90ahUCqHk9rrw==", + "version": "1.6.8", + "resolved": "https://registry.npmjs.org/slugify/-/slugify-1.6.8.tgz", + "integrity": "sha512-HVk9X1E0gz3mSpoi60h/saazLKXKaZThMLU3u/aNwoYn8/xQyX2MGxL0ui2eaokkD7tF+Zo+cKTHUbe1mmmGzA==", "engines": { "node": ">=8.0.0" } @@ -942,20 +635,6 @@ "node": ">=8" } }, - "node_modules/string-width-cjs": { - "name": "string-width", - "version": "4.2.3", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", - "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", - "dependencies": { - "emoji-regex": "^8.0.0", - "is-fullwidth-code-point": "^3.0.0", - "strip-ansi": "^6.0.1" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/strip-ansi": { "version": "6.0.1", "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", @@ -967,28 +646,15 @@ "node": ">=8" } }, - "node_modules/strip-ansi-cjs": { - "name": 
"strip-ansi", - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "dependencies": { - "ansi-regex": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/tar": { - "version": "7.4.3", - "resolved": "https://registry.npmjs.org/tar/-/tar-7.4.3.tgz", - "integrity": "sha512-5S7Va8hKfV7W5U6g3aYxXmlPoZVAwUMy9AOKyF2fVuZa2UD3qZjg578OrLRt8PcNN1PleVaL/5/yYATNL0ICUw==", + "version": "7.5.11", + "resolved": "https://registry.npmjs.org/tar/-/tar-7.5.11.tgz", + "integrity": "sha512-ChjMH33/KetonMTAtpYdgUFr0tbz69Fp2v7zWxQfYZX4g5ZN2nOBXm1R2xyA+lMIKrLKIoKAwFj93jE/avX9cQ==", "dependencies": { "@isaacs/fs-minipass": "^4.0.0", "chownr": "^3.0.0", "minipass": "^7.1.2", - "minizlib": "^3.0.1", - "mkdirp": "^3.0.1", + "minizlib": "^3.1.0", "yallist": "^5.0.0" }, "engines": { @@ -996,10 +662,9 @@ } }, "node_modules/tar-fs": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/tar-fs/-/tar-fs-2.1.2.tgz", - "integrity": "sha512-EsaAXwxmx8UB7FRKqeozqEPop69DXcmYwTQwXvyAPF352HJsPdkVhvTaDPYqfNgruveJIJy3TA2l+2zj8LJIJA==", - "license": "MIT", + "version": "2.1.4", + "resolved": "https://registry.npmjs.org/tar-fs/-/tar-fs-2.1.4.tgz", + "integrity": "sha512-mDAjwmZdh7LTT6pNleZ05Yt65HC3E+NiQzl672vQG38jIrehtJk/J3mNwIg+vShQPcLF/LV7CMnDW6vjj6sfYQ==", "dependencies": { "chownr": "^1.1.1", "mkdirp-classic": "^0.5.2", @@ -1011,7 +676,6 @@ "version": "2.2.0", "resolved": "https://registry.npmjs.org/tar-stream/-/tar-stream-2.2.0.tgz", "integrity": "sha512-ujeqbceABgwMZxEJnk2HDY2DlnUZ+9oEcb1KzTVfYHio0UE6dG71n60d8D2I4qNvleWrrXpmjpt7vZeF1LnMZQ==", - "license": "MIT", "dependencies": { "bl": "^4.0.3", "end-of-stream": "^1.4.1", @@ -1067,20 +731,6 @@ "uuid": "dist/bin/uuid" } }, - "node_modules/which": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", - "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", - "dependencies": { - "isexe": "^2.0.0" - }, - "bin": { - "node-which": "bin/node-which" - }, - "engines": { - "node": ">= 8" - } - }, "node_modules/wrap-ansi": { "version": "7.0.0", "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", @@ -1097,28 +747,10 @@ "url": "https://github.com/chalk/wrap-ansi?sponsor=1" } }, - "node_modules/wrap-ansi-cjs": { - "name": "wrap-ansi", - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", - "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", - "dependencies": { - "ansi-styles": "^4.0.0", - "string-width": "^4.1.0", - "strip-ansi": "^6.0.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/chalk/wrap-ansi?sponsor=1" - } - }, "node_modules/wrappy": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", - "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", - "license": "ISC" + "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==" }, "node_modules/y18n": { "version": "5.0.8", @@ -1188,64 +820,6 @@ "yargs": "^17.7.2" } }, - "@isaacs/cliui": { - "version": "8.0.2", - "resolved": "https://registry.npmjs.org/@isaacs/cliui/-/cliui-8.0.2.tgz", - "integrity": 
"sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==", - "requires": { - "string-width": "^5.1.2", - "string-width-cjs": "npm:string-width@^4.2.0", - "strip-ansi": "^7.0.1", - "strip-ansi-cjs": "npm:strip-ansi@^6.0.1", - "wrap-ansi": "^8.1.0", - "wrap-ansi-cjs": "npm:wrap-ansi@^7.0.0" - }, - "dependencies": { - "ansi-regex": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.0.1.tgz", - "integrity": "sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA==" - }, - "ansi-styles": { - "version": "6.2.1", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.1.tgz", - "integrity": "sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug==" - }, - "emoji-regex": { - "version": "9.2.2", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz", - "integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==" - }, - "string-width": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-5.1.2.tgz", - "integrity": "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==", - "requires": { - "eastasianwidth": "^0.2.0", - "emoji-regex": "^9.2.2", - "strip-ansi": "^7.0.1" - } - }, - "strip-ansi": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.0.tgz", - "integrity": "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==", - "requires": { - "ansi-regex": "^6.0.1" - } - }, - "wrap-ansi": { - "version": "8.1.0", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-8.1.0.tgz", - "integrity": "sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ==", - "requires": { - "ansi-styles": "^6.1.0", - "string-width": "^5.0.1", - "strip-ansi": "^7.0.1" - } - } - } - }, "@isaacs/fs-minipass": { "version": "4.0.1", "resolved": "https://registry.npmjs.org/@isaacs/fs-minipass/-/fs-minipass-4.0.1.tgz", @@ -1259,12 +833,6 @@ "resolved": "https://registry.npmjs.org/@js-sdsl/ordered-map/-/ordered-map-4.4.2.tgz", "integrity": "sha512-iUKgm52T8HOE/makSxjqoWhe95ZJA1/G1sYsGev2JDKUSS14KAgg1LHb+Ba+IPow0xflbnSkOsZcO08C7w1gYw==" }, - "@pkgjs/parseargs": { - "version": "0.11.0", - "resolved": "https://registry.npmjs.org/@pkgjs/parseargs/-/parseargs-0.11.0.tgz", - "integrity": "sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==", - "optional": true - }, "@protobufjs/aspromise": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/@protobufjs/aspromise/-/aspromise-1.1.2.tgz", @@ -1348,11 +916,6 @@ "safer-buffer": "~2.1.0" } }, - "balanced-match": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", - "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==" - }, "base64-js": { "version": "1.5.1", "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz", @@ -1376,14 +939,6 @@ "readable-stream": "^3.4.0" } }, - "brace-expansion": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", - "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", - "requires": { - "balanced-match": "^1.0.0" - } 
- }, "buffer": { "version": "5.7.1", "resolved": "https://registry.npmjs.org/buffer/-/buffer-5.7.1.tgz", @@ -1437,16 +992,6 @@ "nan": "^2.19.0" } }, - "cross-spawn": { - "version": "7.0.3", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", - "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", - "requires": { - "path-key": "^3.1.0", - "shebang-command": "^2.0.0", - "which": "^2.0.1" - } - }, "debug": { "version": "4.4.0", "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.0.tgz", @@ -1467,33 +1012,28 @@ } }, "dockerode": { - "version": "4.0.6", - "resolved": "https://registry.npmjs.org/dockerode/-/dockerode-4.0.6.tgz", - "integrity": "sha512-FbVf3Z8fY/kALB9s+P9epCpWhfi/r0N2DgYYcYpsAUlaTxPjdsitsFobnltb+lyCgAIvf9C+4PSWlTnHlJMf1w==", + "version": "4.0.9", + "resolved": "https://registry.npmjs.org/dockerode/-/dockerode-4.0.9.tgz", + "integrity": "sha512-iND4mcOWhPaCNh54WmK/KoSb35AFqPAUWFMffTQcp52uQt36b5uNwEJTSXntJZBbeGad72Crbi/hvDIv6us/6Q==", "requires": { "@balena/dockerignore": "^1.0.2", "@grpc/grpc-js": "^1.11.1", "@grpc/proto-loader": "^0.7.13", "docker-modem": "^5.0.6", "protobufjs": "^7.3.2", - "tar-fs": "~2.1.2", + "tar-fs": "^2.1.4", "uuid": "^10.0.0" } }, - "eastasianwidth": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/eastasianwidth/-/eastasianwidth-0.2.0.tgz", - "integrity": "sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==" - }, "emoji-regex": { "version": "8.0.0", "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==" }, "end-of-stream": { - "version": "1.4.4", - "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.4.tgz", - "integrity": "sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q==", + "version": "1.4.5", + "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.5.tgz", + "integrity": "sha512-ooEGc6HP26xXq/N+GCGOT0JKCLDGrq2bQUZrQ7gyrJiZANJ/8YDTxTpQBXGMn+WbIQXNVpyWymm7KYVICQnyOg==", "requires": { "once": "^1.4.0" } @@ -1503,24 +1043,15 @@ "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz", "integrity": "sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==" }, - "foreground-child": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-3.1.1.tgz", - "integrity": "sha512-TMKDUnIte6bfb5nWv7V/caI169OHgvwjb7V4WkeUvbQQdjr5rWKqHFiKWb/fcOwB+CzBT+qbWjvj+DVwRskpIg==", - "requires": { - "cross-spawn": "^7.0.0", - "signal-exit": "^4.0.1" - } - }, "fs-constants": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/fs-constants/-/fs-constants-1.0.0.tgz", "integrity": "sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow==" }, "fs-extra": { - "version": "11.3.0", - "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-11.3.0.tgz", - "integrity": "sha512-Z4XaCL6dUDHfP/jT25jJKMmtxvuwbkrD1vNSMFlo9lNLY2c5FHYSQgHPRZUjAB26TpDEoW9HCOgplrdbaPV/ew==", + "version": "11.3.4", + "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-11.3.4.tgz", + "integrity": "sha512-CTXd6rk/M3/ULNQj8FBqBWHYBVYybQ3VPBw0xGKFe3tuH7ytT6ACnvzpIQ3UZtB8yvUKC2cXn1a+x+5EVQLovA==", "requires": { "graceful-fs": "^4.2.0", "jsonfile": "^6.0.1", @@ -1532,18 +1063,6 @@ "resolved": 
"https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==" }, - "glob": { - "version": "10.3.12", - "resolved": "https://registry.npmjs.org/glob/-/glob-10.3.12.tgz", - "integrity": "sha512-TCNv8vJ+xz4QiqTpfOJA7HvYv+tNIRHKfUWw/q+v2jdgN4ebz+KY9tGx5J4rHP0o84mNP+ApH66HRX8us3Khqg==", - "requires": { - "foreground-child": "^3.1.0", - "jackspeak": "^2.3.6", - "minimatch": "^9.0.1", - "minipass": "^7.0.4", - "path-scurry": "^1.10.2" - } - }, "graceful-fs": { "version": "4.2.10", "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.10.tgz", @@ -1564,20 +1083,6 @@ "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==" }, - "isexe": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", - "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==" - }, - "jackspeak": { - "version": "2.3.6", - "resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-2.3.6.tgz", - "integrity": "sha512-N3yCS/NegsOBokc8GAdM8UcmfsKiSS8cipheD/nivzr700H+nsMOxJjQnvwOcRYVuFkdH0wGUvW2WbXGmrZGbQ==", - "requires": { - "@isaacs/cliui": "^8.0.2", - "@pkgjs/parseargs": "^0.11.0" - } - }, "jsonfile": { "version": "6.1.0", "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-6.1.0.tgz", @@ -1597,38 +1102,19 @@ "resolved": "https://registry.npmjs.org/long/-/long-5.2.4.tgz", "integrity": "sha512-qtzLbJE8hq7VabR3mISmVGtoXP8KGc2Z/AT8OuqlYD7JTR3oqrgwdjnk07wpj1twXxYmgDXgoKVWUG/fReSzHg==" }, - "lru-cache": { - "version": "10.2.2", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.2.2.tgz", - "integrity": "sha512-9hp3Vp2/hFQUiIwKo8XCeFVnrg8Pk3TYNPIR7tJADKi5YfcF7vEaK7avFHTlSy3kOKYaJQaalfEo6YuXdceBOQ==" - }, - "minimatch": { - "version": "9.0.4", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.4.tgz", - "integrity": "sha512-KqWh+VchfxcMNRAJjj2tnsSJdNbHsVgnkBhTNrW7AjVo6OvLtxw8zfT9oLw1JSohlFzJ8jCoTgaoXvJ+kHt6fw==", - "requires": { - "brace-expansion": "^2.0.1" - } - }, "minipass": { "version": "7.1.2", "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.1.2.tgz", "integrity": "sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw==" }, "minizlib": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-3.0.1.tgz", - "integrity": "sha512-umcy022ILvb5/3Djuu8LWeqUa8D68JaBzlttKeMWen48SjabqS3iY5w/vzeMzMUNhLDifyhbOwKDSznB1vvrwg==", + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-3.1.0.tgz", + "integrity": "sha512-KZxYo1BUkWD2TVFLr0MQoM8vUUigWD3LlD83a/75BqC+4qE0Hb1Vo5v1FgcfaNXvfXzr+5EhQ6ing/CaBijTlw==", "requires": { - "minipass": "^7.0.4", - "rimraf": "^5.0.5" + "minipass": "^7.1.2" } }, - "mkdirp": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-3.0.1.tgz", - "integrity": "sha512-+NsyUUAZDmo6YVHzL/stxSu3t9YS1iljliy3BSDrXJ/dkn1KYdmtZODGGjLcc9XLgVVpH4KshHB8XmZgMhaBXg==" - }, "mkdirp-classic": { "version": "0.5.3", "resolved": "https://registry.npmjs.org/mkdirp-classic/-/mkdirp-classic-0.5.3.tgz", @@ -1646,9 +1132,9 @@ "optional": true }, "nanoid": { - "version": "5.1.5", - "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-5.1.5.tgz", - "integrity": 
"sha512-Ir/+ZpE9fDsNH0hQ3C68uyThDXzYcim2EqcZ8zn8Chtt1iylPT9xXJB0kPCnqzgcEGikO9RxSrh63MsmVCU7Fw==" + "version": "5.1.7", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-5.1.7.tgz", + "integrity": "sha512-ua3NDgISf6jdwezAheMOk4mbE1LXjm1DfMUDMuJf4AqxLFK3ccGpgWizwa5YV7Yz9EpXwEaWoRXSb/BnV0t5dQ==" }, "nanoid-dictionary": { "version": "5.0.0", @@ -1663,20 +1149,6 @@ "wrappy": "1" } }, - "path-key": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", - "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==" - }, - "path-scurry": { - "version": "1.10.2", - "resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-1.10.2.tgz", - "integrity": "sha512-7xTavNy5RQXnsjANvVvMkEjvloOinkAjv/Z6Ildz9v2RinZ4SBKTWFOVRbaF8p0vpHnyjV/UwNDdKuUv6M5qcA==", - "requires": { - "lru-cache": "^10.2.0", - "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0" - } - }, "protobufjs": { "version": "7.4.0", "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-7.4.0.tgz", @@ -1697,9 +1169,9 @@ } }, "pump": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/pump/-/pump-3.0.2.tgz", - "integrity": "sha512-tUPXtzlGM8FE3P0ZL6DVs/3P58k9nk8/jZeQCurTJylQA8qFYzHFfhBJkuqyE0FifOsQ0uKWekiZ5g8wtr28cw==", + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/pump/-/pump-3.0.4.tgz", + "integrity": "sha512-VS7sjc6KR7e1ukRFhQSY5LM2uBWAUPiOPa/A3mkKmiMwSmRFUITt0xuj+/lesgnCv+dPIEYlkzrcyXgquIHMcA==", "requires": { "end-of-stream": "^1.1.0", "once": "^1.3.1" @@ -1720,14 +1192,6 @@ "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", "integrity": "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==" }, - "rimraf": { - "version": "5.0.5", - "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-5.0.5.tgz", - "integrity": "sha512-CqDakW+hMe/Bz202FPEymy68P+G50RfMQK+Qo5YUqc9SPipvbGjCGKd0RSKEelbsfQuw3g5NZDSrlZZAJurH1A==", - "requires": { - "glob": "^10.3.7" - } - }, "safe-buffer": { "version": "5.2.1", "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", @@ -1738,28 +1202,10 @@ "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==" }, - "shebang-command": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", - "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", - "requires": { - "shebang-regex": "^3.0.0" - } - }, - "shebang-regex": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", - "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==" - }, - "signal-exit": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz", - "integrity": "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==" - }, "slugify": { - "version": "1.6.6", - "resolved": "https://registry.npmjs.org/slugify/-/slugify-1.6.6.tgz", - "integrity": "sha512-h+z7HKHYXj6wJU+AnS/+IH8Uh9fdcX1Lrhg1/VMdf9PwoBQXFcXiAdsy2tSK0P6gKwJLXp02r90ahUCqHk9rrw==" + "version": "1.6.8", + "resolved": "https://registry.npmjs.org/slugify/-/slugify-1.6.8.tgz", + "integrity": 
"sha512-HVk9X1E0gz3mSpoi60h/saazLKXKaZThMLU3u/aNwoYn8/xQyX2MGxL0ui2eaokkD7tF+Zo+cKTHUbe1mmmGzA==" }, "split-ca": { "version": "1.0.1", @@ -1795,16 +1241,6 @@ "strip-ansi": "^6.0.1" } }, - "string-width-cjs": { - "version": "npm:string-width@4.2.3", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", - "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", - "requires": { - "emoji-regex": "^8.0.0", - "is-fullwidth-code-point": "^3.0.0", - "strip-ansi": "^6.0.1" - } - }, "strip-ansi": { "version": "6.0.1", "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", @@ -1813,24 +1249,15 @@ "ansi-regex": "^5.0.1" } }, - "strip-ansi-cjs": { - "version": "npm:strip-ansi@6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "requires": { - "ansi-regex": "^5.0.1" - } - }, "tar": { - "version": "7.4.3", - "resolved": "https://registry.npmjs.org/tar/-/tar-7.4.3.tgz", - "integrity": "sha512-5S7Va8hKfV7W5U6g3aYxXmlPoZVAwUMy9AOKyF2fVuZa2UD3qZjg578OrLRt8PcNN1PleVaL/5/yYATNL0ICUw==", + "version": "7.5.11", + "resolved": "https://registry.npmjs.org/tar/-/tar-7.5.11.tgz", + "integrity": "sha512-ChjMH33/KetonMTAtpYdgUFr0tbz69Fp2v7zWxQfYZX4g5ZN2nOBXm1R2xyA+lMIKrLKIoKAwFj93jE/avX9cQ==", "requires": { "@isaacs/fs-minipass": "^4.0.0", "chownr": "^3.0.0", "minipass": "^7.1.2", - "minizlib": "^3.0.1", - "mkdirp": "^3.0.1", + "minizlib": "^3.1.0", "yallist": "^5.0.0" }, "dependencies": { @@ -1842,9 +1269,9 @@ } }, "tar-fs": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/tar-fs/-/tar-fs-2.1.2.tgz", - "integrity": "sha512-EsaAXwxmx8UB7FRKqeozqEPop69DXcmYwTQwXvyAPF352HJsPdkVhvTaDPYqfNgruveJIJy3TA2l+2zj8LJIJA==", + "version": "2.1.4", + "resolved": "https://registry.npmjs.org/tar-fs/-/tar-fs-2.1.4.tgz", + "integrity": "sha512-mDAjwmZdh7LTT6pNleZ05Yt65HC3E+NiQzl672vQG38jIrehtJk/J3mNwIg+vShQPcLF/LV7CMnDW6vjj6sfYQ==", "requires": { "chownr": "^1.1.1", "mkdirp-classic": "^0.5.2", @@ -1889,14 +1316,6 @@ "resolved": "https://registry.npmjs.org/uuid/-/uuid-10.0.0.tgz", "integrity": "sha512-8XkAphELsDnEGrDxUOHB3RGvXz6TeuYSGEZBOjtTtPm2lwhGBjLgOzLHB63IUWfBpNucQjND6d3AOudO+H3RWQ==" }, - "which": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", - "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", - "requires": { - "isexe": "^2.0.0" - } - }, "wrap-ansi": { "version": "7.0.0", "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", @@ -1907,16 +1326,6 @@ "strip-ansi": "^6.0.0" } }, - "wrap-ansi-cjs": { - "version": "npm:wrap-ansi@7.0.0", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", - "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", - "requires": { - "ansi-styles": "^4.0.0", - "string-width": "^4.1.0", - "strip-ansi": "^6.0.0" - } - }, "wrappy": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", diff --git a/dev/deploy-to-container/package.json b/dev/deploy-to-container/package.json index 09716c3094..1c95a4540c 100644 --- a/dev/deploy-to-container/package.json +++ b/dev/deploy-to-container/package.json @@ -2,12 +2,12 @@ "name": "deploy-to-container", "type": "module", "dependencies": { - "dockerode": "^4.0.6", - 
"fs-extra": "^11.3.0", - "nanoid": "5.1.5", + "dockerode": "^4.0.9", + "fs-extra": "^11.3.4", + "nanoid": "5.1.7", "nanoid-dictionary": "5.0.0", - "slugify": "1.6.6", - "tar": "^7.4.3", + "slugify": "1.6.8", + "tar": "^7.5.11", "yargs": "^17.7.2" }, "engines": { From dcce2df0300879078690a2fbd3522602d467cf38 Mon Sep 17 00:00:00 2001 From: Lars Eggert Date: Fri, 20 Mar 2026 12:03:18 +0900 Subject: [PATCH 035/102] feat: add attendance summary and pie chart to meeting attendees page (#10481) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: add attendance summary and pie chart to meeting attendees page For IETF meetings ≥ 118, the attendees proceedings page now shows an Onsite / Remote / Total summary row matching the counts displayed on registration.ietf.org, together with a "View chart" button that opens a Bootstrap modal containing a Highcharts pie chart. * Split out attendees-chart.js --- ietf/meeting/tests_views.py | 13 +++++ ietf/meeting/views.py | 33 +++++++++-- ietf/static/js/attendees-chart.js | 58 +++++++++++++++++++ .../meeting/proceedings_attendees.html | 53 ++++++++++++++++- package.json | 1 + 5 files changed, 151 insertions(+), 7 deletions(-) create mode 100644 ietf/static/js/attendees-chart.js diff --git a/ietf/meeting/tests_views.py b/ietf/meeting/tests_views.py index 168999d0aa..258ffe554c 100644 --- a/ietf/meeting/tests_views.py +++ b/ietf/meeting/tests_views.py @@ -9007,6 +9007,8 @@ def test_proceedings_attendees(self): - assert onsite checkedin=True appears, not onsite checkedin=False - assert remote attended appears, not remote not attended - prefer onsite checkedin=True to remote attended when same person has both + - summary stats row shows correct counts + - chart data JSON is embedded with correct values """ m = MeetingFactory(type_id='ietf', date=datetime.date(2023, 11, 4), number="118") @@ -9028,6 +9030,17 @@ def test_proceedings_attendees(self): text = q('#id_attendees tbody tr').text().replace('\n', ' ') self.assertEqual(text, f"A Person {areg.affiliation} {areg.country_code} onsite C Person {creg.affiliation} {creg.country_code} remote") + # Summary stats row: Onsite / Remote / Total (matches registration.ietf.org) + self.assertContains(response, 'Onsite:') + self.assertContains(response, 'Remote:') + self.assertContains(response, 'Total:') + self.assertContains(response, '1') # onsite and remote + self.assertContains(response, '2') # total + + # Chart data embedded in page + chart_json = json.loads(q('#attendees-chart-data').text()) + self.assertEqual(chart_json['type'], [['Onsite', 1], ['Remote', 1]]) + def test_proceedings_overview(self): '''Test proceedings IETF Overview page. Note: old meetings aren't supported so need to add a new meeting then test. 
diff --git a/ietf/meeting/views.py b/ietf/meeting/views.py index 731dfad88f..67a81305b4 100644 --- a/ietf/meeting/views.py +++ b/ietf/meeting/views.py @@ -109,7 +109,7 @@ from ietf.meeting.utils import get_activity_stats, post_process, create_recording, delete_recording from ietf.meeting.utils import participants_for_meeting, generate_bluesheet, bluesheet_data, save_bluesheet from ietf.message.utils import infer_message -from ietf.name.models import SlideSubmissionStatusName, ProceedingsMaterialTypeName, SessionPurposeName +from ietf.name.models import SlideSubmissionStatusName, ProceedingsMaterialTypeName, SessionPurposeName, CountryName from ietf.utils import markdown from ietf.utils.decorators import require_api_key from ietf.utils.hedgedoc import Note, NoteError @@ -4812,15 +4812,36 @@ def proceedings_attendees(request, num=None): template = None registrations = None + stats = None + chart_data = None + if int(meeting.number) >= 118: checked_in, attended = participants_for_meeting(meeting) regs = list(Registration.objects.onsite().filter(meeting__number=num, checkedin=True)) - - for reg in Registration.objects.remote().filter(meeting__number=num).select_related('person'): - if reg.person.pk in attended and reg.person.pk not in checked_in: - regs.append(reg) + onsite_count = len(regs) + regs += [ + reg + for reg in Registration.objects.remote().filter(meeting__number=num).select_related('person') + if reg.person.pk in attended and reg.person.pk not in checked_in + ] + remote_count = len(regs) - onsite_count registrations = sorted(regs, key=lambda x: (x.last_name, x.first_name)) + + country_codes = [r.country_code for r in registrations if r.country_code] + stats = { + 'total': onsite_count + remote_count, + 'onsite': onsite_count, + 'remote': remote_count, + } + + code_to_name = dict(CountryName.objects.values_list('slug', 'name')) + country_counts = Counter(code_to_name.get(c, c) for c in country_codes).most_common() + + chart_data = { + 'type': [['Onsite', onsite_count], ['Remote', remote_count]], + 'countries': country_counts, + } else: overview_template = "/meeting/proceedings/%s/attendees.html" % meeting.number try: @@ -4832,6 +4853,8 @@ def proceedings_attendees(request, num=None): 'meeting': meeting, 'registrations': registrations, 'template': template, + 'stats': stats, + 'chart_data': chart_data, }) def proceedings_overview(request, num=None): diff --git a/ietf/static/js/attendees-chart.js b/ietf/static/js/attendees-chart.js new file mode 100644 index 0000000000..fed3b1289c --- /dev/null +++ b/ietf/static/js/attendees-chart.js @@ -0,0 +1,58 @@ +(function () { + var raw = document.getElementById('attendees-chart-data'); + if (!raw) return; + var chartData = JSON.parse(raw.textContent); + var chart = null; + var currentBreakdown = 'type'; + + // Override the global transparent background set by highcharts.js so the + // export menu and fullscreen view use the page background color. 
+ var container = document.getElementById('attendees-pie-chart'); + var bodyBg = getComputedStyle(document.body).backgroundColor; + container.style.setProperty('--highcharts-background-color', bodyBg); + + function renderChart(breakdown) { + var seriesData = chartData[breakdown].map(function (item) { + return { name: item[0], y: item[1] }; + }); + if (chart) chart.destroy(); + chart = Highcharts.chart(container, { + chart: { type: 'pie', height: 400 }, + title: { text: null }, + tooltip: { pointFormat: '{point.name}: {point.y} ({point.percentage:.1f}%)' }, + plotOptions: { + pie: { + dataLabels: { + enabled: true, + format: '{point.name}
{point.y} ({point.percentage:.1f}%)', + }, + showInLegend: false, + } + }, + series: [{ name: 'Attendees', data: seriesData }], + }); + } + + var modal = document.getElementById('attendees-chart-modal'); + + // Render (or re-render) the chart each time the modal becomes fully visible, + // so Highcharts can measure the container dimensions correctly. + modal.addEventListener('shown.bs.modal', function () { + renderChart(currentBreakdown); + }); + + // Release the chart when the modal closes to avoid stale renders. + modal.addEventListener('hidden.bs.modal', function () { + if (chart) { + chart.destroy(); + chart = null; + } + }); + + document.querySelectorAll('[name="attendees-breakdown"]').forEach(function (radio) { + radio.addEventListener('change', function () { + currentBreakdown = this.value; + renderChart(currentBreakdown); + }); + }); +})(); diff --git a/ietf/templates/meeting/proceedings_attendees.html b/ietf/templates/meeting/proceedings_attendees.html index 390ce00cad..0c59d4ab15 100644 --- a/ietf/templates/meeting/proceedings_attendees.html +++ b/ietf/templates/meeting/proceedings_attendees.html @@ -3,6 +3,7 @@ {% load origin markup_tags static %} {% block pagehead %} + {% if chart_data %}{% endif %} {% endblock %} {% block title %}IETF {{ meeting.number }} proceedings{% endblock %} {% block content %} @@ -14,8 +15,52 @@

Attendee list of IETF {{ meeting.number }} meeting

- + + {% if chart_data %} +
+
+
Onsite: {{ stats.onsite }}
+
Remote: {{ stats.remote }}
+
Total: {{ stats.total }}
+
+ +
+ + + + {{ chart_data|json_script:"attendees-chart-data" }} + {% endif %}{# chart_data #} + {% if template %} + {{template|safe}} {% else %} @@ -44,4 +89,8 @@

Attendee list of IETF {{ meeting.number }} meeting

{% endblock %} {% block js %} -{% endblock %} \ No newline at end of file + {% if chart_data %} + + + {% endif %} +{% endblock %} diff --git a/package.json b/package.json index fec29275b4..bb71250c4b 100644 --- a/package.json +++ b/package.json @@ -112,6 +112,7 @@ "ietf/static/images/irtf-logo-white.svg", "ietf/static/images/irtf-logo.svg", "ietf/static/js/add_session_recordings.js", + "ietf/static/js/attendees-chart.js", "ietf/static/js/agenda_filter.js", "ietf/static/js/agenda_materials.js", "ietf/static/js/announcement.js", From 2c29cbaad91a7c076643167e1ffc056975b2c97e Mon Sep 17 00:00:00 2001 From: Tianyi Gao Date: Fri, 20 Mar 2026 11:45:09 +0800 Subject: [PATCH 036/102] feat: add parent section in team about (#9148) (#10551) fix: remove empty for area/parent on all groups --- ietf/group/tests_info.py | 19 +++++++++++++++++++ ietf/templates/group/group_about.html | 11 +++++++---- 2 files changed, 26 insertions(+), 4 deletions(-) diff --git a/ietf/group/tests_info.py b/ietf/group/tests_info.py index 34f8500854..3f24e2e3d6 100644 --- a/ietf/group/tests_info.py +++ b/ietf/group/tests_info.py @@ -543,6 +543,25 @@ def verify_can_edit_group(url, group, username): for username in list(set(interesting_users)-set(can_edit[group.type_id])): verify_cannot_edit_group(url, group, username) + def test_group_about_team_parent(self): + """Team about page should show parent when parent is not an area""" + GroupFactory(type_id='team', parent=GroupFactory(type_id='area', acronym='gen')) + GroupFactory(type_id='team', parent=GroupFactory(type_id='ietf', acronym='iab')) + GroupFactory(type_id='team', parent=None) + + for team in Group.objects.filter(type='team').select_related('parent'): + url = urlreverse('ietf.group.views.group_about', kwargs=dict(acronym=team.acronym)) + r = self.client.get(url) + self.assertEqual(r.status_code, 200) + if team.parent and team.parent.type_id != 'area': + self.assertContains(r, 'Parent') + self.assertContains(r, team.parent.acronym) + elif team.parent and team.parent.type_id == 'area': + self.assertContains(r, team.parent.name) + self.assertNotContains(r, '>Parent<') + else: + self.assertNotContains(r, '>Parent<') + def test_group_about_personnel(self): """Correct personnel should appear on the group About page""" group = GroupFactory() diff --git a/ietf/templates/group/group_about.html b/ietf/templates/group/group_about.html index cbc2e11536..6d1843383c 100644 --- a/ietf/templates/group/group_about.html +++ b/ietf/templates/group/group_about.html @@ -51,10 +51,13 @@ {{ group.parent.name }} ({{ group.parent.acronym }}) - {% else %} - + {% elif group.parent and group.type_id == "team" %} + - + {% endif %} @@ -444,4 +447,4 @@

group_stats("{% url 'ietf.group.views.group_stats_data' %}", ".chart"); }); -{% endblock %} \ No newline at end of file +{% endblock %} From abab6373f5f465bdcc052f45c5def0710f360dc7 Mon Sep 17 00:00:00 2001 From: Robert Sparks Date: Thu, 19 Mar 2026 23:02:40 -0500 Subject: [PATCH 037/102] fix: reduce db churn and log noise for rebuilding references (#10563) * fix: (wip) reduce db churn and log noise for rebuilding references * fix: typo in log message * fix: typo in log message Co-authored-by: Jennifer Richards --------- Co-authored-by: Jennifer Richards --- ietf/doc/utils.py | 78 ++++++++++++++++++++++++++++++-------------- ietf/submit/utils.py | 5 +-- 2 files changed, 54 insertions(+), 29 deletions(-) diff --git a/ietf/doc/utils.py b/ietf/doc/utils.py index 396b3fcfa4..8cbe5e8f3e 100644 --- a/ietf/doc/utils.py +++ b/ietf/doc/utils.py @@ -4,6 +4,7 @@ import datetime import io +import json import math import os import re @@ -954,58 +955,78 @@ def rebuild_reference_relations(doc, filenames): filenames should be a dict mapping file ext (i.e., type) to the full path of each file. """ if doc.type.slug not in ["draft", "rfc"]: + log.log(f"rebuild_reference_relations called for non draft/rfc doc {doc.name}") return None - - log.log(f"Rebuilding reference relations for {doc.name}") - # try XML first - if "xml" in filenames: - refs = XMLDraft(filenames["xml"]).get_refs() - elif "txt" in filenames: - filename = filenames["txt"] - try: - refs = draft.PlaintextDraft.from_file(filename).get_refs() - except IOError as e: - return {"errors": [f"{e.strerror}: {filename}"]} - else: + + if "xml" not in filenames and "txt" not in filenames: + log.log(f"rebuild_reference_relations error: no file available for {doc.name}") return { "errors": [ "No file available for rebuilding reference relations. Need XML or plaintext." 
] } - - doc.relateddocument_set.filter( + else: + try: + # try XML first + if "xml" in filenames: + refs = XMLDraft(filenames["xml"]).get_refs() + elif "txt" in filenames: + filename = filenames["txt"] + refs = draft.PlaintextDraft.from_file(filename).get_refs() + except (IOError, UnicodeDecodeError) as e: + log.log(f"rebuild_reference_relations error: On {doc.name}: {e}") + return {"errors": [f"{e}: {filename}"]} + + before = set(doc.relateddocument_set.filter( relationship__slug__in=["refnorm", "refinfo", "refold", "refunk"] - ).delete() + ).values_list("relationship__slug","target__name")) warnings = [] errors = [] unfound = set() + intended = set() + names = [ref for ref in refs] + names.extend([ref[:-3] for ref in refs if re.match(r"^draft-.*-\d{2}$", ref)]) + queryset = Document.objects.filter(name__in=names) for ref, refType in refs.items(): - refdoc = Document.objects.filter(name=ref) - if not refdoc and re.match(r"^draft-.*-\d{2}$", ref): - refdoc = Document.objects.filter(name=ref[:-3]) + refdoc = queryset.filter(name=ref) + if not refdoc.exists() and re.match(r"^draft-.*-\d{2}$", ref): + refdoc = queryset.filter(name=ref[:-3]) count = refdoc.count() if count == 0: unfound.add("%s" % ref) continue elif count > 1: + log.unreachable("2026-3-16") # This branch is holdover from DocAlias errors.append("Too many Document objects found for %s" % ref) else: # Don't add references to ourself if doc != refdoc[0]: - RelatedDocument.objects.get_or_create( - source=doc, - target=refdoc[0], - relationship=DocRelationshipName.objects.get( - slug="ref%s" % refType - ), - ) + intended.add((f"ref{refType}", refdoc[0].name)) + if unfound: warnings.append( "There were %d references with no matching Document" % len(unfound) ) + if intended != before: + for slug, name in before-intended: + doc.relateddocument_set.filter(target__name=name, relationship_id=slug).delete() + for slug, name in intended-before: + doc.relateddocument_set.create( + target=queryset.get(name=name), + relationship_id=slug + ) + after = set(doc.relateddocument_set.filter( + relationship__slug__in=["refnorm", "refinfo", "refold", "refunk"] + ).values_list("relationship__slug","target__name")) + if after != intended: + errors.append("Attempted changed didn't achieve intended results") + changed_references = True + else: + changed_references = False + ret = {} if errors: ret["errors"] = errors @@ -1014,6 +1035,13 @@ def rebuild_reference_relations(doc, filenames): if unfound: ret["unfound"] = list(unfound) + logmsg = f"rebuild_reference_relations for {doc.name}: " + logmsg += "changed references" if changed_references else "references unchanged" + if ret: + logmsg += f" {json.dumps(ret)}" + + log.log(logmsg) + return ret def set_replaces_for_document(request, doc, new_replaces, by, email_subject, comment=""): diff --git a/ietf/submit/utils.py b/ietf/submit/utils.py index 9a7c358a6d..7e3106f723 100644 --- a/ietf/submit/utils.py +++ b/ietf/submit/utils.py @@ -395,10 +395,7 @@ def post_submission(request, submission, approved_doc_desc, approved_subm_desc): log.log(f"{submission.name}: updated state and info") - trouble = rebuild_reference_relations(draft, find_submission_filenames(draft)) - if trouble: - log.log('Rebuild_reference_relations trouble: %s'%trouble) - log.log(f"{submission.name}: rebuilt reference relations") + rebuild_reference_relations(draft, find_submission_filenames(draft)) if draft.stream_id == "ietf" and draft.group.type_id == "wg" and draft.rev == "00": # automatically set state "WG Document" From 
b08945aaf4618613f668bb5c231533f709bea4d4 Mon Sep 17 00:00:00 2001 From: Jennifer Richards Date: Fri, 20 Mar 2026 05:17:10 -0300 Subject: [PATCH 038/102] fix: maintain column count in HTML template (#10593) --- ietf/templates/group/group_about.html | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/ietf/templates/group/group_about.html b/ietf/templates/group/group_about.html index 6d1843383c..0a8b9194f2 100644 --- a/ietf/templates/group/group_about.html +++ b/ietf/templates/group/group_about.html @@ -58,6 +58,10 @@ {{ group.parent.name }} ({{ group.parent.acronym }}) + {% else %} +

+ + {% endif %} From d39317b070a7af5db4f48edaf0e7f03fd0a29680 Mon Sep 17 00:00:00 2001 From: Jennifer Richards Date: Mon, 23 Mar 2026 12:33:23 -0300 Subject: [PATCH 039/102] feat: update typesense search index on rfc pub/update (#10575) * chore: typesense API config for k8s * feat: DocumentInfo.pub_datetime() helper * chore(deps): install typesense library * feat: searchindex (typesense) util module * feat: sanitize abstract * feat: add (sanitized) content * style: ruff ruff on doc/tasks.py * feat: search index update task * chore: call the update task * refactor: better settings management * ci: update prod settings * chore: typing * test: searchindex tests * test: searchindex task test * style: ruff ruff * chore: drop type hints to fix mypy errors * test: fix tests * test: improve coverage * fix: handle missing content blob correctly --- ietf/api/serializers_rpc.py | 5 +- ietf/api/tests_serializers_rpc.py | 18 +++- ietf/api/tests_views_rpc.py | 44 ++++++--- ietf/api/views_rpc.py | 3 +- ietf/doc/models.py | 17 ++-- ietf/doc/tasks.py | 46 +++++++-- ietf/doc/tests_tasks.py | 64 ++++++++++-- ietf/utils/searchindex.py | 155 ++++++++++++++++++++++++++++++ ietf/utils/tests_searchindex.py | 128 ++++++++++++++++++++++++ k8s/settings_local.py | 20 ++-- requirements.txt | 1 + 11 files changed, 451 insertions(+), 50 deletions(-) create mode 100644 ietf/utils/searchindex.py create mode 100644 ietf/utils/tests_searchindex.py diff --git a/ietf/api/serializers_rpc.py b/ietf/api/serializers_rpc.py index a18dc588c4..701f05eece 100644 --- a/ietf/api/serializers_rpc.py +++ b/ietf/api/serializers_rpc.py @@ -1,4 +1,4 @@ -# Copyright The IETF Trust 2025, All Rights Reserved +# Copyright The IETF Trust 2025-2026, All Rights Reserved import datetime from pathlib import Path from typing import Literal, Optional @@ -20,6 +20,7 @@ RfcAuthor, ) from ietf.doc.serializers import RfcAuthorSerializer +from ietf.doc.tasks import update_rfc_searchindex_task from ietf.doc.utils import ( default_consensus, prettify_std_name, @@ -682,6 +683,8 @@ def update(self, instance, validated_data): stale_subseries_relations.delete() if len(rfc_events) > 0: rfc.save_with_history(rfc_events) + + update_rfc_searchindex_task.delay(rfc.rfc_number) return rfc diff --git a/ietf/api/tests_serializers_rpc.py b/ietf/api/tests_serializers_rpc.py index 1babb4c30f..ed326be451 100644 --- a/ietf/api/tests_serializers_rpc.py +++ b/ietf/api/tests_serializers_rpc.py @@ -1,4 +1,6 @@ # Copyright The IETF Trust 2026, All Rights Reserved +from unittest import mock + from django.utils import timezone from ietf.utils.test_utils import TestCase @@ -32,7 +34,8 @@ def test_create(self): with self.assertRaises(RuntimeError, msg="serializer does not allow create()"): serializer.save() - def test_update(self): + @mock.patch("ietf.api.serializers_rpc.update_rfc_searchindex_task") + def test_update(self, mock_update_searchindex_task): rfc = WgRfcFactory(pages=10) serializer = EditableRfcSerializer( instance=rfc, @@ -56,6 +59,11 @@ def test_update(self): ) self.assertTrue(serializer.is_valid()) result = serializer.save() + self.assertTrue(mock_update_searchindex_task.delay.called) + self.assertEqual( + mock_update_searchindex_task.delay.call_args, + mock.call(rfc.rfc_number), + ) result.refresh_from_db() self.assertEqual(result.title, "Yadda yadda yadda") self.assertEqual( @@ -84,7 +92,8 @@ def test_update(self): [Document.objects.get(name="fyi999")], ) - def test_partial_update(self): + @mock.patch("ietf.api.serializers_rpc.update_rfc_searchindex_task") + def 
test_partial_update(self, mock_update_searchindex_task): # We could test other permutations of fields, but authors is a partial update # we know we are going to use, so verifying that one in particular. rfc = WgRfcFactory(pages=10, abstract="do or do not", title="padawan") @@ -104,6 +113,11 @@ def test_partial_update(self): ) self.assertTrue(serializer.is_valid()) result = serializer.save() + self.assertTrue(mock_update_searchindex_task.delay.called) + self.assertEqual( + mock_update_searchindex_task.delay.call_args, + mock.call(rfc.rfc_number), + ) result.refresh_from_db() self.assertEqual(rfc.title, "padawan") self.assertEqual( diff --git a/ietf/api/tests_views_rpc.py b/ietf/api/tests_views_rpc.py index 7ab8778d28..a679e74789 100644 --- a/ietf/api/tests_views_rpc.py +++ b/ietf/api/tests_views_rpc.py @@ -196,7 +196,8 @@ def test_notify_rfc_published(self, mock_task_delay): self.assertEqual(mock_kwargs["rfc_number_list"], expected_rfc_number_list) @override_settings(APP_API_TOKENS={"ietf.api.views_rpc": ["valid-token"]}) - def test_upload_rfc_files(self): + @mock.patch("ietf.api.views_rpc.update_rfc_searchindex_task") + def test_upload_rfc_files(self, mock_update_searchindex_task): def _valid_post_data(): """Generate a valid post data dict @@ -217,14 +218,7 @@ def _valid_post_data(): } url = urlreverse("ietf.api.purple_api.upload_rfc_files") - unused_rfc_number = ( - Document.objects.filter(rfc_number__isnull=False).aggregate( - unused_rfc_number=Max("rfc_number") + 1 - )["unused_rfc_number"] - or 10000 - ) - - rfc = WgRfcFactory(rfc_number=unused_rfc_number) + rfc = WgRfcFactory() assert isinstance(rfc, Document), "WgRfcFactory should generate a Document" with TemporaryDirectory() as rfc_dir: settings.RFC_PATH = rfc_dir # affects overridden settings @@ -236,15 +230,17 @@ def _valid_post_data(): # no api key r = self.client.post(url, _valid_post_data(), format="multipart") self.assertEqual(r.status_code, 403) + self.assertFalse(mock_update_searchindex_task.delay.called) # invalid RFC r = self.client.post( url, - _valid_post_data() | {"rfc": unused_rfc_number + 1}, + _valid_post_data() | {"rfc": rfc.rfc_number + 10}, format="multipart", headers={"X-Api-Key": "valid-token"}, ) self.assertEqual(r.status_code, 400) + self.assertFalse(mock_update_searchindex_task.delay.called) # empty files r = self.client.post( @@ -263,6 +259,7 @@ def _valid_post_data(): headers={"X-Api-Key": "valid-token"}, ) self.assertEqual(r.status_code, 400) + self.assertFalse(mock_update_searchindex_task.delay.called) # bad file type r = self.client.post( @@ -276,9 +273,10 @@ def _valid_post_data(): headers={"X-Api-Key": "valid-token"}, ) self.assertEqual(r.status_code, 400) + self.assertFalse(mock_update_searchindex_task.delay.called) # Put a file in the way. Post should fail because replace = False - file_in_the_way = (rfc_path / f"rfc{unused_rfc_number}.txt") + file_in_the_way = (rfc_path / f"{rfc.name}.txt") file_in_the_way.touch() r = self.client.post( url, @@ -287,11 +285,12 @@ def _valid_post_data(): headers={"X-Api-Key": "valid-token"}, ) self.assertEqual(r.status_code, 409) # conflict + self.assertFalse(mock_update_searchindex_task.delay.called) file_in_the_way.unlink() # Put a blob in the way. 
Post should fail because replace = False blob_in_the_way = Blob.objects.create( - bucket="rfc", name=f"txt/rfc{unused_rfc_number}.txt", content=b"" + bucket="rfc", name=f"txt/{rfc.name}.txt", content=b"" ) r = self.client.post( url, @@ -300,6 +299,7 @@ def _valid_post_data(): headers={"X-Api-Key": "valid-token"}, ) self.assertEqual(r.status_code, 409) # conflict + self.assertFalse(mock_update_searchindex_task.delay.called) blob_in_the_way.delete() # valid post @@ -310,8 +310,13 @@ def _valid_post_data(): headers={"X-Api-Key": "valid-token"}, ) self.assertEqual(r.status_code, 200) + self.assertTrue(mock_update_searchindex_task.delay.called) + self.assertEqual( + mock_update_searchindex_task.delay.call_args, + mock.call(rfc.rfc_number), + ) for extension in ["xml", "txt", "html", "pdf", "json"]: - filename = f"rfc{unused_rfc_number}.{extension}" + filename = f"{rfc.name}.{extension}" self.assertEqual( (rfc_path / filename) .read_text(), @@ -328,7 +333,7 @@ def _valid_post_data(): f"{extension} blob should contain the expected content", ) # special case for notprepped - notprepped_fn = f"rfc{unused_rfc_number}.notprepped.xml" + notprepped_fn = f"{rfc.name}.notprepped.xml" self.assertEqual( ( rfc_path / "prerelease" / notprepped_fn @@ -347,6 +352,7 @@ def _valid_post_data(): ) # re-post with replace = False should now fail + mock_update_searchindex_task.reset_mock() r = self.client.post( url, _valid_post_data(), @@ -354,7 +360,8 @@ def _valid_post_data(): headers={"X-Api-Key": "valid-token"}, ) self.assertEqual(r.status_code, 409) # conflict - + self.assertFalse(mock_update_searchindex_task.delay.called) + # re-post with replace = True should succeed r = self.client.post( url, @@ -362,7 +369,12 @@ def _valid_post_data(): format="multipart", headers={"X-Api-Key": "valid-token"}, ) - self.assertEqual(r.status_code, 200) # conflict + self.assertEqual(r.status_code, 200) + self.assertTrue(mock_update_searchindex_task.delay.called) + self.assertEqual( + mock_update_searchindex_task.delay.call_args, + mock.call(rfc.rfc_number), + ) @override_settings(APP_API_TOKENS={"ietf.api.views_rpc": ["valid-token"]}) @mock.patch("ietf.api.views_rpc.create_rfc_index_task") diff --git a/ietf/api/views_rpc.py b/ietf/api/views_rpc.py index c7ae699005..cb6a59a167 100644 --- a/ietf/api/views_rpc.py +++ b/ietf/api/views_rpc.py @@ -38,7 +38,7 @@ from ietf.doc.models import Document, DocHistory, RfcAuthor, DocEvent from ietf.doc.serializers import RfcAuthorSerializer from ietf.doc.storage_utils import remove_from_storage, store_file, exists_in_storage -from ietf.doc.tasks import signal_update_rfc_metadata_task +from ietf.doc.tasks import signal_update_rfc_metadata_task, update_rfc_searchindex_task from ietf.person.models import Email, Person from ietf.sync.tasks import create_rfc_index_task @@ -516,6 +516,7 @@ def post(self, request): destination.parent.mkdir() shutil.move(ftm, destination) + update_rfc_searchindex_task.delay(rfc.rfc_number) return Response(NotificationAckSerializer().data) diff --git a/ietf/doc/models.py b/ietf/doc/models.py index 7b23a62c45..972f0a34e8 100644 --- a/ietf/doc/models.py +++ b/ietf/doc/models.py @@ -1285,11 +1285,8 @@ def submission(self): s = s.first() return s - def pub_date(self): - """Get the publication date for this document - - This is the rfc publication date for RFCs, and the new-revision date for other documents. 
- """ + def pub_datetime(self): + """Get the publication datetime of this document""" if self.type_id == "rfc": # As of Sept 2022, in ietf.sync.rfceditor.update_docs_from_rfc_index() `published_rfc` events are # created with a timestamp whose date *in the PST8PDT timezone* is the official publication date @@ -1297,7 +1294,15 @@ def pub_date(self): event = self.latest_event(type='published_rfc') else: event = self.latest_event(type='new_revision') - return event.time.astimezone(RPC_TZINFO).date() if event else None + return event.time.astimezone(RPC_TZINFO) if event else None + + def pub_date(self): + """Get the publication date for this document + + This is the rfc publication date for RFCs, and the new-revision date for other documents. + """ + pub_datetime = self.pub_datetime() + return None if pub_datetime is None else pub_datetime.date() def is_dochistory(self): return False diff --git a/ietf/doc/tasks.py b/ietf/doc/tasks.py index 90f4c80af5..a38cd5eb5c 100644 --- a/ietf/doc/tasks.py +++ b/ietf/doc/tasks.py @@ -3,6 +3,7 @@ # Celery task definitions # import datetime + import debug # pyflakes:ignore from celery import shared_task @@ -11,7 +12,7 @@ from django.conf import settings from django.utils import timezone -from ietf.utils import log +from ietf.utils import log, searchindex from ietf.utils.timezone import datetime_today from .expire import ( @@ -77,17 +78,19 @@ def expire_last_calls_task(): try: expire_last_call(doc) except Exception: - log.log(f"ERROR: Failed to expire last call for {doc.file_tag()} (id={doc.pk})") + log.log( + f"ERROR: Failed to expire last call for {doc.file_tag()} (id={doc.pk})" + ) else: log.log(f"Expired last call for {doc.file_tag()} (id={doc.pk})") -@shared_task +@shared_task def generate_idnits2_rfc_status_task(): outpath = Path(settings.DERIVED_DIR) / "idnits2-rfc-status" blob = generate_idnits2_rfc_status() try: - outpath.write_text(blob, encoding="utf8") # TODO-BLOBSTORE + outpath.write_text(blob, encoding="utf8") # TODO-BLOBSTORE except Exception as e: log.log(f"failed to write idnits2-rfc-status: {e}") @@ -97,7 +100,7 @@ def generate_idnits2_rfcs_obsoleted_task(): outpath = Path(settings.DERIVED_DIR) / "idnits2-rfcs-obsoleted" blob = generate_idnits2_rfcs_obsoleted() try: - outpath.write_text(blob, encoding="utf8") # TODO-BLOBSTORE + outpath.write_text(blob, encoding="utf8") # TODO-BLOBSTORE except Exception as e: log.log(f"failed to write idnits2-rfcs-obsoleted: {e}") @@ -105,7 +108,7 @@ def generate_idnits2_rfcs_obsoleted_task(): @shared_task def generate_draft_bibxml_files_task(days=7, process_all=False): """Generate bibxml files for recently updated docs - + If process_all is False (the default), processes only docs with new revisions in the last specified number of days. 
""" @@ -117,7 +120,9 @@ def generate_draft_bibxml_files_task(days=7, process_all=False): doc__type_id="draft", ).order_by("time") if not process_all: - doc_events = doc_events.filter(time__gte=timezone.now() - datetime.timedelta(days=days)) + doc_events = doc_events.filter( + time__gte=timezone.now() - datetime.timedelta(days=days) + ) for event in doc_events: try: update_or_create_draft_bibxml_file(event.doc, event.rev) @@ -132,6 +137,7 @@ def investigate_fragment_task(name_fragment: str): "results": investigate_fragment(name_fragment), } + @shared_task def rebuild_reference_relations_task(doc_names: list[str]): log.log(f"Task: Rebuilding reference relations for {doc_names}") @@ -157,6 +163,32 @@ def rebuild_reference_relations_task(doc_names: list[str]): def fixup_bofreq_timestamps_task(): # pragma: nocover fixup_bofreq_timestamps() + @shared_task def signal_update_rfc_metadata_task(rfc_number_list=()): signal_update_rfc_metadata(rfc_number_list) + + +@shared_task(bind=True) +def update_rfc_searchindex_task(self, rfc_number: int): + """Update the search index for one RFC""" + if not searchindex.enabled(): + log.log("Search indexing is not enabled, skipping") + return + + rfc = Document.objects.filter(type_id="rfc", rfc_number=rfc_number).first() + if rfc is None: + log.log( + f"ERROR: Document for rfc{rfc_number} not found, not updating search index" + ) + return + try: + searchindex.update_or_create_rfc_entry(rfc) + except Exception as err: + log.log(f"Search index update for {rfc.name} failed ({err})") + if isinstance(err, searchindex.RETRYABLE_ERROR_CLASSES): + searchindex_settings = searchindex.get_settings() + self.retry( + countdown=searchindex_settings["TASK_RETRY_DELAY"], + max_retries=searchindex_settings["TASK_MAX_RETRIES"], + ) diff --git a/ietf/doc/tests_tasks.py b/ietf/doc/tests_tasks.py index 29689cd596..728d21f131 100644 --- a/ietf/doc/tests_tasks.py +++ b/ietf/doc/tests_tasks.py @@ -1,18 +1,20 @@ -# Copyright The IETF Trust 2024, All Rights Reserved +# Copyright The IETF Trust 2024-2026, All Rights Reserved -import debug # pyflakes:ignore import datetime from unittest import mock from pathlib import Path +from celery.exceptions import Retry from django.conf import settings +from django.test.utils import override_settings from django.utils import timezone +from typesense import exceptions as typesense_exceptions from ietf.utils.test_utils import TestCase from ietf.utils.timezone import datetime_today -from .factories import DocumentFactory, NewRevisionDocEventFactory +from .factories import DocumentFactory, NewRevisionDocEventFactory, WgRfcFactory from .models import Document, NewRevisionDocEvent from .tasks import ( expire_ids_task, @@ -22,8 +24,10 @@ generate_idnits2_rfc_status_task, investigate_fragment_task, notify_expirations_task, + update_rfc_searchindex_task, ) + class TaskTests(TestCase): @mock.patch("ietf.doc.tasks.in_draft_expire_freeze") @mock.patch("ietf.doc.tasks.get_expired_drafts") @@ -87,7 +91,7 @@ def test_expire_last_calls_task(self, mock_get_expired, mock_expire): self.assertEqual(mock_expire.call_args_list[0], mock.call(docs[0])) self.assertEqual(mock_expire.call_args_list[1], mock.call(docs[1])) self.assertEqual(mock_expire.call_args_list[2], mock.call(docs[2])) - + # Check that it runs even if exceptions occur mock_get_expired.reset_mock() mock_expire.reset_mock() @@ -111,9 +115,40 @@ def test_investigate_fragment_task(self): retval, {"name_fragment": "some fragment", "results": investigation_results} ) + 
@mock.patch("ietf.doc.tasks.searchindex.update_or_create_rfc_entry") + @mock.patch("ietf.doc.tasks.searchindex.enabled") + def test_update_rfc_searchindex_task( + self, mock_searchindex_enabled, mock_create_entry + ): + mock_searchindex_enabled.return_value = False + + self.assertFalse(Document.objects.filter(rfc_number=5073).exists()) + rfc = WgRfcFactory() + update_rfc_searchindex_task(rfc_number=5073) + self.assertFalse(mock_create_entry.called) + update_rfc_searchindex_task(rfc_number=rfc.rfc_number) + self.assertFalse(mock_create_entry.called) + + mock_searchindex_enabled.return_value = True + update_rfc_searchindex_task(rfc_number=5073) + self.assertFalse(mock_create_entry.called) + update_rfc_searchindex_task(rfc_number=rfc.rfc_number) + self.assertTrue(mock_create_entry.called) + + with override_settings(SEARCHINDEX_CONFIG={"TASK_MAX_RETRIES": 0}): + # Try a non-retryable error (there are others) + mock_create_entry.side_effect = typesense_exceptions.RequestMalformed + update_rfc_searchindex_task(rfc_number=rfc.rfc_number) # no retry + # Now what should be a retryable error + mock_create_entry.side_effect = typesense_exceptions.Timeout + with self.assertRaises(Retry): + update_rfc_searchindex_task(rfc_number=rfc.rfc_number) + class Idnits2SupportTests(TestCase): - settings_temp_path_overrides = TestCase.settings_temp_path_overrides + ['DERIVED_DIR'] + settings_temp_path_overrides = TestCase.settings_temp_path_overrides + [ + "DERIVED_DIR" + ] @mock.patch("ietf.doc.tasks.generate_idnits2_rfcs_obsoleted") def test_generate_idnits2_rfcs_obsoleted_task(self, mock_generate): @@ -151,7 +186,9 @@ def setUp(self): ) # a couple that should always be ignored NewRevisionDocEventFactory( - time=now - datetime.timedelta(days=6), rev="09", doc__type_id="rfc" # not a draft + time=now - datetime.timedelta(days=6), + rev="09", + doc__type_id="rfc", # not a draft ) NewRevisionDocEventFactory( type="changed_document", # not a "new_revision" type @@ -164,7 +201,9 @@ def setUp(self): @mock.patch("ietf.doc.tasks.ensure_draft_bibxml_path_exists") @mock.patch("ietf.doc.tasks.update_or_create_draft_bibxml_file") - def test_generate_bibxml_files_for_all_drafts_task(self, mock_create, mock_ensure_path): + def test_generate_bibxml_files_for_all_drafts_task( + self, mock_create, mock_ensure_path + ): generate_draft_bibxml_files_task(process_all=True) self.assertTrue(mock_ensure_path.called) self.assertCountEqual( @@ -193,12 +232,15 @@ def test_generate_bibxml_files_for_all_drafts_task(self, mock_create, mock_ensur @mock.patch("ietf.doc.tasks.ensure_draft_bibxml_path_exists") @mock.patch("ietf.doc.tasks.update_or_create_draft_bibxml_file") - def test_generate_bibxml_files_for_recent_drafts_task(self, mock_create, mock_ensure_path): + def test_generate_bibxml_files_for_recent_drafts_task( + self, mock_create, mock_ensure_path + ): # default args - look back 7 days generate_draft_bibxml_files_task() self.assertTrue(mock_ensure_path.called) self.assertCountEqual( - mock_create.call_args_list, [mock.call(self.young_event.doc, self.young_event.rev)] + mock_create.call_args_list, + [mock.call(self.young_event.doc, self.young_event.rev)], ) mock_create.reset_mock() mock_ensure_path.reset_mock() @@ -223,7 +265,9 @@ def test_generate_bibxml_files_for_recent_drafts_task(self, mock_create, mock_en @mock.patch("ietf.doc.tasks.ensure_draft_bibxml_path_exists") @mock.patch("ietf.doc.tasks.update_or_create_draft_bibxml_file") - def test_generate_bibxml_files_for_recent_drafts_task_with_bad_value(self, mock_create, 
mock_ensure_path): + def test_generate_bibxml_files_for_recent_drafts_task_with_bad_value( + self, mock_create, mock_ensure_path + ): with self.assertRaises(ValueError): generate_draft_bibxml_files_task(days=0) self.assertFalse(mock_create.called) diff --git a/ietf/utils/searchindex.py b/ietf/utils/searchindex.py new file mode 100644 index 0000000000..e4427b88b5 --- /dev/null +++ b/ietf/utils/searchindex.py @@ -0,0 +1,155 @@ +# Copyright The IETF Trust 2026, All Rights Reserved +"""Search indexing utilities""" + +import re +from math import floor + +import httpx # just for exceptions +import typesense +import typesense.exceptions +from django.conf import settings + +from ietf.doc.models import Document, StoredObject +from ietf.doc.storage_utils import retrieve_str +from ietf.utils.log import log + +# Error classes that might succeed just by retrying a failed attempt. +# Must be a tuple for use with isinstance() +RETRYABLE_ERROR_CLASSES = ( + httpx.ConnectError, + httpx.ConnectTimeout, + typesense.exceptions.Timeout, + typesense.exceptions.ServerError, + typesense.exceptions.ServiceUnavailable, +) + + +DEFAULT_SETTINGS = { + "TYPESENSE_API_URL": "", + "TYPESENSE_API_KEY": "", + "TYPESENSE_COLLECTION_NAME": "docs", + "TASK_RETRY_DELAY": 10, + "TASK_MAX_RETRIES": 12, +} + + +def get_settings(): + return DEFAULT_SETTINGS | getattr(settings, "SEARCHINDEX_CONFIG", {}) + + +def enabled(): + _settings = get_settings() + return _settings["TYPESENSE_API_URL"] != "" + + +def _sanitize_text(content): + """Sanitize content or abstract text for search""" + # REs (with approximate names) + RE_DOT_OR_BANG_SPACE = r"\. |! " # -> " " (space) + RE_COMMENT_OR_TOC_CRUD = r"<--|-->|--+|\+|\.\.+" # -> "" + RE_BRACKETED_REF = r"\[[a-zA-Z0-9 -]+\]" # -> "" + RE_DOTTED_NUMBERS = r"[0-9]+\.[0-9]+(\.[0-9]+)?" # -> "" + RE_MULTIPLE_WHITESPACE = r"\s+" # -> " " (space) + # Replacement values (for clarity of intent) + SPACE = " " + EMPTY = "" + # Sanitizing begins here, order is significant! + content = re.sub(RE_DOT_OR_BANG_SPACE, SPACE, content.strip()) + content = re.sub(RE_COMMENT_OR_TOC_CRUD, EMPTY, content) + content = re.sub(RE_BRACKETED_REF, EMPTY, content) + content = re.sub(RE_DOTTED_NUMBERS, EMPTY, content) + content = re.sub(RE_MULTIPLE_WHITESPACE, SPACE, content) + return content.strip() + + +def update_or_create_rfc_entry(rfc: Document): + assert rfc.type_id == "rfc" + assert rfc.rfc_number is not None + + keywords: list[str] = rfc.keywords # help type checking + + subseries = rfc.part_of() + if len(subseries) > 1: + log( + f"RFC {rfc.rfc_number} is in multiple subseries. " + f"Indexing as {subseries[0].name}" + ) + subseries = subseries[0] if len(subseries) > 0 else None + obsoleted_by = rfc.relations_that("obs") + updated_by = rfc.relations_that("updates") + + stored_txt = ( + StoredObject.objects.exclude_deleted() + .filter(store="rfc", doc_name=rfc.name, name__startswith="txt/") + .first() + ) + content = "" + if stored_txt is not None: + # Should be available in the blobdb, but be cautious... 
+ try: + content = retrieve_str(kind=stored_txt.store, name=stored_txt.name) + except Exception as err: + log(f"Unable to retrieve {stored_txt} from storage: {err}") + + ts_id = f"doc-{rfc.pk}" + ts_document = { + "rfcNumber": rfc.rfc_number, + "rfc": str(rfc.rfc_number), + "filename": rfc.name, + "title": rfc.title, + "abstract": _sanitize_text(rfc.abstract), + "keywords": keywords, + "type": "rfc", + "state": [state.name for state in rfc.states.all()], + "status": {"slug": rfc.std_level.slug, "name": rfc.std_level.name}, + "date": floor(rfc.time.timestamp()), + "publicationDate": floor(rfc.pub_datetime().timestamp()), + "stream": {"slug": rfc.stream.slug, "name": rfc.stream.name}, + "authors": [ + {"name": rfc_author.titlepage_name, "affiliation": rfc_author.affiliation} + for rfc_author in rfc.rfcauthor_set.all() + ], + "flags": { + "hiddenDefault": False, + "obsoleted": len(obsoleted_by) > 0, + "updated": len(updated_by) > 0, + }, + "obsoletedBy": [str(doc.rfc_number) for doc in obsoleted_by], + "updatedBy": [str(doc.rfc_number) for doc in updated_by], + "ranking": rfc.rfc_number, + } + if subseries is not None: + ts_document["subseries"] = { + "acronym": subseries.type.slug, + "number": int(subseries.name[len(subseries.type.slug) :]), + "total": len(subseries.contains()), + } + if rfc.group is not None: + ts_document["group"] = { + "acronym": rfc.group.acronym, + "name": rfc.group.name, + "full": f"{rfc.group.acronym} - {rfc.group.name}", + } + if ( + rfc.group.parent is not None + and rfc.stream_id not in ["ise", "irtf", "iab"] # exclude editorial? + ): + ts_document["area"] = { + "acronym": rfc.group.parent.acronym, + "name": rfc.group.parent.name, + "full": f"{rfc.group.parent.acronym} - {rfc.group.parent.name}", + } + if rfc.ad is not None: + ts_document["adName"] = rfc.ad.name + if content != "": + ts_document["content"] = _sanitize_text(content) + _settings = get_settings() + client = typesense.Client( + { + "api_key": _settings["TYPESENSE_API_KEY"], + "nodes": [_settings["TYPESENSE_API_URL"]], + } + ) + client.collections[_settings["TYPESENSE_COLLECTION_NAME"]].documents.upsert( + {"id": ts_id} | ts_document + ) diff --git a/ietf/utils/tests_searchindex.py b/ietf/utils/tests_searchindex.py new file mode 100644 index 0000000000..8740716c85 --- /dev/null +++ b/ietf/utils/tests_searchindex.py @@ -0,0 +1,128 @@ +# Copyright The IETF Trust 2026, All Rights Reserved +from unittest import mock + +from django.conf import settings +from django.test.utils import override_settings + +from . import searchindex +from .test_utils import TestCase +from ..blobdb.models import Blob +from ..doc.factories import ( + WgDraftFactory, + WgRfcFactory, + PublishedRfcDocEventFactory, + BcpFactory, + StdFactory, +) +from ..doc.models import Document +from ..doc.storage_utils import store_str +from ..person.factories import PersonFactory + + +class SearchindexTests(TestCase): + def test_enabled(self): + with override_settings(): + try: + del settings.SEARCHINDEX_CONFIG + except AttributeError: + pass + self.assertFalse(searchindex.enabled()) + with override_settings( + SEARCHINDEX_CONFIG={"TYPESENSE_API_KEY": "this-is-not-a-key"} + ): + self.assertFalse(searchindex.enabled()) + with override_settings( + SEARCHINDEX_CONFIG={"TYPESENSE_API_URL": "http://example.com"} + ): + self.assertTrue(searchindex.enabled()) + + def test_sanitize_text(self): + dirty_text = """ + + This is text. It + is <---- full of \tprobl.....ems! Fix it. + """ + sanitized = "This is text It is full of problems Fix it." 
+ self.assertEqual(searchindex._sanitize_text(dirty_text), sanitized) + + @override_settings( + SEARCHINDEX_CONFIG={ + "TYPESENSE_API_URL": "http://ts.example.com", + "TYPESENSE_API_KEY": "test-api-key", + "TYPESENSE_COLLECTION_NAME": "frogs", + } + ) + @mock.patch("ietf.utils.searchindex.typesense.Client") + def test_update_or_create_rfc_entry(self, mock_ts_client_constructor): + not_rfc = WgDraftFactory() + assert isinstance(not_rfc, Document) + with self.assertRaises(AssertionError): + searchindex.update_or_create_rfc_entry(not_rfc) + self.assertFalse(mock_ts_client_constructor.called) + + invalid_rfc = WgRfcFactory(name="rfc1000000", rfc_number=None) + assert isinstance(invalid_rfc, Document) + with self.assertRaises(AssertionError): + searchindex.update_or_create_rfc_entry(invalid_rfc) + self.assertFalse(mock_ts_client_constructor.called) + + rfc = PublishedRfcDocEventFactory().doc + assert isinstance(rfc, Document) + searchindex.update_or_create_rfc_entry(rfc) + self.assertTrue(mock_ts_client_constructor.called) + # walk the tree down to the method we expected to be called... + mock_upsert = mock_ts_client_constructor.return_value.collections[ + "frogs" + ].documents.upsert # matches value in override_settings above + self.assertTrue(mock_upsert.called) + upserted_dict = mock_upsert.call_args[0][0] + # Check a few values, not exhaustive + self.assertEqual(upserted_dict["id"], f"doc-{rfc.pk}") + self.assertEqual(upserted_dict["rfcNumber"], rfc.rfc_number) + self.assertEqual( + upserted_dict["abstract"], searchindex._sanitize_text(rfc.abstract) + ) + self.assertNotIn("adName", upserted_dict) + self.assertNotIn("content", upserted_dict) # no blob + self.assertNotIn("subseries", upserted_dict) + + # repeat, this time with contents, an AD, and subseries docs + mock_upsert.reset_mock() + store_str( + kind="rfc", + name=f"txt/{rfc.name}.txt", + content="The contents of this RFC", + doc_name=rfc.name, + doc_rev=rfc.rev, # expected to be None + ) + rfc.ad = PersonFactory(name="Alfred D. Rector") + # Put it in two Subseries docs to be sure this does not break things + # (the typesense schema does not support this for real at the moment) + BcpFactory(contains=[rfc], name="bcp1234") + StdFactory(contains=[rfc], name="std1234") + searchindex.update_or_create_rfc_entry(rfc) + self.assertTrue(mock_upsert.called) + upserted_dict = mock_upsert.call_args[0][0] + # Check a few values, not exhaustive + self.assertEqual( + upserted_dict["content"], + searchindex._sanitize_text("The contents of this RFC"), + ) + self.assertEqual(upserted_dict["adName"], "Alfred D. Rector") + self.assertIn("subseries", upserted_dict) + ss_dict = upserted_dict["subseries"] + # We should get one of the two subseries docs, but neither is more correct + # than the other... 
+ self.assertTrue( + any( + ss_dict == {"acronym": ss_type, "number": 1234, "total": 1} + for ss_type in ["bcp", "std"] + ) + ) + + # Finally, delete the contents blob and make sure things don't blow up + mock_upsert.reset_mock() + Blob.objects.get(bucket="rfc", name=f"txt/{rfc.name}.txt").delete() + searchindex.update_or_create_rfc_entry(rfc) + self.assertTrue(mock_upsert.called) + upserted_dict = mock_upsert.call_args[0][0] + self.assertNotIn("content", upserted_dict) diff --git a/k8s/settings_local.py b/k8s/settings_local.py index 56e395c5ac..8c0c66cdf2 100644 --- a/k8s/settings_local.py +++ b/k8s/settings_local.py @@ -1,4 +1,4 @@ -# Copyright The IETF Trust 2007-2024, All Rights Reserved +# Copyright The IETF Trust 2007-2026, All Rights Reserved # -*- coding: utf-8 -*- from base64 import b64decode @@ -443,12 +443,8 @@ def _multiline_to_list(s): ), } RFCINDEX_DELETE_THEN_WRITE = False # S3Storage allows file_overwrite by default -RFCINDEX_OUTPUT_PATH = os.environ.get( - "DATATRACKER_RFCINDEX_OUTPUT_PATH", "other/" -) -RFCINDEX_INPUT_PATH = os.environ.get( - "DATATRACKER_RFCINDEX_INPUT_PATH", "" -) +RFCINDEX_OUTPUT_PATH = os.environ.get("DATATRACKER_RFCINDEX_OUTPUT_PATH", "other/") +RFCINDEX_INPUT_PATH = os.environ.get("DATATRACKER_RFCINDEX_INPUT_PATH", "") # Configure the blobdb app for artifact storage _blobdb_replication_enabled = ( @@ -471,3 +467,13 @@ def _multiline_to_list(s): PASSWORD_POLICY_ENFORCE_AT_LOGIN = ( os.environ.get("DATATRACKER_ENFORCE_PW_POLICY", "true").lower() != "false" ) + +# Typesense search indexing +SEARCHINDEX_CONFIG = { + "TYPESENSE_API_URL": os.environ.get("DATATRACKER_TYPESENSE_API_URL", ""), + "TYPESENSE_API_KEY": os.environ.get("DATATRACKER_TYPESENSE_API_KEY", ""), + "TASK_RETRY_DELAY": os.environ.get("DATATRACKER_SEARCHINDEX_TASK_RETRY_DELAY", 10), + "TASK_MAX_RETRIES": os.environ.get( + "DATATRACKER_SEARCHINDEX_TASK_MAX_RETRIES", "12" + ), +} diff --git a/requirements.txt b/requirements.txt index 3d54b104ee..2b8185dab9 100644 --- a/requirements.txt +++ b/requirements.txt @@ -75,6 +75,7 @@ pymemcache>=4.0.0 # for django.core.cache.backends.memcached.PyMemcacheCache python-mimeparse>=2.0.0 # from TastyPie pytz==2025.2 # Pinned as changes need to be vetted for their effect on Meeting fields types-pytz==2025.2.0.20251108 # match pytz version +typesense>=2.0.0 requests>=2.32.4 types-requests>=2.32.4 requests-mock>=1.12.1 From e6a3b3ebc03ef539454cfa154ad0242b32c6a335 Mon Sep 17 00:00:00 2001 From: jennifer-richards <19472766+jennifer-richards@users.noreply.github.com> Date: Mon, 23 Mar 2026 15:46:07 +0000 Subject: [PATCH 040/102] ci: update base image target version to 20260323T1533 --- dev/build/Dockerfile | 2 +- dev/build/TARGET_BASE | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/dev/build/Dockerfile b/dev/build/Dockerfile index ce1828052e..af43e990e0 100644 --- a/dev/build/Dockerfile +++ b/dev/build/Dockerfile @@ -1,4 +1,4 @@ -FROM ghcr.io/ietf-tools/datatracker-app-base:20260304T1633 +FROM ghcr.io/ietf-tools/datatracker-app-base:20260323T1533 LABEL maintainer="IETF Tools Team " ENV DEBIAN_FRONTEND=noninteractive diff --git a/dev/build/TARGET_BASE b/dev/build/TARGET_BASE index 6be54fb6b0..09f74cce28 100644 --- a/dev/build/TARGET_BASE +++ b/dev/build/TARGET_BASE @@ -1 +1 @@ -20260304T1633 +20260323T1533 From 33f0dbf9e969a233f46251909515b249330fbb79 Mon Sep 17 00:00:00 2001 From: Robert Sparks Date: Mon, 23 Mar 2026 12:39:15 -0500 Subject: [PATCH 041/102] feat: trigger red recomputation on RFC publication or metadata update (#10567) * 
feat: trigger red recomputation on RFC publication or metadata update * fix: move red precomputer call out of transaction * chore: remove old comment, simplify request call * fix: isolate delayed task in test * test: give settings_test an InMemoryStorage for r2-rfc * fix: follow obs/updates both ways when notifying red of changes * fix: improve red utils, test red and r2 utils * chore: ruff * chore: remove unused import * test: fix patch paths --------- Co-authored-by: Jennifer Richards --- ietf/api/serializers_rpc.py | 15 +++- ietf/api/tests_serializers_rpc.py | 88 ++++++++++++++++--- ietf/api/tests_views_rpc.py | 24 ++++- ietf/api/views_rpc.py | 12 ++- ietf/doc/tasks.py | 17 ++++ ietf/doc/tests_utils.py | 140 +++++++++++++++++++++++++++++- ietf/doc/utils_r2.py | 17 ++++ ietf/doc/utils_red.py | 31 +++++++ ietf/settings_test.py | 6 +- k8s/settings_local.py | 10 +++ 10 files changed, 341 insertions(+), 19 deletions(-) create mode 100644 ietf/doc/utils_r2.py create mode 100644 ietf/doc/utils_red.py diff --git a/ietf/api/serializers_rpc.py b/ietf/api/serializers_rpc.py index 701f05eece..397ca05d9b 100644 --- a/ietf/api/serializers_rpc.py +++ b/ietf/api/serializers_rpc.py @@ -20,7 +20,7 @@ RfcAuthor, ) from ietf.doc.serializers import RfcAuthorSerializer -from ietf.doc.tasks import update_rfc_searchindex_task +from ietf.doc.tasks import trigger_red_precomputer_task, update_rfc_searchindex_task from ietf.doc.utils import ( default_consensus, prettify_std_name, @@ -683,7 +683,18 @@ def update(self, instance, validated_data): stale_subseries_relations.delete() if len(rfc_events) > 0: rfc.save_with_history(rfc_events) - + # Gather obs and updates in both directions as a title/author change to + # this doc affects the info rendering of all of the other RFCs + needs_updating = sorted( + [ + d.rfc_number + for d in [rfc] + + rfc.related_that_doc(("obs", "updates")) + + rfc.related_that(("obs", "updates")) + ] + ) + trigger_red_precomputer_task.delay(rfc_number_list=needs_updating) + # Update the search index also update_rfc_searchindex_task.delay(rfc.rfc_number) return rfc diff --git a/ietf/api/tests_serializers_rpc.py b/ietf/api/tests_serializers_rpc.py index ed326be451..167ffcd3ee 100644 --- a/ietf/api/tests_serializers_rpc.py +++ b/ietf/api/tests_serializers_rpc.py @@ -1,4 +1,5 @@ # Copyright The IETF Trust 2026, All Rights Reserved + from unittest import mock from django.utils import timezone @@ -35,8 +36,21 @@ def test_create(self): serializer.save() @mock.patch("ietf.api.serializers_rpc.update_rfc_searchindex_task") - def test_update(self, mock_update_searchindex_task): + @mock.patch("ietf.api.serializers_rpc.trigger_red_precomputer_task") + def test_update(self, mock_trigger_red_task, mock_update_searchindex_task): + updates = WgRfcFactory.create_batch(2) + obsoletes = WgRfcFactory.create_batch(2) rfc = WgRfcFactory(pages=10) + updated_by = WgRfcFactory.create_batch(2) + obsoleted_by = WgRfcFactory.create_batch(2) + for d in updates: + rfc.relateddocument_set.create(relationship_id="updates",target=d) + for d in obsoletes: + rfc.relateddocument_set.create(relationship_id="updates",target=d) + for d in updated_by: + d.relateddocument_set.create(relationship_id="updates",target=rfc) + for d in obsoleted_by: + d.relateddocument_set.create(relationship_id="updates",target=rfc) serializer = EditableRfcSerializer( instance=rfc, data={ @@ -59,11 +73,6 @@ def test_update(self, mock_update_searchindex_task): ) self.assertTrue(serializer.is_valid()) result = serializer.save() - 
self.assertTrue(mock_update_searchindex_task.delay.called) - self.assertEqual( - mock_update_searchindex_task.delay.call_args, - mock.call(rfc.rfc_number), - ) result.refresh_from_db() self.assertEqual(result.title, "Yadda yadda yadda") self.assertEqual( @@ -91,12 +100,42 @@ def test_update(self, mock_update_searchindex_task): result.part_of(), [Document.objects.get(name="fyi999")], ) + # Confirm that red precomputer was triggered correctly + self.assertTrue(mock_trigger_red_task.delay.called) + _, mock_kwargs = mock_trigger_red_task.delay.call_args + self.assertIn("rfc_number_list", mock_kwargs) + expected_numbers = sorted( + [ + d.rfc_number + for d in [rfc] + updates + obsoletes + updated_by + obsoleted_by + ] + ) + self.assertEqual(mock_kwargs["rfc_number_list"], expected_numbers) + # Confirm that the search index update task was triggered correctly + self.assertTrue(mock_update_searchindex_task.delay.called) + self.assertEqual( + mock_update_searchindex_task.delay.call_args, + mock.call(rfc.rfc_number), + ) @mock.patch("ietf.api.serializers_rpc.update_rfc_searchindex_task") - def test_partial_update(self, mock_update_searchindex_task): + @mock.patch("ietf.api.serializers_rpc.trigger_red_precomputer_task") + def test_partial_update(self, mock_trigger_red_task, mock_update_searchindex_task): # We could test other permutations of fields, but authors is a partial update # we know we are going to use, so verifying that one in particular. + updates = WgRfcFactory.create_batch(2) + obsoletes = WgRfcFactory.create_batch(2) rfc = WgRfcFactory(pages=10, abstract="do or do not", title="padawan") + updated_by = WgRfcFactory.create_batch(2) + obsoleted_by = WgRfcFactory.create_batch(2) + for d in updates: + rfc.relateddocument_set.create(relationship_id="updates",target=d) + for d in obsoletes: + rfc.relateddocument_set.create(relationship_id="updates",target=d) + for d in updated_by: + d.relateddocument_set.create(relationship_id="updates",target=rfc) + for d in obsoleted_by: + d.relateddocument_set.create(relationship_id="updates",target=rfc) serializer = EditableRfcSerializer( partial=True, instance=rfc, @@ -113,11 +152,6 @@ def test_partial_update(self, mock_update_searchindex_task): ) self.assertTrue(serializer.is_valid()) result = serializer.save() - self.assertTrue(mock_update_searchindex_task.delay.called) - self.assertEqual( - mock_update_searchindex_task.delay.call_args, - mock.call(rfc.rfc_number), - ) result.refresh_from_db() self.assertEqual(rfc.title, "padawan") self.assertEqual( @@ -140,8 +174,27 @@ def test_partial_update(self, mock_update_searchindex_task): self.assertEqual(result.pages, 10) self.assertEqual(result.std_level_id, "ps") self.assertEqual(result.part_of(), []) + # Confirm that the red precomputer was triggered correctly + self.assertTrue(mock_trigger_red_task.delay.called) + _, mock_kwargs = mock_trigger_red_task.delay.call_args + self.assertIn("rfc_number_list", mock_kwargs) + expected_numbers = sorted( + [ + d.rfc_number + for d in [rfc] + updates + obsoletes + updated_by + obsoleted_by + ] + ) + self.assertEqual(mock_kwargs["rfc_number_list"], expected_numbers) + # Confirm that the search index update task was called correctly + self.assertTrue(mock_update_searchindex_task.delay.called) + self.assertEqual( + mock_update_searchindex_task.delay.call_args, + mock.call(rfc.rfc_number), + ) # Test only a field on the Document itself to be sure that it works + mock_trigger_red_task.delay.reset_mock() + mock_update_searchindex_task.delay.reset_mock() serializer = 
EditableRfcSerializer( partial=True, instance=rfc, @@ -151,3 +204,14 @@ def test_partial_update(self, mock_update_searchindex_task): result = serializer.save() result.refresh_from_db() self.assertEqual(rfc.title, "jedi master") + # Confirm that the red precomputer was triggered correctly + self.assertTrue(mock_trigger_red_task.delay.called) + _, mock_kwargs = mock_trigger_red_task.delay.call_args + self.assertIn("rfc_number_list", mock_kwargs) + self.assertEqual(mock_kwargs["rfc_number_list"], expected_numbers) + # Confirm that the search index update task was called correctly + self.assertTrue(mock_update_searchindex_task.delay.called) + self.assertEqual( + mock_update_searchindex_task.delay.call_args, + mock.call(rfc.rfc_number), + ) diff --git a/ietf/api/tests_views_rpc.py b/ietf/api/tests_views_rpc.py index a679e74789..6d10bee8e8 100644 --- a/ietf/api/tests_views_rpc.py +++ b/ietf/api/tests_views_rpc.py @@ -197,7 +197,8 @@ def test_notify_rfc_published(self, mock_task_delay): @override_settings(APP_API_TOKENS={"ietf.api.views_rpc": ["valid-token"]}) @mock.patch("ietf.api.views_rpc.update_rfc_searchindex_task") - def test_upload_rfc_files(self, mock_update_searchindex_task): + @mock.patch("ietf.api.views_rpc.trigger_red_precomputer_task") + def test_upload_rfc_files(self, mock_trigger_red_task, mock_update_searchindex_task): def _valid_post_data(): """Generate a valid post data dict @@ -218,7 +219,14 @@ def _valid_post_data(): } url = urlreverse("ietf.api.purple_api.upload_rfc_files") + updates = RfcFactory.create_batch(2) + obsoletes = RfcFactory.create_batch(2) + rfc = WgRfcFactory() + for r in obsoletes: + rfc.relateddocument_set.create(relationship_id="obs", target=r) + for r in updates: + rfc.relateddocument_set.create(relationship_id="updates", target=r) assert isinstance(rfc, Document), "WgRfcFactory should generate a Document" with TemporaryDirectory() as rfc_dir: settings.RFC_PATH = rfc_dir # affects overridden settings @@ -303,6 +311,7 @@ def _valid_post_data(): blob_in_the_way.delete() # valid post + mock_trigger_red_task.delay.reset_mock() r = self.client.post( url, _valid_post_data(), @@ -310,7 +319,6 @@ def _valid_post_data(): headers={"X-Api-Key": "valid-token"}, ) self.assertEqual(r.status_code, 200) - self.assertTrue(mock_update_searchindex_task.delay.called) self.assertEqual( mock_update_searchindex_task.delay.call_args, mock.call(rfc.rfc_number), @@ -350,6 +358,18 @@ def _valid_post_data(): b"This is .notprepped.xml", ".notprepped.xml blob should contain the expected content", ) + # Confirm that the red precomputer was triggered correctly + self.assertTrue(mock_trigger_red_task.delay.called) + _, mock_kwargs = mock_trigger_red_task.delay.call_args + self.assertIn("rfc_number_list", mock_kwargs) + expected_rfc_number_list = [rfc.rfc_number] + expected_rfc_number_list.extend( + [d.rfc_number for d in updates + obsoletes] + ) + expected_rfc_number_list = sorted(set(expected_rfc_number_list)) + self.assertEqual(mock_kwargs["rfc_number_list"], expected_rfc_number_list) + # Confirm that the search index update task was called correctly + self.assertTrue(mock_update_searchindex_task.delay.called) # re-post with replace = False should now fail mock_update_searchindex_task.reset_mock() diff --git a/ietf/api/views_rpc.py b/ietf/api/views_rpc.py index cb6a59a167..59eed1e10e 100644 --- a/ietf/api/views_rpc.py +++ b/ietf/api/views_rpc.py @@ -38,7 +38,11 @@ from ietf.doc.models import Document, DocHistory, RfcAuthor, DocEvent from ietf.doc.serializers import RfcAuthorSerializer 
from ietf.doc.storage_utils import remove_from_storage, store_file, exists_in_storage -from ietf.doc.tasks import signal_update_rfc_metadata_task, update_rfc_searchindex_task +from ietf.doc.tasks import ( + signal_update_rfc_metadata_task, + trigger_red_precomputer_task, + update_rfc_searchindex_task, +) from ietf.person.models import Email, Person from ietf.sync.tasks import create_rfc_index_task @@ -516,6 +520,12 @@ def post(self, request): destination.parent.mkdir() shutil.move(ftm, destination) + # Trigger red precomputer + needs_updating = [rfc.rfc_number] + for rel in rfc.relateddocument_set.filter(relationship_id__in=["obs","updates"]): + needs_updating.append(rel.target.rfc_number) + trigger_red_precomputer_task.delay(rfc_number_list=sorted(needs_updating)) + # Trigger search index update update_rfc_searchindex_task.delay(rfc.rfc_number) return Response(NotificationAckSerializer().data) diff --git a/ietf/doc/tasks.py b/ietf/doc/tasks.py index a38cd5eb5c..19edb39014 100644 --- a/ietf/doc/tasks.py +++ b/ietf/doc/tasks.py @@ -7,11 +7,14 @@ import debug # pyflakes:ignore from celery import shared_task +from celery.exceptions import MaxRetriesExceededError from pathlib import Path from django.conf import settings from django.utils import timezone +from ietf.doc.utils_r2 import rfcs_are_in_r2 +from ietf.doc.utils_red import trigger_red_precomputer from ietf.utils import log, searchindex from ietf.utils.timezone import datetime_today @@ -169,6 +172,20 @@ def signal_update_rfc_metadata_task(rfc_number_list=()): signal_update_rfc_metadata(rfc_number_list) +@shared_task(bind=True) +def trigger_red_precomputer_task(self, rfc_number_list=()): + if not rfcs_are_in_r2(rfc_number_list): + log.log(f"Objects are not yet in R2 for RFCs {rfc_number_list}") + try: + countdown = getattr(settings, "RED_PRECOMPUTER_TRIGGER_RETRY_DELAY", 10) + max_retries = getattr(settings, "RED_PRECOMPUTER_TRIGGER_MAX_RETRIES", 12) + self.retry(countdown=countdown, max_retries=max_retries) + except MaxRetriesExceededError: + log.log(f"Gave up waiting for objects in R2 for RFCs {rfc_number_list}") + else: + trigger_red_precomputer(rfc_number_list) + + @shared_task(bind=True) def update_rfc_searchindex_task(self, rfc_number: int): """Update the search index for one RFC""" diff --git a/ietf/doc/tests_utils.py b/ietf/doc/tests_utils.py index a2784bc85e..ba672cd847 100644 --- a/ietf/doc/tests_utils.py +++ b/ietf/doc/tests_utils.py @@ -1,15 +1,23 @@ # Copyright The IETF Trust 2020, All Rights Reserved import datetime +from io import BytesIO + +import mock import debug # pyflakes:ignore +import requests from pathlib import Path from unittest.mock import call, patch from django.conf import settings +from django.core.files.storage import storages from django.db import IntegrityError from django.test.utils import override_settings from django.utils import timezone + +from ietf.doc.utils_r2 import rfcs_are_in_r2 +from ietf.doc.utils_red import trigger_red_precomputer from ietf.group.factories import GroupFactory, RoleFactory from ietf.name.models import DocTagName from ietf.person.factories import PersonFactory @@ -17,11 +25,12 @@ from ietf.utils.test_utils import TestCase, name_of_file_containing, reload_db_objects from ietf.person.models import Person from ietf.doc.factories import DocumentFactory, WgRfcFactory, WgDraftFactory -from ietf.doc.models import State, DocumentActionHolder, DocumentAuthor +from ietf.doc.models import State, DocumentActionHolder, DocumentAuthor, StoredObject from ietf.doc.utils import 
(update_action_holders, add_state_change_event, update_documentauthors, fuzzy_find_documents, rebuild_reference_relations, build_file_urls, ensure_draft_bibxml_path_exists, update_or_create_draft_bibxml_file, last_ballot_doc_revision) +from ietf.doc.storage_utils import store_str from ietf.utils.draft import Draft, PlaintextDraft from ietf.utils.xmldraft import XMLDraft @@ -559,3 +568,132 @@ def test_last_ballot_doc_revision(self): nobody = PersonFactory() self.assertIsNone(last_ballot_doc_revision(doc, nobody)) self.assertEqual(rev, last_ballot_doc_revision(doc, ad)) + + +class UtilsRedTests(TestCase): + @mock.patch("ietf.doc.utils_red.log") + @mock.patch("ietf.doc.utils_red.requests.post") + def test_trigger_red_precomputer_not_configured(self, mock_post, mock_log): + with override_settings(): + try: + del settings.CUSTOM_SETTING_NAME + except AttributeError: + pass + trigger_red_precomputer(rfc_number_list=[1, 2, 3]) + self.assertEqual(mock_log.call_count, 1) + mock_args, _ = mock_log.call_args + self.assertEqual( + mock_args, + ("No URL configured for triggering red precompute multiple, skipping",), + ) + + mock_log.reset_mock() + with override_settings(TRIGGER_RED_PRECOMPUTE_MULTIPLE_URL=None): + trigger_red_precomputer(rfc_number_list=[1, 2, 3]) + self.assertFalse(mock_post.called) + self.assertEqual(mock_log.call_count, 1) + mock_args, _ = mock_log.call_args + self.assertEqual( + mock_args, + ("No URL configured for triggering red precompute multiple, skipping",), + ) + + @override_settings( + TRIGGER_RED_PRECOMPUTE_MULTIPLE_URL="urlbits", + ) + @mock.patch("ietf.doc.utils_red.log") + @mock.patch("ietf.doc.utils_red.requests.post", side_effect=requests.Timeout()) + def test_trigger_red_precomputer_swallows_timeout_exception( + self, mock_post, mock_log + ): + exception_raised = False + try: + trigger_red_precomputer(rfc_number_list=[1, 2, 3]) + except Exception: + exception_raised = True + self.assertFalse(exception_raised) + self.assertEqual(mock_log.call_count, 2) + # only checking the last log call + mock_args, _ = mock_log.call_args + self.assertEqual(len(mock_args), 1) + self.assertIn("POST request timed out", mock_args[0]) + + @override_settings( + TRIGGER_RED_PRECOMPUTE_MULTIPLE_URL="urlbits", + ) + @mock.patch("ietf.doc.utils_red.requests.post", side_effect=Exception()) + def test_trigger_red_precomputer_does_not_swallow_too_much(self, mock_post): + exception_raised = False + try: + trigger_red_precomputer(rfc_number_list=[1, 2, 3]) + except Exception: + exception_raised = True + self.assertTrue(exception_raised) + + @override_settings( + TRIGGER_RED_PRECOMPUTE_MULTIPLE_URL="urlbits", + DEFAULT_REQUESTS_TIMEOUT=314159265, + ) + @mock.patch("ietf.doc.utils_red.log") + @mock.patch("ietf.doc.utils_red.requests.post") + def test_trigger_red_precomputer(self, mock_post, mock_log): + mock_post.return_value = mock.Mock(status_code=200) + trigger_red_precomputer(rfc_number_list=[1, 2, 3]) + self.assertTrue(mock_post.called) + _, mock_kwargs = mock_post.call_args + self.assertIn("url", mock_kwargs) + self.assertEqual(mock_kwargs["url"], "urlbits") + self.assertIn("json", mock_kwargs) + self.assertEqual(mock_kwargs["json"], {"rfcs": "1,2,3"}) + self.assertIn("timeout", mock_kwargs) + self.assertEqual(mock_kwargs["timeout"], 314159265) + self.assertEqual(mock_log.call_count, 1) # Not testing the first info log value + mock_log.reset_mock() + mock_post.reset_mock() + mock_post.return_value = mock.Mock( + status_code=500, + ) + trigger_red_precomputer(rfc_number_list=[1, 2, 3]) + 
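+ # A non-200 response is not retried; trigger_red_precomputer just logs the failure
+ # in addition to its initial info line, which the assertions below verify.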
self.assertEqual(mock_log.call_count, 2) + mock_args, _ = mock_log.call_args + self.assertEqual(len(mock_args), 1) + expected = f"POST request failed for {settings.TRIGGER_RED_PRECOMPUTE_MULTIPLE_URL} : status_code=500" + self.assertEqual(mock_args[0], expected) + + +class UtilsR2TestCase(TestCase): + def test_rfcs_are_in_r2(self): + rfcs = WgRfcFactory.create_batch(2) + rfc_name_list = [rfc.name for rfc in rfcs] + rfc_number_list = [rfc.rfc_number for rfc in rfcs] + r2_rfc_bucket = storages["r2-rfc"] + # Right now the various doc Factories do not populate any content + self.assertEqual( + StoredObject.objects.filter( + store="rfc", doc_name__in=rfc_name_list + ).count(), + 0, + ) + self.assertTrue(rfcs_are_in_r2(rfc_number_list=rfc_number_list)) + for rfc in rfcs: + store_str( + kind="rfc", + name=f"testartifact/{rfc.name}.testartifact", + content="", + doc_name=rfc.name, + doc_rev=None, + ) + self.assertEqual( + StoredObject.objects.filter( + store="rfc", doc_name__in=rfc_name_list + ).count(), + 2, + ) + self.assertFalse(rfcs_are_in_r2(rfc_number_list=rfc_number_list)) + r2_rfc_bucket.save(f"testartifact/{rfcs[0].name}.testartifact", BytesIO(b"")) + self.assertFalse(rfcs_are_in_r2(rfc_number_list=rfc_number_list)) + r2_rfc_bucket.save(f"testartifact/{rfcs[1].name}.testartifact", BytesIO(b"")) + self.assertTrue(rfcs_are_in_r2(rfc_number_list=rfc_number_list)) + + + diff --git a/ietf/doc/utils_r2.py b/ietf/doc/utils_r2.py new file mode 100644 index 0000000000..53fb978303 --- /dev/null +++ b/ietf/doc/utils_r2.py @@ -0,0 +1,17 @@ +# Copyright The IETF Trust 2026, All Rights Reserved + +from django.core.files.storage import storages + +from ietf.doc.models import StoredObject + + +def rfcs_are_in_r2(rfc_number_list=()): + r2_rfc_bucket = storages["r2-rfc"] + for rfc_number in rfc_number_list: + stored_objects = StoredObject.objects.filter( + store="rfc", doc_name=f"rfc{rfc_number}" + ) + for stored_object in stored_objects: + if not r2_rfc_bucket.exists(stored_object.name): + return False + return True diff --git a/ietf/doc/utils_red.py b/ietf/doc/utils_red.py new file mode 100644 index 0000000000..bcda893dca --- /dev/null +++ b/ietf/doc/utils_red.py @@ -0,0 +1,31 @@ +# Copyright The IETF Trust 2026, All Rights Reserved + +import requests + +from django.conf import settings + +from ietf.utils.log import log + + +def trigger_red_precomputer(rfc_number_list=()): + url = getattr(settings, "TRIGGER_RED_PRECOMPUTE_MULTIPLE_URL", None) + if url is not None: + payload = { + "rfcs": ",".join([str(n) for n in rfc_number_list]), + } + try: + log(f"Triggering red precompute multiple for RFCs {rfc_number_list}") + response = requests.post( + url=url, + json=payload, + timeout=settings.DEFAULT_REQUESTS_TIMEOUT, + ) + except requests.Timeout as e: + log(f"POST request timed out for {url} : {e}") + return + if response.status_code != 200: + log( + f"POST request failed for {url} : status_code={response.status_code}" + ) + else: + log("No URL configured for triggering red precompute multiple, skipping") diff --git a/ietf/settings_test.py b/ietf/settings_test.py index 1f5a7e8ddc..e7ebc13eb2 100755 --- a/ietf/settings_test.py +++ b/ietf/settings_test.py @@ -115,8 +115,12 @@ def tempdir_with_cleanup(**kwargs): except NameError: pass -# Use InMemoryStorage for red bucket storage +# Use InMemoryStorage for red bucket and r2-rfc storages STORAGES["red_bucket"] = { "BACKEND": "django.core.files.storage.InMemoryStorage", "OPTIONS": {"location": "red_bucket"}, } +STORAGES["r2-rfc"] = { + "BACKEND": 
"django.core.files.storage.InMemoryStorage", + "OPTIONS": {"location": "r2-rfc"}, +} diff --git a/k8s/settings_local.py b/k8s/settings_local.py index 8c0c66cdf2..323b7fd45a 100644 --- a/k8s/settings_local.py +++ b/k8s/settings_local.py @@ -80,6 +80,16 @@ def _multiline_to_list(s): else: raise RuntimeError("DATATRACKER_API_PRIVATE_KEY_PEM_B64 must be set") +_RED_PRECOMPUTER_TRIGGER_RETRY_DELAY = os.environ.get("DATATRACKER_RED_PRECOMPUTER_TRIGGER_RETRY_DELAY", None) +if _RED_PRECOMPUTER_TRIGGER_RETRY_DELAY is not None: + RED_PRECOMPUTER_TRIGGER_RETRY_DELAY = _RED_PRECOMPUTER_TRIGGER_RETRY_DELAY +_RED_PRECOMPUTER_TRIGGER_MAX_RETRIES = os.environ.get("DATATRACKER_RED_PRECOMPUTER_TRIGGER_MAX_RETRIES", None) +if _RED_PRECOMPUTER_TRIGGER_MAX_RETRIES is not None: + RED_PRECOMPUTER_TRIGGER_MAX_RETRIES = _RED_PRECOMPUTER_TRIGGER_MAX_RETRIES +_TRIGGER_RED_PRECOMPUTE_MULTIPLE_URL = os.environ.get("DATATRACKER_TRIGGER_RED_PRECOMPUTE_MULTIPLE_URL", None) +if _TRIGGER_RED_PRECOMPUTE_MULTIPLE_URL is not None: + TRIGGER_RED_PRECOMPUTE_MULTIPLE_URL = _TRIGGER_RED_PRECOMPUTE_MULTIPLE_URL + # Set DEBUG if DATATRACKER_DEBUG env var is the word "true" DEBUG = os.environ.get("DATATRACKER_DEBUG", "false").lower() == "true" From e5b037ba83c2275efcd5a034c4bd1af67932d23f Mon Sep 17 00:00:00 2001 From: Robert Sparks Date: Mon, 23 Mar 2026 13:19:54 -0500 Subject: [PATCH 042/102] fix: rebuild reference relations once we have rfc contents (#10578) Co-authored-by: Jennifer Richards --- ietf/api/tests_views_rpc.py | 13 ++++++++++++- ietf/api/views_rpc.py | 4 ++++ 2 files changed, 16 insertions(+), 1 deletion(-) diff --git a/ietf/api/tests_views_rpc.py b/ietf/api/tests_views_rpc.py index 6d10bee8e8..0db67e126f 100644 --- a/ietf/api/tests_views_rpc.py +++ b/ietf/api/tests_views_rpc.py @@ -196,9 +196,15 @@ def test_notify_rfc_published(self, mock_task_delay): self.assertEqual(mock_kwargs["rfc_number_list"], expected_rfc_number_list) @override_settings(APP_API_TOKENS={"ietf.api.views_rpc": ["valid-token"]}) + @mock.patch("ietf.api.views_rpc.rebuild_reference_relations_task") @mock.patch("ietf.api.views_rpc.update_rfc_searchindex_task") @mock.patch("ietf.api.views_rpc.trigger_red_precomputer_task") - def test_upload_rfc_files(self, mock_trigger_red_task, mock_update_searchindex_task): + def test_upload_rfc_files( + self, + mock_trigger_red_task, + mock_update_searchindex_task, + mock_rebuild_relations, + ): def _valid_post_data(): """Generate a valid post data dict @@ -370,6 +376,11 @@ def _valid_post_data(): self.assertEqual(mock_kwargs["rfc_number_list"], expected_rfc_number_list) # Confirm that the search index update task was called correctly self.assertTrue(mock_update_searchindex_task.delay.called) + # Confirm reference relations rebuild task was called correctly + self.assertTrue(mock_rebuild_relations.delay.called) + _, mock_kwargs = mock_rebuild_relations.delay.call_args + self.assertIn("doc_names", mock_kwargs) + self.assertEqual(mock_kwargs["doc_names"], [rfc.name]) # re-post with replace = False should now fail mock_update_searchindex_task.reset_mock() diff --git a/ietf/api/views_rpc.py b/ietf/api/views_rpc.py index 59eed1e10e..6c7464e252 100644 --- a/ietf/api/views_rpc.py +++ b/ietf/api/views_rpc.py @@ -40,6 +40,7 @@ from ietf.doc.storage_utils import remove_from_storage, store_file, exists_in_storage from ietf.doc.tasks import ( signal_update_rfc_metadata_task, + rebuild_reference_relations_task, trigger_red_precomputer_task, update_rfc_searchindex_task, ) @@ -527,6 +528,9 @@ def post(self, request): 
trigger_red_precomputer_task.delay(rfc_number_list=sorted(needs_updating)) # Trigger search index update update_rfc_searchindex_task.delay(rfc.rfc_number) + # Trigger reference relation srebuild + rebuild_reference_relations_task.delay(doc_names=[rfc.name]) + return Response(NotificationAckSerializer().data) From 10ebdf9a6433b34d32352b4bb1b4e9b285773de8 Mon Sep 17 00:00:00 2001 From: Jennifer Richards Date: Mon, 23 Mar 2026 15:44:22 -0300 Subject: [PATCH 043/102] chore: deduplicate logging and clean up config (#10592) * fix: remove redundant loggers + cleanup * style: ruff ruff (logging config) * chore: alphabetize loggers * refactor: modern suppression of DisallowedHost log * style: minor cleanup / comments * fix: roll back accidental commit * fix: django.request at ERROR level * fix: squelch other SuspiciousOperation mail --- ietf/settings.py | 188 +++++++++++++++++++---------------------------- 1 file changed, 74 insertions(+), 114 deletions(-) diff --git a/ietf/settings.py b/ietf/settings.py index e0b4f20118..40a4cb5c56 100644 --- a/ietf/settings.py +++ b/ietf/settings.py @@ -1,4 +1,4 @@ -# Copyright The IETF Trust 2007-2025, All Rights Reserved +# Copyright The IETF Trust 2007-2026, All Rights Reserved # -*- coding: utf-8 -*- @@ -13,6 +13,7 @@ import warnings from hashlib import sha384 from typing import Any, Dict, List, Tuple # pyflakes:ignore +from django.http import UnreadablePostError # DeprecationWarnings are suppressed by default, enable them warnings.simplefilter("always", DeprecationWarning) @@ -236,153 +237,112 @@ FILE_UPLOAD_PERMISSIONS = 0o644 -# ------------------------------------------------------------------------ -# Django/Python Logging Framework Modifications -# Filter out "Invalid HTTP_HOST" emails -# Based on http://www.tiwoc.de/blog/2013/03/django-prevent-email-notification-on-suspiciousoperation/ -from django.core.exceptions import SuspiciousOperation -def skip_suspicious_operations(record): - if record.exc_info: - exc_value = record.exc_info[1] - if isinstance(exc_value, SuspiciousOperation): - return False - return True +# +# Logging config +# -# Filter out UreadablePostError: -from django.http import UnreadablePostError +# Callback to filter out UnreadablePostError: def skip_unreadable_post(record): if record.exc_info: - exc_type, exc_value = record.exc_info[:2] # pylint: disable=unused-variable + exc_type, exc_value = record.exc_info[:2] # pylint: disable=unused-variable if isinstance(exc_value, UnreadablePostError): return False return True -# Copied from DEFAULT_LOGGING as of Django 1.10.5 on 22 Feb 2017, and modified -# to incorporate html logging, invalid http_host filtering, and more. -# Changes from the default has comments. - -# The Python logging flow is as follows: -# (see https://docs.python.org/2.7/howto/logging.html#logging-flow) -# -# Init: get a Logger: logger = logging.getLogger(name) -# -# Logging call, e.g. 
logger.error(level, msg, *args, exc_info=(...), extra={...}) -# --> Logger (discard if level too low for this logger) -# (create log record from level, msg, args, exc_info, extra) -# --> Filters (discard if any filter attach to logger rejects record) -# --> Handlers (discard if level too low for handler) -# --> Filters (discard if any filter attached to handler rejects record) -# --> Formatter (format log record and emit) -# - LOGGING = { - 'version': 1, - 'disable_existing_loggers': False, - # - 'loggers': { - 'django': { - 'handlers': ['console', 'mail_admins'], - 'level': 'INFO', - }, - 'django.request': { - 'handlers': ['console'], - 'level': 'ERROR', + "version": 1, + "disable_existing_loggers": False, + "loggers": { + "celery": { + "handlers": ["console"], + "level": "INFO", }, - 'django.server': { - 'handlers': ['django.server'], - 'level': 'INFO', + "datatracker": { + "handlers": ["console"], + "level": "INFO", }, - 'django.security': { - 'handlers': ['console', ], - 'level': 'INFO', + "django": { + "handlers": ["console", "mail_admins"], + "level": "INFO", }, - 'oidc_provider': { - 'handlers': ['console', ], - 'level': 'DEBUG', + "django.request": {"level": "ERROR"}, # only log 5xx, ignore 4xx + "django.security": { + # SuspiciousOperation errors - log to console only + "handlers": ["console"], + "propagate": False, # no further handling please }, - 'datatracker': { - 'handlers': ['console'], - 'level': 'INFO', + "django.server": { + # Only used by Django's runserver development server + "handlers": ["django.server"], + "level": "INFO", }, - 'celery': { - 'handlers': ['console'], - 'level': 'INFO', + "oidc_provider": { + "handlers": ["console"], + "level": "DEBUG", }, }, - # - # No logger filters - # - 'handlers': { - 'console': { - 'level': 'DEBUG', - 'class': 'logging.StreamHandler', - 'formatter': 'plain', + "handlers": { + "console": { + "level": "DEBUG", + "class": "logging.StreamHandler", + "formatter": "plain", }, - 'debug_console': { - # Active only when DEBUG=True - 'level': 'DEBUG', - 'filters': ['require_debug_true'], - 'class': 'logging.StreamHandler', - 'formatter': 'plain', + "debug_console": { + "level": "DEBUG", + "filters": ["require_debug_true"], + "class": "logging.StreamHandler", + "formatter": "plain", }, - 'django.server': { - 'level': 'INFO', - 'class': 'logging.StreamHandler', - 'formatter': 'django.server', + "django.server": { + "level": "INFO", + "class": "logging.StreamHandler", + "formatter": "django.server", }, - 'mail_admins': { - 'level': 'ERROR', - 'filters': [ - 'require_debug_false', - 'skip_suspicious_operations', # custom - 'skip_unreadable_posts', # custom + "mail_admins": { + "level": "ERROR", + "filters": [ + "require_debug_false", + "skip_unreadable_posts", ], - 'class': 'django.utils.log.AdminEmailHandler', - 'include_html': True, # non-default - } + "class": "django.utils.log.AdminEmailHandler", + "include_html": True, + }, }, - # # All these are used by handlers - 'filters': { - 'require_debug_false': { - '()': 'django.utils.log.RequireDebugFalse', - }, - 'require_debug_true': { - '()': 'django.utils.log.RequireDebugTrue', + "filters": { + "require_debug_false": { + "()": "django.utils.log.RequireDebugFalse", }, - # custom filter, function defined above: - 'skip_suspicious_operations': { - '()': 'django.utils.log.CallbackFilter', - 'callback': skip_suspicious_operations, + "require_debug_true": { + "()": "django.utils.log.RequireDebugTrue", }, # custom filter, function defined above: - 'skip_unreadable_posts': { - '()': 
'django.utils.log.CallbackFilter', - 'callback': skip_unreadable_post, + "skip_unreadable_posts": { + "()": "django.utils.log.CallbackFilter", + "callback": skip_unreadable_post, }, }, - # And finally the formatters - 'formatters': { - 'django.server': { - '()': 'django.utils.log.ServerFormatter', - 'format': '[%(server_time)s] %(message)s', + "formatters": { + "django.server": { + "()": "django.utils.log.ServerFormatter", + "format": "[%(server_time)s] %(message)s", }, - 'plain': { - 'style': '{', - 'format': '{levelname}: {name}:{lineno}: {message}', + "plain": { + "style": "{", + "format": "{levelname}: {name}:{lineno}: {message}", }, - 'json' : { + "json": { "class": "ietf.utils.jsonlogger.DatatrackerJsonFormatter", "style": "{", - "format": "{asctime}{levelname}{message}{name}{pathname}{lineno}{funcName}{process}", - } + "format": ( + "{asctime}{levelname}{message}{name}{pathname}{lineno}{funcName}" + "{process}{status_code}" + ), + }, }, } -# End logging -# ------------------------------------------------------------------------ - X_FRAME_OPTIONS = 'SAMEORIGIN' CSRF_TRUSTED_ORIGINS = [ From 14dd4cfdacb49552a2fcb9d9525e713f8ebd3c26 Mon Sep 17 00:00:00 2001 From: Tianyi Gao Date: Tue, 24 Mar 2026 02:45:24 +0800 Subject: [PATCH 044/102] feat: show parents on list of teams with grouping (#8635) (#10552) * feat: show parents on list of teams with grouping (#8635) * fix: sort teams by parent type then parent name in active teams list --- ietf/group/views.py | 13 +++++++-- ietf/templates/group/active_teams.html | 38 ++++++++++++++++---------- 2 files changed, 34 insertions(+), 17 deletions(-) diff --git a/ietf/group/views.py b/ietf/group/views.py index efe3eca15d..8561a5059f 100644 --- a/ietf/group/views.py +++ b/ietf/group/views.py @@ -245,10 +245,19 @@ def active_review_dirs(request): return render(request, 'group/active_review_dirs.html', {'dirs' : dirs }) def active_teams(request): - teams = Group.objects.filter(type="team", state="active").order_by("name") + parent_type_order = {"area": 1, "adm": 3, None: 4} + + def team_sort_key(group): + type_id = group.parent.type_id if group.parent else None + return (parent_type_order.get(type_id, 2), group.parent.name if group.parent else "", group.name) + + teams = sorted( + Group.objects.filter(type="team", state="active").select_related("parent"), + key=team_sort_key, + ) for group in teams: group.chairs = sorted(roles(group, "chair"), key=extract_last_name) - return render(request, 'group/active_teams.html', {'teams' : teams }) + return render(request, 'group/active_teams.html', {'teams': teams}) def active_iab(request): iabgroups = Group.objects.filter(type__in=("program","iabasg","iabworkshop"), state="active").order_by("-type_id","name") diff --git a/ietf/templates/group/active_teams.html b/ietf/templates/group/active_teams.html index 502d971a20..771dfda290 100644 --- a/ietf/templates/group/active_teams.html +++ b/ietf/templates/group/active_teams.html @@ -16,21 +16,29 @@

Active teams

- - {% for group in teams %} - - - - - - {% endfor %} - + {% regroup teams by parent as grouped_teams %} + {% for group_entry in grouped_teams %} + + + + + {% for group in group_entry.list %} + + + + + + {% endfor %} + + {% endfor %}
Parent + {{ group.parent.name }} + ({{ group.parent.acronym }}) +
Chairs
- {{ group.acronym }} - {{ group.name }} - {% for chair in group.chairs %} - {% person_link chair.person %}{% if not forloop.last %},{% endif %} - {% endfor %} -
+ {% if group_entry.grouper %}{{ group_entry.grouper.name }}{% else %}Other{% endif %} +
+ {{ group.acronym }} + {{ group.name }} + {% for chair in group.chairs %} + {% person_link chair.person %}{% if not forloop.last %},{% endif %} + {% endfor %} +
{% endblock %} {% block js %} From 057d52b76666ab6fcfd366700862be10db4844bb Mon Sep 17 00:00:00 2001 From: Jennifer Richards Date: Mon, 23 Mar 2026 15:56:39 -0300 Subject: [PATCH 045/102] ci: update actions to avoid deprecations (#10604) * ci: upload-artifact -> v7 * ci: checkout -> v6 --- .github/workflows/build-base-app.yml | 2 +- .github/workflows/build-devblobstore.yml | 2 +- .github/workflows/build-mq-broker.yml | 2 +- .github/workflows/build.yml | 8 ++++---- .github/workflows/ci-run-tests.yml | 2 +- .github/workflows/codeql-analysis.yml | 2 +- .github/workflows/dependency-review.yml | 2 +- .github/workflows/dev-assets-sync-nightly.yml | 2 +- .github/workflows/tests.yml | 14 +++++++------- 9 files changed, 18 insertions(+), 18 deletions(-) diff --git a/.github/workflows/build-base-app.yml b/.github/workflows/build-base-app.yml index 4a4394fca0..2b937cbfef 100644 --- a/.github/workflows/build-base-app.yml +++ b/.github/workflows/build-base-app.yml @@ -18,7 +18,7 @@ jobs: packages: write steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v6 with: token: ${{ secrets.GH_COMMON_TOKEN }} diff --git a/.github/workflows/build-devblobstore.yml b/.github/workflows/build-devblobstore.yml index f49a11af19..41b2e0d47a 100644 --- a/.github/workflows/build-devblobstore.yml +++ b/.github/workflows/build-devblobstore.yml @@ -20,7 +20,7 @@ jobs: packages: write steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v6 - name: Set up Docker Buildx uses: docker/setup-buildx-action@v3 diff --git a/.github/workflows/build-mq-broker.yml b/.github/workflows/build-mq-broker.yml index 4de861dbcd..76c9b93168 100644 --- a/.github/workflows/build-mq-broker.yml +++ b/.github/workflows/build-mq-broker.yml @@ -24,7 +24,7 @@ jobs: packages: write steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v6 - name: Set up QEMU uses: docker/setup-qemu-action@v3 diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index d97889fbb8..8872c7f7d3 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -66,7 +66,7 @@ jobs: base_image_version: ${{ steps.baseimgversion.outputs.base_image_version }} steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v6 with: fetch-depth: 1 fetch-tags: false @@ -164,7 +164,7 @@ jobs: TARGET_BASE: ${{needs.prepare.outputs.base_image_version}} steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v6 with: fetch-depth: 1 fetch-tags: false @@ -341,7 +341,7 @@ jobs: token: ${{ secrets.GITHUB_TOKEN }} - name: Upload Build Artifacts - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v7 with: name: release-${{ env.PKG_VERSION }} path: /home/runner/work/release/release.tar.gz @@ -403,7 +403,7 @@ jobs: PKG_VERSION: ${{needs.prepare.outputs.pkg_version}} steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v6 with: ref: main diff --git a/.github/workflows/ci-run-tests.yml b/.github/workflows/ci-run-tests.yml index 278bd8af2f..5349f1ac7a 100644 --- a/.github/workflows/ci-run-tests.yml +++ b/.github/workflows/ci-run-tests.yml @@ -23,7 +23,7 @@ jobs: base_image_version: ${{ steps.baseimgversion.outputs.base_image_version }} steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v6 with: fetch-depth: 1 fetch-tags: false diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml index 3444c03b5e..4ab32d27a6 100644 --- a/.github/workflows/codeql-analysis.yml +++ b/.github/workflows/codeql-analysis.yml @@ -26,7 +26,7 @@ jobs: steps: - name: Checkout 
repository - uses: actions/checkout@v4 + uses: actions/checkout@v6 - name: Initialize CodeQL uses: github/codeql-action/init@v3 diff --git a/.github/workflows/dependency-review.yml b/.github/workflows/dependency-review.yml index 6d0683c471..e255b270ff 100644 --- a/.github/workflows/dependency-review.yml +++ b/.github/workflows/dependency-review.yml @@ -15,7 +15,7 @@ jobs: runs-on: ubuntu-latest steps: - name: 'Checkout Repository' - uses: actions/checkout@v4 + uses: actions/checkout@v6 - name: 'Dependency Review' uses: actions/dependency-review-action@v4 with: diff --git a/.github/workflows/dev-assets-sync-nightly.yml b/.github/workflows/dev-assets-sync-nightly.yml index 4cfbf6365b..926d816b38 100644 --- a/.github/workflows/dev-assets-sync-nightly.yml +++ b/.github/workflows/dev-assets-sync-nightly.yml @@ -29,7 +29,7 @@ jobs: contents: read packages: write steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v6 - name: Login to GitHub Container Registry uses: docker/login-action@v3 diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 836314bac0..be7b834b7a 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -32,7 +32,7 @@ jobs: image: ghcr.io/ietf-tools/datatracker-devblobstore:latest steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v6 - name: Prepare for tests run: | @@ -68,7 +68,7 @@ jobs: coverage xml - name: Upload geckodriver.log - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v7 if: ${{ failure() }} with: name: geckodriverlog @@ -87,7 +87,7 @@ jobs: mv latest-coverage.json coverage.json - name: Upload Coverage Results as Build Artifact - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v7 if: ${{ always() }} with: name: coverage @@ -102,7 +102,7 @@ jobs: project: [chromium, firefox] steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v6 - uses: actions/setup-node@v6 with: @@ -121,7 +121,7 @@ jobs: npx playwright test --project=${{ matrix.project }} - name: Upload Report - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v7 if: ${{ always() }} continue-on-error: true with: @@ -143,7 +143,7 @@ jobs: image: ghcr.io/ietf-tools/datatracker-db:latest steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v6 - name: Prepare for tests run: | @@ -180,7 +180,7 @@ jobs: npx playwright test --project=${{ matrix.project }} -c playwright-legacy.config.js - name: Upload Report - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v7 if: ${{ always() }} continue-on-error: true with: From 02070ee2f4dc6ee599e08a87e92b345198ae40fa Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 23 Mar 2026 16:04:08 -0300 Subject: [PATCH 046/102] chore(deps): bump actions/setup-python from 5 to 6 (#9480) Bumps [actions/setup-python](https://github.com/actions/setup-python) from 5 to 6. - [Release notes](https://github.com/actions/setup-python/releases) - [Commits](https://github.com/actions/setup-python/compare/v5...v6) --- updated-dependencies: - dependency-name: actions/setup-python dependency-version: '6' dependency-type: direct:production update-type: version-update:semver-major ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/build.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 8872c7f7d3..07a304cac2 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -175,7 +175,7 @@ jobs: node-version: 18.x - name: Setup Python - uses: actions/setup-python@v5 + uses: actions/setup-python@v6 with: python-version: "3.x" From 4a6627826993863bcb9cd5ede6b6e6f5b19eb0e9 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 23 Mar 2026 17:07:28 -0300 Subject: [PATCH 047/102] chore(deps): bump ncipollo/release-action from 1.18.0 to 1.20.0 (#9478) Bumps [ncipollo/release-action](https://github.com/ncipollo/release-action) from 1.18.0 to 1.20.0. - [Release notes](https://github.com/ncipollo/release-action/releases) - [Commits](https://github.com/ncipollo/release-action/compare/v1.18.0...v1.20.0) --- updated-dependencies: - dependency-name: ncipollo/release-action dependency-version: 1.20.0 dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/build.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 07a304cac2..ed425f9ae5 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -98,7 +98,7 @@ jobs: echo "IS_RELEASE=true" >> $GITHUB_ENV - name: Create Draft Release - uses: ncipollo/release-action@v1.18.0 + uses: ncipollo/release-action@v1.21.0 if: ${{ github.ref_name == 'release' }} with: prerelease: true @@ -315,7 +315,7 @@ jobs: histCoveragePath: historical-coverage.json - name: Create Release - uses: ncipollo/release-action@v1.18.0 + uses: ncipollo/release-action@v1.21.0 if: ${{ env.SHOULD_DEPLOY == 'true' }} with: allowUpdates: true @@ -328,7 +328,7 @@ jobs: token: ${{ secrets.GITHUB_TOKEN }} - name: Update Baseline Coverage - uses: ncipollo/release-action@v1.18.0 + uses: ncipollo/release-action@v1.21.0 if: ${{ github.event.inputs.updateCoverage == 'true' || github.ref_name == 'release' }} with: allowUpdates: true From 753bd507c5d9cfdad4793d0e3feed68726fecf1e Mon Sep 17 00:00:00 2001 From: Robert Sparks Date: Mon, 23 Mar 2026 15:38:15 -0500 Subject: [PATCH 048/102] fix: include editorial docs in sent-to-rpc (#10605) --- ietf/api/views_rpc.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/ietf/api/views_rpc.py b/ietf/api/views_rpc.py index 6c7464e252..1e96118e58 100644 --- a/ietf/api/views_rpc.py +++ b/ietf/api/views_rpc.py @@ -210,19 +210,19 @@ def submitted_to_rpc(self, request): Those queries overreturn - there may be things, particularly not from the IETF stream that are already in the queue. 
""" ietf_docs = Q(states__type_id="draft-iesg", states__slug__in=["ann"]) - irtf_iab_ise_docs = Q( + irtf_iab_ise_editorial_docs = Q( states__type_id__in=[ "draft-stream-iab", "draft-stream-irtf", "draft-stream-ise", + "draft-stream-editorial", ], states__slug__in=["rfc-edit"], ) - # TODO: Need a way to talk about editorial stream docs docs = ( self.get_queryset() .filter(type_id="draft") - .filter(ietf_docs | irtf_iab_ise_docs) + .filter(ietf_docs | irtf_iab_ise_editorial_docs) ) serializer = self.get_serializer(docs, many=True) return Response(serializer.data) From 4308162174bb565b988c4fca9289c424c736ecba Mon Sep 17 00:00:00 2001 From: Jennifer Richards Date: Mon, 23 Mar 2026 17:38:34 -0300 Subject: [PATCH 049/102] ci: handle rabbitmq version for push trigger (#10606) --- .github/workflows/build-mq-broker.yml | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/.github/workflows/build-mq-broker.yml b/.github/workflows/build-mq-broker.yml index 76c9b93168..50472122c4 100644 --- a/.github/workflows/build-mq-broker.yml +++ b/.github/workflows/build-mq-broker.yml @@ -39,6 +39,15 @@ jobs: username: ${{ github.actor }} password: ${{ secrets.GITHUB_TOKEN }} + - name: Set rabbitmq version + id: rabbitmq-version + run: | + if [[ "${{ inputs.rabbitmq_version }}" == "" ]]; then + echo "RABBITMQ_VERSION=3.13-alpine" >> $GITHUB_OUTPUT + else + echo "RABBITMQ_VERSION=${{ inputs.rabbitmq_version }}" >> $GITHUB_OUTPUT + fi + - name: Docker Build & Push uses: docker/build-push-action@v6 env: @@ -48,7 +57,7 @@ jobs: file: dev/mq/Dockerfile platforms: linux/amd64,linux/arm64 push: true - build-args: RABBITMQ_VERSION=${{ inputs.rabbitmq_version }} + build-args: RABBITMQ_VERSION=${{ steps.rabbitmq-version.outputs.RABBITMQ_VERSION }} tags: | - ghcr.io/ietf-tools/datatracker-mq:${{ inputs.rabbitmq_version }} + ghcr.io/ietf-tools/datatracker-mq:${{ steps.rabbitmq-version.outputs.RABBITMQ_VERSION }} ghcr.io/ietf-tools/datatracker-mq:latest From eb041f7d81c7469f0b4c765150ccfadba5604177 Mon Sep 17 00:00:00 2001 From: Martin Thomson Date: Tue, 24 Mar 2026 05:39:01 +0900 Subject: [PATCH 050/102] fix: Rewrite CSS style attributes in SVG (#10584) This makes the dark mode work properly for drafts like https://datatracker.ietf.org/doc/html/draft-hajdusek-qirg-timing-physics-01 which have diagrams that use a mix of ordinary attributes and the style attribute. Using the style attribute makes the rules there invisible to the method we use for the remapping of black and white for dark mode. --- ietf/static/js/document_html.js | 79 +++++++++++++++++++++++++++++++++ 1 file changed, 79 insertions(+) diff --git a/ietf/static/js/document_html.js b/ietf/static/js/document_html.js index 6e8861739a..3e609f3965 100644 --- a/ietf/static/js/document_html.js +++ b/ietf/static/js/document_html.js @@ -117,4 +117,83 @@ document.addEventListener("DOMContentLoaded", function (event) { } }); } + + // Rewrite these CSS properties so that the values are available for restyling. 
+ document.querySelectorAll("svg [style]").forEach(el => { + // Push these CSS properties into their own attributes + const SVG_PRESENTATION_ATTRS = new Set([ + 'alignment-baseline', 'baseline-shift', 'clip', 'clip-path', 'clip-rule', + 'color', 'color-interpolation', 'color-interpolation-filters', + 'color-rendering', 'cursor', 'direction', 'display', 'dominant-baseline', + 'fill', 'fill-opacity', 'fill-rule', 'filter', 'flood-color', + 'flood-opacity', 'font-family', 'font-size', 'font-size-adjust', + 'font-stretch', 'font-style', 'font-variant', 'font-weight', + 'image-rendering', 'letter-spacing', 'lighting-color', 'marker-end', + 'marker-mid', 'marker-start', 'mask', 'opacity', 'overflow', 'paint-order', + 'pointer-events', 'shape-rendering', 'stop-color', 'stop-opacity', + 'stroke', 'stroke-dasharray', 'stroke-dashoffset', 'stroke-linecap', + 'stroke-linejoin', 'stroke-miterlimit', 'stroke-opacity', 'stroke-width', + 'text-anchor', 'text-decoration', 'text-rendering', 'unicode-bidi', + 'vector-effect', 'visibility', 'word-spacing', 'writing-mode', + ]); + + // Simple CSS splitter: respects quoted strings and parens so semicolons + // inside url(...) or "..." don't get treated as declaration boundaries. + function parseDeclarations(styleText) { + const decls = []; + let buf = ''; + let inStr = false; + let strChar = ''; + let escaped = false; + let depth = 0; + + for (const ch of styleText) { + if (inStr) { + if (escaped) { + escaped = false; + } else if (ch === '\\') { + escaped = true; + } else if (ch === strChar) { + inStr = false; + } + } else if (ch === '"' || ch === "'") { + inStr = true; + strChar = ch; + } else if (ch === '(') { + depth++; + } else if (ch === ')') { + depth--; + } else if (ch === ';' && depth === 0) { + const trimmed = buf.trim(); + if (trimmed) { + decls.push(trimmed); + } + buf = ''; + continue; + } + buf += ch; + } + const trimmed = buf.trim(); + if (trimmed) { + decls.push(trimmed); + } + return decls; + } + + const remainder = []; + for (const decl of parseDeclarations(el.getAttribute('style'))) { + const [prop, val] = decl.split(":", 2).map(v => v.trim()); + if (val && !/!important$/.test(val) && SVG_PRESENTATION_ATTRS.has(prop)) { + el.setAttribute(prop, val); + } else { + remainder.push(decl); + } + } + + if (remainder.length > 0) { + el.setAttribute('style', remainder.join('; ')); + } else { + el.removeAttribute('style'); + } + }); }); From 93e9bd3aad53808e791302c8fd99d74eb0873385 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 23 Mar 2026 17:44:26 -0300 Subject: [PATCH 051/102] chore(deps): bump github/codeql-action from 3 to 4 (#9956) Bumps [github/codeql-action](https://github.com/github/codeql-action) from 3 to 4. - [Release notes](https://github.com/github/codeql-action/releases) - [Changelog](https://github.com/github/codeql-action/blob/main/CHANGELOG.md) - [Commits](https://github.com/github/codeql-action/compare/v3...v4) --- updated-dependencies: - dependency-name: github/codeql-action dependency-version: '4' dependency-type: direct:production update-type: version-update:semver-major ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/codeql-analysis.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml index 4ab32d27a6..bc20779ae6 100644 --- a/.github/workflows/codeql-analysis.yml +++ b/.github/workflows/codeql-analysis.yml @@ -29,9 +29,9 @@ jobs: uses: actions/checkout@v6 - name: Initialize CodeQL - uses: github/codeql-action/init@v3 + uses: github/codeql-action/init@v4 with: languages: ${{ matrix.language }} - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@v3 + uses: github/codeql-action/analyze@v4 From f9aebd5aa881557d7493db415d04a3d89494d637 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 23 Mar 2026 17:48:56 -0300 Subject: [PATCH 052/102] chore(deps): bump actions/download-artifact from 4.3.0 to 6.0.0 (#9805) Bumps [actions/download-artifact](https://github.com/actions/download-artifact) from 4.3.0 to 6.0.0. - [Release notes](https://github.com/actions/download-artifact/releases) - [Commits](https://github.com/actions/download-artifact/compare/v4.3.0...v6.0.0) --- updated-dependencies: - dependency-name: actions/download-artifact dependency-version: 6.0.0 dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/build.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index ed425f9ae5..74791747b6 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -186,7 +186,7 @@ jobs: - name: Download a Coverage Results if: ${{ github.event.inputs.skiptests == 'false' || github.ref_name == 'release' }} - uses: actions/download-artifact@v4.3.0 + uses: actions/download-artifact@v6.0.0 with: name: coverage @@ -291,7 +291,7 @@ jobs: - name: Download Coverage Results if: ${{ github.event.inputs.skiptests == 'false' || github.ref_name == 'release' }} - uses: actions/download-artifact@v4.3.0 + uses: actions/download-artifact@v6.0.0 with: name: coverage From 7d84aacad621b83753d6701afbcc864592d4822f Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 23 Mar 2026 17:50:26 -0300 Subject: [PATCH 053/102] chore(deps): bump the npm group (#10602) Bumps the npm group in /dev/deploy-to-container with 2 updates: [dockerode](https://github.com/apocas/dockerode) and [tar](https://github.com/isaacs/node-tar). Updates `dockerode` from 4.0.9 to 4.0.10 - [Release notes](https://github.com/apocas/dockerode/releases) - [Commits](https://github.com/apocas/dockerode/compare/v4.0.9...v4.0.10) Updates `tar` from 7.5.11 to 7.5.12 - [Release notes](https://github.com/isaacs/node-tar/releases) - [Changelog](https://github.com/isaacs/node-tar/blob/main/CHANGELOG.md) - [Commits](https://github.com/isaacs/node-tar/compare/v7.5.11...v7.5.12) --- updated-dependencies: - dependency-name: dockerode dependency-version: 4.0.10 dependency-type: direct:production update-type: version-update:semver-patch dependency-group: npm - dependency-name: tar dependency-version: 7.5.12 dependency-type: direct:production update-type: version-update:semver-patch dependency-group: npm ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- dev/deploy-to-container/package-lock.json | 113 ++++++++++------------ dev/deploy-to-container/package.json | 4 +- 2 files changed, 54 insertions(+), 63 deletions(-) diff --git a/dev/deploy-to-container/package-lock.json b/dev/deploy-to-container/package-lock.json index b62109f0e2..a68f170c4b 100644 --- a/dev/deploy-to-container/package-lock.json +++ b/dev/deploy-to-container/package-lock.json @@ -6,12 +6,12 @@ "": { "name": "deploy-to-container", "dependencies": { - "dockerode": "^4.0.9", + "dockerode": "^4.0.10", "fs-extra": "^11.3.4", "nanoid": "5.1.7", "nanoid-dictionary": "5.0.0", "slugify": "1.6.8", - "tar": "^7.5.11", + "tar": "^7.5.12", "yargs": "^17.7.2" }, "engines": { @@ -160,7 +160,6 @@ "version": "0.2.6", "resolved": "https://registry.npmjs.org/asn1/-/asn1-0.2.6.tgz", "integrity": "sha512-ix/FxPn0MDjeyJ7i/yoHGFt/EX6LyNbxSEhPPXODPL+KB0VPk86UYfL0lMdy+KCnv+fmvIzySwaK5COwqVbWTQ==", - "license": "MIT", "dependencies": { "safer-buffer": "~2.1.0" } @@ -188,7 +187,6 @@ "version": "1.0.2", "resolved": "https://registry.npmjs.org/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz", "integrity": "sha512-qeFIXtP4MSoi6NLqO12WfqARWWuCKi2Rn/9hJLEmtB5yTNr9DqFWkJRCf2qShWzPeAMRnOgCrq0sg/KLv5ES9w==", - "license": "BSD-3-Clause", "dependencies": { "tweetnacl": "^0.14.3" } @@ -227,9 +225,9 @@ } }, "node_modules/buildcheck": { - "version": "0.0.6", - "resolved": "https://registry.npmjs.org/buildcheck/-/buildcheck-0.0.6.tgz", - "integrity": "sha512-8f9ZJCUXyT1M35Jx7MkBgmBMo3oHTTBIPLiY9xyL0pl3T5RwcPEY8cUHr5LBNfu/fk6c2T4DJZuVM/8ZZT2D2A==", + "version": "0.0.7", + "resolved": "https://registry.npmjs.org/buildcheck/-/buildcheck-0.0.7.tgz", + "integrity": "sha512-lHblz4ahamxpTmnsk+MNTRWsjYKv965MwOrSJyeD588rR3Jcu7swE+0wN5F+PbL5cjgu/9ObkhfzEPuofEMwLA==", "optional": true, "engines": { "node": ">=10.0.0" @@ -284,10 +282,9 @@ } }, "node_modules/debug": { - "version": "4.4.0", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.0.tgz", - "integrity": "sha512-6WTZ/IxCY/T6BALoZHaE4ctp9xm+Z5kY/pzYaCHRFeyVhojxlrm+46y68HA6hr0TcwEssoxNiDEUJQjfPZ/RYA==", - "license": "MIT", + "version": "4.4.3", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.3.tgz", + "integrity": "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==", "dependencies": { "ms": "^2.1.3" }, @@ -301,10 +298,9 @@ } }, "node_modules/docker-modem": { - "version": "5.0.6", - "resolved": "https://registry.npmjs.org/docker-modem/-/docker-modem-5.0.6.tgz", - "integrity": "sha512-ens7BiayssQz/uAxGzH8zGXCtiV24rRWXdjNha5V4zSOcxmAZsfGVm/PPFbwQdqEkDnhG+SyR9E3zSHUbOKXBQ==", - "license": "Apache-2.0", + "version": "5.0.7", + "resolved": "https://registry.npmjs.org/docker-modem/-/docker-modem-5.0.7.tgz", + "integrity": "sha512-XJgGhoR/CLpqshm4d3L7rzH6t8NgDFUIIpztYlLHIApeJjMZKYJMz2zxPsYxnejq5h3ELYSw/RBsi3t5h7gNTA==", "dependencies": { "debug": "^4.1.1", "readable-stream": "^3.5.0", @@ -316,14 +312,14 @@ } }, "node_modules/dockerode": { - "version": "4.0.9", - "resolved": "https://registry.npmjs.org/dockerode/-/dockerode-4.0.9.tgz", - "integrity": "sha512-iND4mcOWhPaCNh54WmK/KoSb35AFqPAUWFMffTQcp52uQt36b5uNwEJTSXntJZBbeGad72Crbi/hvDIv6us/6Q==", + "version": "4.0.10", + "resolved": "https://registry.npmjs.org/dockerode/-/dockerode-4.0.10.tgz", + "integrity": "sha512-8L/P9JynLBiG7/coiA4FlQXegHltRqS0a+KqI44P1zgQh8QLHTg7FKOwhkBgSJwZTeHsq30WRoVFLuwkfK0YFg==", "dependencies": { "@balena/dockerignore": 
"^1.0.2", "@grpc/grpc-js": "^1.11.1", "@grpc/proto-loader": "^0.7.13", - "docker-modem": "^5.0.6", + "docker-modem": "^5.0.7", "protobufjs": "^7.3.2", "tar-fs": "^2.1.4", "uuid": "^10.0.0" @@ -464,14 +460,12 @@ "node_modules/ms": { "version": "2.1.3", "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", - "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", - "license": "MIT" + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==" }, "node_modules/nan": { - "version": "2.22.0", - "resolved": "https://registry.npmjs.org/nan/-/nan-2.22.0.tgz", - "integrity": "sha512-nbajikzWTMwsW+eSsNm3QwlOs7het9gGJU5dDZzRTQGk03vyBOauxgI4VakDzE0PtsGTmXPsXTbbjVhRwR5mpw==", - "license": "MIT", + "version": "2.26.2", + "resolved": "https://registry.npmjs.org/nan/-/nan-2.26.2.tgz", + "integrity": "sha512-0tTvBTYkt3tdGw22nrAy50x7gpbGCCFH3AFcyS5WiUu7Eu4vWlri1woE6qHBSfy11vksDqkiwjOnlR7WV8G1Hw==", "optional": true }, "node_modules/nanoid": { @@ -580,8 +574,7 @@ "node_modules/safer-buffer": { "version": "2.1.2", "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", - "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==", - "license": "MIT" + "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==" }, "node_modules/slugify": { "version": "1.6.8", @@ -594,13 +587,12 @@ "node_modules/split-ca": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/split-ca/-/split-ca-1.0.1.tgz", - "integrity": "sha512-Q5thBSxp5t8WPTTJQS59LrGqOZqOsrhDGDVm8azCqIBjSBd7nd9o2PM+mDulQQkh8h//4U6hFZnc/mul8t5pWQ==", - "license": "ISC" + "integrity": "sha512-Q5thBSxp5t8WPTTJQS59LrGqOZqOsrhDGDVm8azCqIBjSBd7nd9o2PM+mDulQQkh8h//4U6hFZnc/mul8t5pWQ==" }, "node_modules/ssh2": { - "version": "1.16.0", - "resolved": "https://registry.npmjs.org/ssh2/-/ssh2-1.16.0.tgz", - "integrity": "sha512-r1X4KsBGedJqo7h8F5c4Ybpcr5RjyP+aWIG007uBPRjmdQWfEiVLzSK71Zji1B9sKxwaCvD8y8cwSkYrlLiRRg==", + "version": "1.17.0", + "resolved": "https://registry.npmjs.org/ssh2/-/ssh2-1.17.0.tgz", + "integrity": "sha512-wPldCk3asibAjQ/kziWQQt1Wh3PgDFpC0XpwclzKcdT1vql6KeYxf5LIt4nlFkUeR8WuphYMKqUA56X4rjbfgQ==", "hasInstallScript": true, "dependencies": { "asn1": "^0.2.6", @@ -611,7 +603,7 @@ }, "optionalDependencies": { "cpu-features": "~0.0.10", - "nan": "^2.20.0" + "nan": "^2.23.0" } }, "node_modules/string_decoder": { @@ -647,9 +639,9 @@ } }, "node_modules/tar": { - "version": "7.5.11", - "resolved": "https://registry.npmjs.org/tar/-/tar-7.5.11.tgz", - "integrity": "sha512-ChjMH33/KetonMTAtpYdgUFr0tbz69Fp2v7zWxQfYZX4g5ZN2nOBXm1R2xyA+lMIKrLKIoKAwFj93jE/avX9cQ==", + "version": "7.5.12", + "resolved": "https://registry.npmjs.org/tar/-/tar-7.5.12.tgz", + "integrity": "sha512-9TsuLcdhOn4XztcQqhNyq1KOwOOED/3k58JAvtULiYqbO8B/0IBAAIE1hj0Svmm58k27TmcigyDI0deMlgG3uw==", "dependencies": { "@isaacs/fs-minipass": "^4.0.0", "chownr": "^3.0.0", @@ -698,8 +690,7 @@ "node_modules/tweetnacl": { "version": "0.14.5", "resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.5.tgz", - "integrity": "sha512-KXXFFdAbFXY4geFIwoyNK+f5Z1b7swfXABfL7HXCmoIWMKU3dmS26672A4EeQtDzLKy7SXmfBu51JolvEKwtGA==", - "license": "Unlicense" + "integrity": "sha512-KXXFFdAbFXY4geFIwoyNK+f5Z1b7swfXABfL7HXCmoIWMKU3dmS26672A4EeQtDzLKy7SXmfBu51JolvEKwtGA==" }, "node_modules/undici-types": { "version": "6.20.0", @@ -949,9 +940,9 @@ } }, "buildcheck": { - 
"version": "0.0.6", - "resolved": "https://registry.npmjs.org/buildcheck/-/buildcheck-0.0.6.tgz", - "integrity": "sha512-8f9ZJCUXyT1M35Jx7MkBgmBMo3oHTTBIPLiY9xyL0pl3T5RwcPEY8cUHr5LBNfu/fk6c2T4DJZuVM/8ZZT2D2A==", + "version": "0.0.7", + "resolved": "https://registry.npmjs.org/buildcheck/-/buildcheck-0.0.7.tgz", + "integrity": "sha512-lHblz4ahamxpTmnsk+MNTRWsjYKv965MwOrSJyeD588rR3Jcu7swE+0wN5F+PbL5cjgu/9ObkhfzEPuofEMwLA==", "optional": true }, "chownr": { @@ -993,17 +984,17 @@ } }, "debug": { - "version": "4.4.0", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.0.tgz", - "integrity": "sha512-6WTZ/IxCY/T6BALoZHaE4ctp9xm+Z5kY/pzYaCHRFeyVhojxlrm+46y68HA6hr0TcwEssoxNiDEUJQjfPZ/RYA==", + "version": "4.4.3", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.3.tgz", + "integrity": "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==", "requires": { "ms": "^2.1.3" } }, "docker-modem": { - "version": "5.0.6", - "resolved": "https://registry.npmjs.org/docker-modem/-/docker-modem-5.0.6.tgz", - "integrity": "sha512-ens7BiayssQz/uAxGzH8zGXCtiV24rRWXdjNha5V4zSOcxmAZsfGVm/PPFbwQdqEkDnhG+SyR9E3zSHUbOKXBQ==", + "version": "5.0.7", + "resolved": "https://registry.npmjs.org/docker-modem/-/docker-modem-5.0.7.tgz", + "integrity": "sha512-XJgGhoR/CLpqshm4d3L7rzH6t8NgDFUIIpztYlLHIApeJjMZKYJMz2zxPsYxnejq5h3ELYSw/RBsi3t5h7gNTA==", "requires": { "debug": "^4.1.1", "readable-stream": "^3.5.0", @@ -1012,14 +1003,14 @@ } }, "dockerode": { - "version": "4.0.9", - "resolved": "https://registry.npmjs.org/dockerode/-/dockerode-4.0.9.tgz", - "integrity": "sha512-iND4mcOWhPaCNh54WmK/KoSb35AFqPAUWFMffTQcp52uQt36b5uNwEJTSXntJZBbeGad72Crbi/hvDIv6us/6Q==", + "version": "4.0.10", + "resolved": "https://registry.npmjs.org/dockerode/-/dockerode-4.0.10.tgz", + "integrity": "sha512-8L/P9JynLBiG7/coiA4FlQXegHltRqS0a+KqI44P1zgQh8QLHTg7FKOwhkBgSJwZTeHsq30WRoVFLuwkfK0YFg==", "requires": { "@balena/dockerignore": "^1.0.2", "@grpc/grpc-js": "^1.11.1", "@grpc/proto-loader": "^0.7.13", - "docker-modem": "^5.0.6", + "docker-modem": "^5.0.7", "protobufjs": "^7.3.2", "tar-fs": "^2.1.4", "uuid": "^10.0.0" @@ -1126,9 +1117,9 @@ "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==" }, "nan": { - "version": "2.22.0", - "resolved": "https://registry.npmjs.org/nan/-/nan-2.22.0.tgz", - "integrity": "sha512-nbajikzWTMwsW+eSsNm3QwlOs7het9gGJU5dDZzRTQGk03vyBOauxgI4VakDzE0PtsGTmXPsXTbbjVhRwR5mpw==", + "version": "2.26.2", + "resolved": "https://registry.npmjs.org/nan/-/nan-2.26.2.tgz", + "integrity": "sha512-0tTvBTYkt3tdGw22nrAy50x7gpbGCCFH3AFcyS5WiUu7Eu4vWlri1woE6qHBSfy11vksDqkiwjOnlR7WV8G1Hw==", "optional": true }, "nanoid": { @@ -1213,14 +1204,14 @@ "integrity": "sha512-Q5thBSxp5t8WPTTJQS59LrGqOZqOsrhDGDVm8azCqIBjSBd7nd9o2PM+mDulQQkh8h//4U6hFZnc/mul8t5pWQ==" }, "ssh2": { - "version": "1.16.0", - "resolved": "https://registry.npmjs.org/ssh2/-/ssh2-1.16.0.tgz", - "integrity": "sha512-r1X4KsBGedJqo7h8F5c4Ybpcr5RjyP+aWIG007uBPRjmdQWfEiVLzSK71Zji1B9sKxwaCvD8y8cwSkYrlLiRRg==", + "version": "1.17.0", + "resolved": "https://registry.npmjs.org/ssh2/-/ssh2-1.17.0.tgz", + "integrity": "sha512-wPldCk3asibAjQ/kziWQQt1Wh3PgDFpC0XpwclzKcdT1vql6KeYxf5LIt4nlFkUeR8WuphYMKqUA56X4rjbfgQ==", "requires": { "asn1": "^0.2.6", "bcrypt-pbkdf": "^1.0.2", "cpu-features": "~0.0.10", - "nan": "^2.20.0" + "nan": "^2.23.0" } }, "string_decoder": { @@ -1250,9 +1241,9 @@ } }, "tar": { - "version": "7.5.11", - "resolved": 
"https://registry.npmjs.org/tar/-/tar-7.5.11.tgz", - "integrity": "sha512-ChjMH33/KetonMTAtpYdgUFr0tbz69Fp2v7zWxQfYZX4g5ZN2nOBXm1R2xyA+lMIKrLKIoKAwFj93jE/avX9cQ==", + "version": "7.5.12", + "resolved": "https://registry.npmjs.org/tar/-/tar-7.5.12.tgz", + "integrity": "sha512-9TsuLcdhOn4XztcQqhNyq1KOwOOED/3k58JAvtULiYqbO8B/0IBAAIE1hj0Svmm58k27TmcigyDI0deMlgG3uw==", "requires": { "@isaacs/fs-minipass": "^4.0.0", "chownr": "^3.0.0", diff --git a/dev/deploy-to-container/package.json b/dev/deploy-to-container/package.json index 1c95a4540c..aa9e82dbdf 100644 --- a/dev/deploy-to-container/package.json +++ b/dev/deploy-to-container/package.json @@ -2,12 +2,12 @@ "name": "deploy-to-container", "type": "module", "dependencies": { - "dockerode": "^4.0.9", + "dockerode": "^4.0.10", "fs-extra": "^11.3.4", "nanoid": "5.1.7", "nanoid-dictionary": "5.0.0", "slugify": "1.6.8", - "tar": "^7.5.11", + "tar": "^7.5.12", "yargs": "^17.7.2" }, "engines": { From 3d00e594e6a667fe8084bc8548d6993c29d515e6 Mon Sep 17 00:00:00 2001 From: Jennifer Richards Date: Mon, 23 Mar 2026 19:50:19 -0300 Subject: [PATCH 054/102] chore(deps): bump more action versions (#10608) --- .github/workflows/build-base-app.yml | 6 +++--- .github/workflows/build-devblobstore.yml | 6 +++--- .github/workflows/build-mq-broker.yml | 6 +++--- .github/workflows/build.yml | 10 +++++----- .github/workflows/dev-assets-sync-nightly.yml | 4 ++-- 5 files changed, 16 insertions(+), 16 deletions(-) diff --git a/.github/workflows/build-base-app.yml b/.github/workflows/build-base-app.yml index 2b937cbfef..1b0855cc47 100644 --- a/.github/workflows/build-base-app.yml +++ b/.github/workflows/build-base-app.yml @@ -31,17 +31,17 @@ jobs: uses: docker/setup-qemu-action@v3 - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v3 + uses: docker/setup-buildx-action@v4 - name: Login to GitHub Container Registry - uses: docker/login-action@v3 + uses: docker/login-action@v4 with: registry: ghcr.io username: ${{ github.actor }} password: ${{ secrets.GITHUB_TOKEN }} - name: Docker Build & Push - uses: docker/build-push-action@v6 + uses: docker/build-push-action@v7 env: DOCKER_BUILD_SUMMARY: false with: diff --git a/.github/workflows/build-devblobstore.yml b/.github/workflows/build-devblobstore.yml index 41b2e0d47a..14c4b1a135 100644 --- a/.github/workflows/build-devblobstore.yml +++ b/.github/workflows/build-devblobstore.yml @@ -23,17 +23,17 @@ jobs: - uses: actions/checkout@v6 - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v3 + uses: docker/setup-buildx-action@v4 - name: Login to GitHub Container Registry - uses: docker/login-action@v3 + uses: docker/login-action@v4 with: registry: ghcr.io username: ${{ github.actor }} password: ${{ secrets.GITHUB_TOKEN }} - name: Docker Build & Push - uses: docker/build-push-action@v6 + uses: docker/build-push-action@v7 env: DOCKER_BUILD_SUMMARY: false with: diff --git a/.github/workflows/build-mq-broker.yml b/.github/workflows/build-mq-broker.yml index 50472122c4..ef7ed2f65c 100644 --- a/.github/workflows/build-mq-broker.yml +++ b/.github/workflows/build-mq-broker.yml @@ -30,10 +30,10 @@ jobs: uses: docker/setup-qemu-action@v3 - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v3 + uses: docker/setup-buildx-action@v4 - name: Login to GitHub Container Registry - uses: docker/login-action@v3 + uses: docker/login-action@v4 with: registry: ghcr.io username: ${{ github.actor }} @@ -49,7 +49,7 @@ jobs: fi - name: Docker Build & Push - uses: docker/build-push-action@v6 + uses: 
docker/build-push-action@v7 env: DOCKER_BUILD_SUMMARY: false with: diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 74791747b6..8ec806b229 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -253,10 +253,10 @@ jobs: EOL - name: Setup Docker Buildx - uses: docker/setup-buildx-action@v3 + uses: docker/setup-buildx-action@v4 - name: Login to GitHub Container Registry - uses: docker/login-action@v3 + uses: docker/login-action@v4 with: registry: ghcr.io username: ${{ github.actor }} @@ -267,7 +267,7 @@ jobs: run: echo "FEATURE_LATEST_TAG=$(echo $GITHUB_REF_NAME | tr / -)" >> $GITHUB_ENV - name: Build Images - uses: docker/build-push-action@v6 + uses: docker/build-push-action@v7 env: DOCKER_BUILD_SUMMARY: false with: @@ -360,7 +360,7 @@ jobs: steps: - name: Notify on Slack (Success) if: ${{ !contains(join(needs.*.result, ','), 'failure') }} - uses: slackapi/slack-github-action@v2 + uses: slackapi/slack-github-action@v3 with: token: ${{ secrets.SLACK_GH_BOT }} method: chat.postMessage @@ -375,7 +375,7 @@ jobs: value: "Completed" - name: Notify on Slack (Failure) if: ${{ contains(join(needs.*.result, ','), 'failure') }} - uses: slackapi/slack-github-action@v2 + uses: slackapi/slack-github-action@v3 with: token: ${{ secrets.SLACK_GH_BOT }} method: chat.postMessage diff --git a/.github/workflows/dev-assets-sync-nightly.yml b/.github/workflows/dev-assets-sync-nightly.yml index 926d816b38..cd986f06f3 100644 --- a/.github/workflows/dev-assets-sync-nightly.yml +++ b/.github/workflows/dev-assets-sync-nightly.yml @@ -32,14 +32,14 @@ jobs: - uses: actions/checkout@v6 - name: Login to GitHub Container Registry - uses: docker/login-action@v3 + uses: docker/login-action@v4 with: registry: ghcr.io username: ${{ github.actor }} password: ${{ secrets.GITHUB_TOKEN }} - name: Docker Build & Push - uses: docker/build-push-action@v6 + uses: docker/build-push-action@v7 env: DOCKER_BUILD_SUMMARY: false with: From e51469a5d437491071610156d56dcb73191ad61c Mon Sep 17 00:00:00 2001 From: Rudi Matz Date: Fri, 27 Mar 2026 13:44:35 -0400 Subject: [PATCH 055/102] feat: add email/name for ADs and WG Chairs --- ietf/api/serializers_rpc.py | 28 +++++++++++++++++++++++++++- ietf/group/serializers.py | 6 ++++++ 2 files changed, 33 insertions(+), 1 deletion(-) diff --git a/ietf/api/serializers_rpc.py b/ietf/api/serializers_rpc.py index 397ca05d9b..d888de4586 100644 --- a/ietf/api/serializers_rpc.py +++ b/ietf/api/serializers_rpc.py @@ -27,7 +27,7 @@ update_action_holders, update_rfcauthors, ) -from ietf.group.models import Group +from ietf.group.models import Group, Role from ietf.group.serializers import AreaSerializer from ietf.name.models import StreamName, StdLevelName from ietf.person.models import Person @@ -97,6 +97,21 @@ class Meta: fields = ["draft_name", "authors"] +class WgChairSerializer(serializers.Serializer): + """Serialize a WG chair's name and email from a Role""" + + name = serializers.SerializerMethodField() + email = serializers.SerializerMethodField() + + @extend_schema_field(serializers.CharField) + def get_name(self, role: Role) -> str: + return role.person.plain_name() + + @extend_schema_field(serializers.EmailField) + def get_email(self, role: Role) -> str: + return role.email.email_address() + + class DocumentAuthorSerializer(serializers.ModelSerializer): """Serializer for a Person in a response""" @@ -126,6 +141,7 @@ class FullDraftSerializer(serializers.ModelSerializer): source="shepherd.person", read_only=True ) consensus = 
serializers.SerializerMethodField() + wg_chairs = serializers.SerializerMethodField() class Meta: model = Document @@ -145,11 +161,21 @@ class Meta: "consensus", "shepherd", "ad", + "wg_chairs", ] def get_consensus(self, doc: Document) -> Optional[bool]: return default_consensus(doc) + @extend_schema_field(WgChairSerializer(many=True)) + def get_wg_chairs(self, doc: Document): + if doc.group is None: + return [] + chairs = doc.group.role_set.filter(name_id="chair").select_related( + "person", "email" + ) + return WgChairSerializer(chairs, many=True).data + def get_source_format( self, doc: Document ) -> Literal["unknown", "xml-v2", "xml-v3", "txt"]: diff --git a/ietf/group/serializers.py b/ietf/group/serializers.py index db3b37af48..e789ba46bf 100644 --- a/ietf/group/serializers.py +++ b/ietf/group/serializers.py @@ -20,8 +20,14 @@ class AreaDirectorSerializer(serializers.Serializer): Works with Email or Role """ + name = serializers.SerializerMethodField() email = serializers.SerializerMethodField() + @extend_schema_field(serializers.CharField) + def get_name(self, instance: Email | Role): + person = getattr(instance, 'person', None) + return person.plain_name() if person else None + @extend_schema_field(serializers.EmailField) def get_email(self, instance: Email | Role): if isinstance(instance, Role): From b1cc7edc7ff5e80f7eb0072657e88450a4b2c06b Mon Sep 17 00:00:00 2001 From: Rudi Matz Date: Fri, 27 Mar 2026 14:32:07 -0400 Subject: [PATCH 056/102] adapt test --- ietf/group/tests_serializers.py | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/ietf/group/tests_serializers.py b/ietf/group/tests_serializers.py index bf29e6c8fd..b584a17ae2 100644 --- a/ietf/group/tests_serializers.py +++ b/ietf/group/tests_serializers.py @@ -31,7 +31,7 @@ def test_serializes_role(self): serialized = AreaDirectorSerializer(role).data self.assertEqual( serialized, - {"email": role.email.email_address()}, + {"email": role.email.email_address(), "name": role.person.plain_name()}, ) def test_serializes_email(self): @@ -40,7 +40,10 @@ def test_serializes_email(self): serialized = AreaDirectorSerializer(email).data self.assertEqual( serialized, - {"email": email.email_address()}, + { + "email": email.email_address(), + "name": email.person.plain_name() if email.person else None, + }, ) @@ -63,7 +66,10 @@ def test_serializes_active_area(self): self.assertEqual(serialized["name"], area.name) self.assertCountEqual( serialized["ads"], - [{"email": ad.email.email_address()} for ad in ad_roles], + [ + {"email": ad.email.email_address(), "name": ad.person.plain_name()} + for ad in ad_roles + ], ) def test_serializes_inactive_area(self): From 5775077317640de8981cf27b0b8c54e42d8ae9a2 Mon Sep 17 00:00:00 2001 From: Jennifer Richards Date: Wed, 1 Apr 2026 18:53:13 -0300 Subject: [PATCH 057/102] fix: limit access to manual post cancellation (#10638) * fix: drop access_token from URL * test: update test case * test: remove unneeded test There is no longer a dedicated manual post cancel action * chore: update copyrights --- ietf/submit/tests.py | 30 +++++++++++++++----------- ietf/templates/submit/manual_post.html | 16 ++++---------- 2 files changed, 21 insertions(+), 25 deletions(-) diff --git a/ietf/submit/tests.py b/ietf/submit/tests.py index 400d0d8c7d..ad361d31b2 100644 --- a/ietf/submit/tests.py +++ b/ietf/submit/tests.py @@ -1,4 +1,4 @@ -# Copyright The IETF Trust 2011-2023, All Rights Reserved +# Copyright The IETF Trust 2011-2026, All Rights Reserved # -*- coding: utf-8 -*- @@ -207,20 
+207,24 @@ def test_manualpost_view(self): r = self.client.get(url) self.assertEqual(r.status_code, 200) q = PyQuery(r.content) - self.assertIn( - urlreverse( - "ietf.submit.views.submission_status", - kwargs=dict(submission_id=submission.pk) - ), - q("#manual.submissions td a").attr("href") - ) - self.assertIn( - submission.name, - q("#manual.submissions td a").text() + # Validate that the basic submission status URL is on the manual post page + # _without_ an access token, even if logged in as various users. + expected_url = urlreverse( + "ietf.submit.views.submission_status", + kwargs=dict(submission_id=submission.pk) ) + selected_elts = q("#manual.submissions td a") + self.assertEqual(expected_url, selected_elts.attr("href")) + self.assertIn(submission.name, selected_elts.text()) + for username in ["plain", "secretary"]: + self.client.login(username=username, password=username + "+password") + r = self.client.get(url) + self.assertEqual(r.status_code, 200) + q = PyQuery(r.content) + selected_elts = q("#manual.submissions td a") + self.assertEqual(expected_url, selected_elts.attr("href")) + self.assertIn(submission.name, selected_elts.text()) - def test_manualpost_cancel(self): - pass class SubmitTests(BaseSubmitTestCase): def setUp(self): diff --git a/ietf/templates/submit/manual_post.html b/ietf/templates/submit/manual_post.html index 6e4a2ba42a..0da83e750f 100644 --- a/ietf/templates/submit/manual_post.html +++ b/ietf/templates/submit/manual_post.html @@ -1,5 +1,5 @@ {% extends "submit/submit_base.html" %} -{# Copyright The IETF Trust 2015, All Rights Reserved #} +{# Copyright The IETF Trust 2015-2026, All Rights Reserved #} {% load origin static %} {% block pagehead %} @@ -27,17 +27,9 @@

Submissions needing manual posting

{% for s in manual %} - {% if user.is_authenticated %} - - - {{ s.name }}-{{ s.rev }} - - - {% else %} - - {{ s.name }}-{{ s.rev }} - - {% endif %} + + {{ s.name }}-{{ s.rev }} + {{ s.submission_date }} {% if s.passes_checks %} From 6058769a64778679d4b3b5ca5e6937ed5f2ec6c8 Mon Sep 17 00:00:00 2001 From: Jennifer Richards Date: Thu, 2 Apr 2026 15:57:49 -0300 Subject: [PATCH 058/102] ci: optional bucket suffix for storage cfg (#10637) * ci: optional bucket suffix for storage cfg * style: ruff ruff * fix: roll back bizarre editor glitch --- docker/scripts/app-configure-blobstore.py | 10 +++++++--- k8s/settings_local.py | 22 ++++++++++++++++------ 2 files changed, 23 insertions(+), 9 deletions(-) diff --git a/docker/scripts/app-configure-blobstore.py b/docker/scripts/app-configure-blobstore.py index 3140e39306..9ae64e0041 100755 --- a/docker/scripts/app-configure-blobstore.py +++ b/docker/scripts/app-configure-blobstore.py @@ -24,10 +24,13 @@ def init_blobstore(): ), ) for bucketname in ARTIFACT_STORAGE_NAMES: + adjusted_bucket_name = ( + os.environ.get("BLOB_STORE_BUCKET_PREFIX", "") + + bucketname + + os.environ.get("BLOB_STORE_BUCKET_SUFFIX", "") + ).strip() try: - blobstore.create_bucket( - Bucket=f"{os.environ.get('BLOB_STORE_BUCKET_PREFIX', '')}{bucketname}".strip() - ) + blobstore.create_bucket(Bucket=adjusted_bucket_name) except botocore.exceptions.ClientError as err: if err.response["Error"]["Code"] == "BucketAlreadyExists": print(f"Bucket {bucketname} already exists") @@ -36,5 +39,6 @@ def init_blobstore(): else: print(f"Bucket {bucketname} created") + if __name__ == "__main__": sys.exit(init_blobstore()) diff --git a/k8s/settings_local.py b/k8s/settings_local.py index 323b7fd45a..b45cbbe260 100644 --- a/k8s/settings_local.py +++ b/k8s/settings_local.py @@ -18,7 +18,7 @@ def _multiline_to_list(s): - """Helper to split at newlines and conver to list""" + """Helper to split at newlines and convert to list""" return [item.strip() for item in s.split("\n")] @@ -80,13 +80,19 @@ def _multiline_to_list(s): else: raise RuntimeError("DATATRACKER_API_PRIVATE_KEY_PEM_B64 must be set") -_RED_PRECOMPUTER_TRIGGER_RETRY_DELAY = os.environ.get("DATATRACKER_RED_PRECOMPUTER_TRIGGER_RETRY_DELAY", None) +_RED_PRECOMPUTER_TRIGGER_RETRY_DELAY = os.environ.get( + "DATATRACKER_RED_PRECOMPUTER_TRIGGER_RETRY_DELAY", None +) if _RED_PRECOMPUTER_TRIGGER_RETRY_DELAY is not None: - RED_PRECOMPUTER_TRIGGER_RETRY_DELAY = _RED_PRECOMPUTER_TRIGGER_RETRY_DELAY -_RED_PRECOMPUTER_TRIGGER_MAX_RETRIES = os.environ.get("DATATRACKER_RED_PRECOMPUTER_TRIGGER_MAX_RETRIES", None) + RED_PRECOMPUTER_TRIGGER_RETRY_DELAY = _RED_PRECOMPUTER_TRIGGER_RETRY_DELAY +_RED_PRECOMPUTER_TRIGGER_MAX_RETRIES = os.environ.get( + "DATATRACKER_RED_PRECOMPUTER_TRIGGER_MAX_RETRIES", None +) if _RED_PRECOMPUTER_TRIGGER_MAX_RETRIES is not None: RED_PRECOMPUTER_TRIGGER_MAX_RETRIES = _RED_PRECOMPUTER_TRIGGER_MAX_RETRIES -_TRIGGER_RED_PRECOMPUTE_MULTIPLE_URL = os.environ.get("DATATRACKER_TRIGGER_RED_PRECOMPUTE_MULTIPLE_URL", None) +_TRIGGER_RED_PRECOMPUTE_MULTIPLE_URL = os.environ.get( + "DATATRACKER_TRIGGER_RED_PRECOMPUTE_MULTIPLE_URL", None +) if _TRIGGER_RED_PRECOMPUTE_MULTIPLE_URL is not None: TRIGGER_RED_PRECOMPUTE_MULTIPLE_URL = _TRIGGER_RED_PRECOMPUTE_MULTIPLE_URL @@ -387,6 +393,7 @@ def _multiline_to_list(s): "and DATATRACKER_BLOB_STORE_SECRET_KEY must be set" ) _blob_store_bucket_prefix = os.environ.get("DATATRACKER_BLOB_STORE_BUCKET_PREFIX", "") +_blob_store_bucket_suffix = os.environ.get("DATATRACKER_BLOB_STORE_BUCKET_SUFFIX", "") 
_blob_store_enable_profiling = ( os.environ.get("DATATRACKER_BLOB_STORE_ENABLE_PROFILING", "false").lower() == "true" ) @@ -406,6 +413,9 @@ def _multiline_to_list(s): if storagename in ["staging"]: continue replica_storagename = f"r2-{storagename}" + adjusted_bucket_name = ( + _blob_store_bucket_prefix + storagename + _blob_store_bucket_suffix + ).strip() STORAGES[replica_storagename] = { "BACKEND": "ietf.doc.storage.MetadataS3Storage", "OPTIONS": dict( @@ -422,7 +432,7 @@ def _multiline_to_list(s): retries={"total_max_attempts": _blob_store_max_attempts}, ), verify=False, - bucket_name=f"{_blob_store_bucket_prefix}{storagename}".strip(), + bucket_name=adjusted_bucket_name, ietf_log_blob_timing=_blob_store_enable_profiling, ), } From a46a2efc05b2e7f5d1b50c76d543e1ca16ae8918 Mon Sep 17 00:00:00 2001 From: Kesara Rathnayake Date: Tue, 7 Apr 2026 09:25:24 +1200 Subject: [PATCH 059/102] feat: Generate bcp-index.txt (#10631) * feat: Generate bcp-index.txt * fix: Fix issue with author names * feat: Update bcp-index.txt header * refactor: Generalize some functions * fix: Sort RFCs * test: Add tests for bcp-index.txt * fix: Fix range bug * test: Add test for BCP entry * test: Fix test_create_bcp_txt_index --- ietf/sync/rfcindex.py | 98 +++++++++++++++++++++++++++++++ ietf/sync/tests_rfcindex.py | 69 ++++++++++++++++++++-- ietf/templates/sync/bcp-index.txt | 52 ++++++++++++++++ 3 files changed, 215 insertions(+), 4 deletions(-) create mode 100644 ietf/templates/sync/bcp-index.txt diff --git a/ietf/sync/rfcindex.py b/ietf/sync/rfcindex.py index 63c2044931..357cc4069a 100644 --- a/ietf/sync/rfcindex.py +++ b/ietf/sync/rfcindex.py @@ -24,6 +24,8 @@ from ietf.utils.log import log FORMATS_FOR_INDEX = ["txt", "html", "pdf", "xml", "ps"] +SS_TXT_MARGIN = 3 +SS_TXT_CUE_COL_WIDTH = 14 def format_rfc_number(n): @@ -267,6 +269,87 @@ def get_rfc_text_index_entries(): return entries +def subseries_text_line(line, first=False): + """Return subseries text entry line""" + indent = " " * SS_TXT_CUE_COL_WIDTH + if first: + initial_indent = " " * SS_TXT_MARGIN + else: + initial_indent = indent + return fill( + line, + initial_indent=initial_indent, + subsequent_indent=indent, + width=80, + break_on_hyphens=False, + ) + + +def get_bcp_text_index_entries(): + """Returns BCP entries for bcp-index.txt""" + entries = [] + + highest_bcp_number = ( + Document.objects.filter(type_id="bcp") + .annotate( + number=Cast( + Substr("name", 4, None), + output_field=models.IntegerField(), + ) + ) + .order_by("-number") + .first() + .number + ) + + for bcp_number in range(1, highest_bcp_number + 1): + bcp_name = f"BCP{bcp_number}" + bcp = Document.objects.filter(type_id="bcp", name=f"{bcp_name.lower()}").first() + + if bcp: + entry = subseries_text_line( + ( + f"[{bcp_name}]" + f"{' ' * (SS_TXT_CUE_COL_WIDTH - len(bcp_name) - 2 - SS_TXT_MARGIN)}" + f"Best Current Practice {bcp_number}," + ), + first=True, + ) + entry += "\n" + entry += subseries_text_line( + f"<{settings.RFC_EDITOR_INFO_BASE_URL}{bcp_name.lower()}>." 
+ ) + entry += "\n" + entry += subseries_text_line( + "At the time of writing, this BCP comprises the following:" + ) + entry += "\n\n" + rfcs = sorted(bcp.contains(), key=lambda x: x.rfc_number) + for rfc in rfcs: + authors = ", ".join( + author.format_for_titlepage() for author in rfc.rfcauthor_set.all() + ) + entry += subseries_text_line( + ( + f'{authors}, "{rfc.title}", BCP¶{bcp_number}, RFC¶{rfc.rfc_number}, ' + f"DOI¶{rfc.doi}, {rfc.pub_date().strftime('%B %Y')}, " + f"<{settings.RFC_EDITOR_INFO_BASE_URL}rfc{rfc.rfc_number}>." + ) + ).replace("¶", " ") + entry += "\n\n" + else: + entry = subseries_text_line( + ( + f"[{bcp_name}]" + f"{' ' * (SS_TXT_CUE_COL_WIDTH - len(bcp_name) - 2 - SS_TXT_MARGIN)}" + f"Best Current Practice {bcp_number} currently contains no RFCs" + ), + first=True, + ) + entries.append(entry) + return entries + + def add_subseries_xml_index_entries(rfc_index, ss_type, include_all=False): """Add subseries entries for rfc-index.xml""" # subseries docs annotated with numeric number @@ -481,3 +564,18 @@ def create_rfc_xml_index(): pretty_print=4, ) save_to_red_bucket("rfc-index.xml", pretty_index) + + +def create_bcp_txt_index(): + """Create text index of BCPs""" + DATE_FMT = "%m/%d/%Y" + created_on = timezone.now().strftime(DATE_FMT) + log("Creating bcp-index.txt") + index = render_to_string( + "sync/bcp-index.txt", + { + "created_on": created_on, + "bcps": get_bcp_text_index_entries(), + }, + ) + save_to_red_bucket("bcp-index.txt", index) diff --git a/ietf/sync/tests_rfcindex.py b/ietf/sync/tests_rfcindex.py index e682c016f5..cad5b577d4 100644 --- a/ietf/sync/tests_rfcindex.py +++ b/ietf/sync/tests_rfcindex.py @@ -7,16 +7,22 @@ from django.test.utils import override_settings from lxml import etree -from ietf.doc.factories import PublishedRfcDocEventFactory, IndividualRfcFactory +from ietf.doc.factories import ( + BcpFactory, + IndividualRfcFactory, + PublishedRfcDocEventFactory, +) from ietf.name.models import DocTagName from ietf.sync.rfcindex import ( + create_bcp_txt_index, create_rfc_txt_index, create_rfc_xml_index, format_rfc_number, - save_to_red_bucket, - get_unusable_rfc_numbers, get_april1_rfc_numbers, get_publication_std_levels, + get_unusable_rfc_numbers, + save_to_red_bucket, + subseries_text_line, ) from ietf.utils.test_utils import TestCase @@ -69,6 +75,9 @@ def setUp(self): ).doc self.rfc.tags.add(DocTagName.objects.get(slug="errata")) + # Create a BCP with non-April Fools RFC + self.bcp = BcpFactory(contains=[self.rfc], name="bcp11") + # Set up a publication-std-levels.json file to indicate the publication # standard of self.rfc as different from its current value red_bucket.save( @@ -137,7 +146,7 @@ def test_create_rfc_xml_index(self, mock_save): children = list(index) # elements as list # Should be one rfc-not-issued-entry - self.assertEqual(len(children), 3) + self.assertEqual(len(children), 14) self.assertEqual( [ c.find(f"{ns}doc-id").text @@ -184,6 +193,53 @@ def test_create_rfc_xml_index(self, mock_save): [(f"{ns}month", "April"), (f"{ns}year", "2021")], ) + @override_settings(RFCINDEX_INPUT_PATH="input/") + @mock.patch("ietf.sync.rfcindex.save_to_red_bucket") + def test_create_bcp_txt_index(self, mock_save): + create_bcp_txt_index() + self.assertEqual(mock_save.call_count, 1) + self.assertEqual(mock_save.call_args[0][0], "bcp-index.txt") + contents = mock_save.call_args[0][1] + self.assertTrue(isinstance(contents, str)) + # starts from 1 + self.assertIn( + "[BCP1]", + contents, + ) + # fill up to 11 + self.assertIn( + "[BCP10]", + contents, 
+ ) + # but not to 12 + self.assertNotIn( + "[BCP12]", + contents, + ) + # Test empty BCPs + self.assertIn( + "Best Current Practice 9 currently contains no RFCs", + contents, + ) + # No zero prefix! + self.assertNotIn( + "[BCP0001]", + contents, + ) + # Has BCP11 with a RFC + self.assertIn( + "Best Current Practice 11,", + contents, + ) + self.assertIn( + f'"{self.rfc.title}"', + contents, + ) + self.assertIn( + f'BCP 11, RFC {self.rfc.rfc_number},', + contents, + ) + class HelperTests(TestCase): def test_format_rfc_number(self): @@ -234,3 +290,8 @@ def test_get_publication_std_levels_raises(self): with self.assertRaises(json.JSONDecodeError): get_publication_std_levels() red_bucket.delete("publication-std-levels.json") + + def test_subseries_text_line(self): + text = "foobar" + self.assertEqual(subseries_text_line(line=text, first=True), f" {text}") + self.assertEqual(subseries_text_line(line=text), f" {text}") diff --git a/ietf/templates/sync/bcp-index.txt b/ietf/templates/sync/bcp-index.txt new file mode 100644 index 0000000000..dd19920eba --- /dev/null +++ b/ietf/templates/sync/bcp-index.txt @@ -0,0 +1,52 @@ + + +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + BCP INDEX + ------------- + +(CREATED ON: {{created_on}}.) + +This file contains citations for all BCPs in numeric order. The BCPs +form a sub-series of the RFC document series, specifically those RFCs +with the status BEST CURRENT PRACTICE. + +BCP citations appear in this format: + + [BCP#] Best Current Practice #, + . + At the time of writing, this BCP comprises the following: + + Author 1, Author 2, "Title of the RFC", BCP #, RFC №, + DOI DOI string, Issue date, + . + +For example: + + [BCP3] Best Current Practice 3, + . + At the time of writing, this BCP comprises the following: + + F. Kastenholz, "Variance for The PPP Compression Control Protocol + and The PPP Encryption Control Protocol", BCP 3, RFC 1915, + DOI 10.17487/RFC1915, February 1996, + . + +Key to fields: + +# is the BCP number. + +№ is the RFC number. + +BCPs and other RFCs may be obtained from https://www.rfc-editor.org. + +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + BCP INDEX + --------- + + + +{% for bcp in bcps %}{{bcp|safe}} + +{% endfor %} From 7c7219f0dcf326f369c7b4bd04337f95f0a7a9f4 Mon Sep 17 00:00:00 2001 From: Kesara Rathnayake Date: Wed, 8 Apr 2026 11:16:57 +1200 Subject: [PATCH 060/102] feat: Generate std-index.txt (#10665) * feat: Generate std-index.txt * style: Ruff ruff Good boy! 
* test: Fix flaky test * test: Add tests for std-index.txt --- ietf/sync/rfcindex.py | 80 +++++++++++++++++++++++++++++++ ietf/sync/tests_rfcindex.py | 64 ++++++++++++++++++++++++- ietf/templates/sync/std-index.txt | 51 ++++++++++++++++++++ 3 files changed, 193 insertions(+), 2 deletions(-) create mode 100644 ietf/templates/sync/std-index.txt diff --git a/ietf/sync/rfcindex.py b/ietf/sync/rfcindex.py index 357cc4069a..6a6a4bfa9f 100644 --- a/ietf/sync/rfcindex.py +++ b/ietf/sync/rfcindex.py @@ -350,6 +350,71 @@ def get_bcp_text_index_entries(): return entries +def get_std_text_index_entries(): + """Returns STD entries for std-index.txt""" + entries = [] + + highest_std_number = ( + Document.objects.filter(type_id="std") + .annotate( + number=Cast( + Substr("name", 4, None), + output_field=models.IntegerField(), + ) + ) + .order_by("-number") + .first() + .number + ) + + for std_number in range(1, highest_std_number + 1): + std_name = f"STD{std_number}" + std = Document.objects.filter(type_id="std", name=f"{std_name.lower()}").first() + + if std and std.contains(): + entry = subseries_text_line( + ( + f"[{std_name}]" + f"{' ' * (SS_TXT_CUE_COL_WIDTH - len(std_name) - 2 - SS_TXT_MARGIN)}" + f"Internet Standard {std_number}," + ), + first=True, + ) + entry += "\n" + entry += subseries_text_line( + f"<{settings.RFC_EDITOR_INFO_BASE_URL}{std_name.lower()}>." + ) + entry += "\n" + entry += subseries_text_line( + "At the time of writing, this STD comprises the following:" + ) + entry += "\n\n" + rfcs = sorted(std.contains(), key=lambda x: x.rfc_number) + for rfc in rfcs: + authors = ", ".join( + author.format_for_titlepage() for author in rfc.rfcauthor_set.all() + ) + entry += subseries_text_line( + ( + f'{authors}, "{rfc.title}", STD¶{std_number}, RFC¶{rfc.rfc_number}, ' + f"DOI¶{rfc.doi}, {rfc.pub_date().strftime('%B %Y')}, " + f"<{settings.RFC_EDITOR_INFO_BASE_URL}rfc{rfc.rfc_number}>." 
+ ) + ).replace("¶", " ") + entry += "\n\n" + else: + entry = subseries_text_line( + ( + f"[{std_name}]" + f"{' ' * (SS_TXT_CUE_COL_WIDTH - len(std_name) - 2 - SS_TXT_MARGIN)}" + f"Internet Standard {std_number} currently contains no RFCs" + ), + first=True, + ) + entries.append(entry) + return entries + + def add_subseries_xml_index_entries(rfc_index, ss_type, include_all=False): """Add subseries entries for rfc-index.xml""" # subseries docs annotated with numeric number @@ -579,3 +644,18 @@ def create_bcp_txt_index(): }, ) save_to_red_bucket("bcp-index.txt", index) + + +def create_std_txt_index(): + """Create text index of STDs""" + DATE_FMT = "%m/%d/%Y" + created_on = timezone.now().strftime(DATE_FMT) + log("Creating std-index.txt") + index = render_to_string( + "sync/std-index.txt", + { + "created_on": created_on, + "stds": get_std_text_index_entries(), + }, + ) + save_to_red_bucket("std-index.txt", index) diff --git a/ietf/sync/tests_rfcindex.py b/ietf/sync/tests_rfcindex.py index cad5b577d4..70bc41b992 100644 --- a/ietf/sync/tests_rfcindex.py +++ b/ietf/sync/tests_rfcindex.py @@ -9,6 +9,7 @@ from ietf.doc.factories import ( BcpFactory, + StdFactory, IndividualRfcFactory, PublishedRfcDocEventFactory, ) @@ -17,6 +18,7 @@ create_bcp_txt_index, create_rfc_txt_index, create_rfc_xml_index, + create_std_txt_index, format_rfc_number, get_april1_rfc_numbers, get_publication_std_levels, @@ -78,6 +80,9 @@ def setUp(self): # Create a BCP with non-April Fools RFC self.bcp = BcpFactory(contains=[self.rfc], name="bcp11") + # Create a STD with non-April Fools RFC + self.std = StdFactory(contains=[self.rfc], name="std11") + # Set up a publication-std-levels.json file to indicate the publication # standard of self.rfc as different from its current value red_bucket.save( @@ -146,7 +151,7 @@ def test_create_rfc_xml_index(self, mock_save): children = list(index) # elements as list # Should be one rfc-not-issued-entry - self.assertEqual(len(children), 14) + self.assertEqual(len(children), 15) self.assertEqual( [ c.find(f"{ns}doc-id").text @@ -236,7 +241,62 @@ def test_create_bcp_txt_index(self, mock_save): contents, ) self.assertIn( - f'BCP 11, RFC {self.rfc.rfc_number},', + "BCP 11,", + contents, + ) + self.assertIn( + f"RFC {self.rfc.rfc_number},", + contents, + ) + + @override_settings(RFCINDEX_INPUT_PATH="input/") + @mock.patch("ietf.sync.rfcindex.save_to_red_bucket") + def test_create_std_txt_index(self, mock_save): + create_std_txt_index() + self.assertEqual(mock_save.call_count, 1) + self.assertEqual(mock_save.call_args[0][0], "std-index.txt") + contents = mock_save.call_args[0][1] + self.assertTrue(isinstance(contents, str)) + # starts from 1 + self.assertIn( + "[STD1]", + contents, + ) + # fill up to 11 + self.assertIn( + "[STD10]", + contents, + ) + # but not to 12 + self.assertNotIn( + "[STD12]", + contents, + ) + # Test empty STDs + self.assertIn( + "Internet Standard 9 currently contains no RFCs", + contents, + ) + # No zero prefix! 
+ self.assertNotIn( + "[STD0001]", + contents, + ) + # Has STD11 with a RFC + self.assertIn( + "Internet Standard 11,", + contents, + ) + self.assertIn( + f'"{self.rfc.title}"', + contents, + ) + self.assertIn( + "STD 11,", + contents, + ) + self.assertIn( + f"RFC {self.rfc.rfc_number},", contents, ) diff --git a/ietf/templates/sync/std-index.txt b/ietf/templates/sync/std-index.txt new file mode 100644 index 0000000000..c075d1d43e --- /dev/null +++ b/ietf/templates/sync/std-index.txt @@ -0,0 +1,51 @@ + + +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + STD INDEX + ------------- + +(CREATED ON: {{created_on}}.) + +This file contains citations for all STDs in numeric order. Each +STD represents a single Internet Standard technical specification, +composed of one or more RFCs with Internet Standard status. + +STD citations appear in this format: + + [STD#] Best Current Practice #, + . + At the time of writing, this STD comprises the following: + + Author 1, Author 2, "Title of the RFC", STD #, RFC №, + DOI DOI string, Issue date, + . + +For example: + + [STD6] Internet Standard 6, + . + At the time of writing, this STD comprises the following: + + J. Postel, "User Datagram Protocol", STD 6, RFC 768, + DOI 10.17487/RFC0768, August 1980, + . + +Key to fields: + +# is the STD number. + +№ is the RFC number. + +STDs and other RFCs may be obtained from https://www.rfc-editor.org. + +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + STD INDEX + --------- + + + +{% for std in stds %}{{std|safe}} + +{% endfor %} From e72ead86dee707b5cbd9aeea96437dbaee78c88d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 8 Apr 2026 12:35:06 -0300 Subject: [PATCH 061/102] chore(deps): bump appleboy/ssh-action from 1.2.2 to 1.2.5 (#10623) Bumps [appleboy/ssh-action](https://github.com/appleboy/ssh-action) from 1.2.2 to 1.2.5. - [Release notes](https://github.com/appleboy/ssh-action/releases) - [Commits](https://github.com/appleboy/ssh-action/compare/2ead5e36573f08b82fbfce1504f1a4b05a647c6f...0ff4204d59e8e51228ff73bce53f80d53301dee2) --- updated-dependencies: - dependency-name: appleboy/ssh-action dependency-version: 1.2.5 dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/tests-az.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/tests-az.yml b/.github/workflows/tests-az.yml index 8553563a19..833ca89bef 100644 --- a/.github/workflows/tests-az.yml +++ b/.github/workflows/tests-az.yml @@ -38,7 +38,7 @@ jobs: ssh-keyscan -t rsa $vminfo >> ~/.ssh/known_hosts - name: Remote SSH into VM - uses: appleboy/ssh-action@2ead5e36573f08b82fbfce1504f1a4b05a647c6f + uses: appleboy/ssh-action@0ff4204d59e8e51228ff73bce53f80d53301dee2 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} with: From c7657c3f22f5f7a906fd2cf01aaed7b54feca9e3 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 8 Apr 2026 12:37:35 -0300 Subject: [PATCH 062/102] chore(deps): bump stefanzweifel/git-auto-commit-action from 6 to 7 (#10624) Bumps [stefanzweifel/git-auto-commit-action](https://github.com/stefanzweifel/git-auto-commit-action) from 6 to 7. 
- [Release notes](https://github.com/stefanzweifel/git-auto-commit-action/releases) - [Changelog](https://github.com/stefanzweifel/git-auto-commit-action/blob/master/CHANGELOG.md) - [Commits](https://github.com/stefanzweifel/git-auto-commit-action/compare/v6...v7) --- updated-dependencies: - dependency-name: stefanzweifel/git-auto-commit-action dependency-version: '7' dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/build-base-app.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/build-base-app.yml b/.github/workflows/build-base-app.yml index 1b0855cc47..5e274838a1 100644 --- a/.github/workflows/build-base-app.yml +++ b/.github/workflows/build-base-app.yml @@ -60,7 +60,7 @@ jobs: echo "${{ env.IMGVERSION }}" > dev/build/TARGET_BASE - name: Commit CHANGELOG.md - uses: stefanzweifel/git-auto-commit-action@v6 + uses: stefanzweifel/git-auto-commit-action@v7 with: branch: ${{ github.ref_name }} commit_message: 'ci: update base image target version to ${{ env.IMGVERSION }}' From f39e916a73eaab6c0172a09e98c28ba628b7bcc4 Mon Sep 17 00:00:00 2001 From: Eric Rescorla Date: Wed, 8 Apr 2026 08:50:19 -0700 Subject: [PATCH 063/102] fix: Rewrite upper right document search box (#10538) * Rewrite upper right document search box. Fixes #10358 This is a fix to the problem where the first item in the dropdown is auto-selected and then when you hit return you go to that rather than searching for what's in the text field. It appears to be challenging to get this behavior with select2, so this is actually a rewrite of the box with explicit behavior. As a side effect, the draft names actually render a bit better. 
Co-Authored-By: Claude Opus 4.6 * Respond to review comments --------- Co-authored-by: EKR aibot Co-authored-by: Claude Opus 4.6 --- ietf/static/css/ietf.scss | 17 +++++ ietf/static/js/navbar-doc-search.js | 113 ++++++++++++++++++++++++++++ ietf/templates/base.html | 24 +++--- package.json | 1 + 4 files changed, 143 insertions(+), 12 deletions(-) create mode 100644 ietf/static/js/navbar-doc-search.js diff --git a/ietf/static/css/ietf.scss b/ietf/static/css/ietf.scss index df973863d5..6695c57b13 100644 --- a/ietf/static/css/ietf.scss +++ b/ietf/static/css/ietf.scss @@ -1216,3 +1216,20 @@ iframe.status { .overflow-shadows--bottom-only { box-shadow: inset 0px -21px 18px -20px var(--bs-body-color); } + +#navbar-doc-search-wrapper { + position: relative; +} + +#navbar-doc-search-results { + max-height: 400px; + overflow-y: auto; + min-width: auto; + left: 0; + right: 0; + + .dropdown-item { + white-space: normal; + overflow-wrap: break-word; + } +} diff --git a/ietf/static/js/navbar-doc-search.js b/ietf/static/js/navbar-doc-search.js new file mode 100644 index 0000000000..c36c032310 --- /dev/null +++ b/ietf/static/js/navbar-doc-search.js @@ -0,0 +1,113 @@ +$(function () { + var $input = $('#navbar-doc-search'); + var $results = $('#navbar-doc-search-results'); + var ajaxUrl = $input.data('ajax-url'); + var debounceTimer = null; + var highlightedIndex = -1; + var keyboardHighlight = false; + var currentItems = []; + + function showDropdown() { + $results.addClass('show'); + } + + function hideDropdown() { + $results.removeClass('show'); + highlightedIndex = -1; + keyboardHighlight = false; + updateHighlight(); + } + + function updateHighlight() { + $results.find('.dropdown-item').removeClass('active'); + if (highlightedIndex >= 0 && highlightedIndex < currentItems.length) { + $results.find('.dropdown-item').eq(highlightedIndex).addClass('active'); + } + } + + function doSearch(query) { + if (query.length < 2) { + hideDropdown(); + return; + } + $.ajax({ + url: ajaxUrl, + dataType: 'json', + data: { q: query }, + success: function (data) { + currentItems = data; + highlightedIndex = -1; + $results.empty(); + if (data.length === 0) { + $results.append('
  • No results found
  • '); + } else { + data.forEach(function (item) { + var $li = $('
  • '); + var $a = $('' + item.text + ''); + $li.append($a); + $results.append($li); + }); + } + showDropdown(); + } + }); + } + + $input.on('input', function () { + clearTimeout(debounceTimer); + var query = $(this).val().trim(); + debounceTimer = setTimeout(function () { + doSearch(query); + }, 250); + }); + + $input.on('keydown', function (e) { + if (e.key === 'ArrowDown') { + e.preventDefault(); + if (highlightedIndex < currentItems.length - 1) { + highlightedIndex++; + keyboardHighlight = true; + updateHighlight(); + } + } else if (e.key === 'ArrowUp') { + e.preventDefault(); + if (highlightedIndex > 0) { + highlightedIndex--; + keyboardHighlight = true; + updateHighlight(); + } + } else if (e.key === 'Enter') { + e.preventDefault(); + if (keyboardHighlight && highlightedIndex >= 0 && highlightedIndex < currentItems.length) { + window.location.href = currentItems[highlightedIndex].url; + } else { + var query = $(this).val().trim(); + if (query) { + window.location.href = '/doc/search/?name=' + encodeURIComponent(query) + '&rfcs=on&activedrafts=on&olddrafts=on'; + } + } + } else if (e.key === 'Escape') { + hideDropdown(); + $input.blur(); + } + }); + + // Hover highlights (visual only — Enter still submits the text) + $results.on('mouseenter', '.dropdown-item', function () { + highlightedIndex = $results.find('.dropdown-item').index(this); + keyboardHighlight = false; + updateHighlight(); + }); + + $results.on('mouseleave', '.dropdown-item', function () { + highlightedIndex = -1; + updateHighlight(); + }); + + // Click outside closes dropdown + $(document).on('click', function (e) { + if (!$(e.target).closest('#navbar-doc-search-wrapper').length) { + hideDropdown(); + } + }); +}); diff --git a/ietf/templates/base.html b/ietf/templates/base.html index 25ce50c467..b0df04f30a 100644 --- a/ietf/templates/base.html +++ b/ietf/templates/base.html @@ -67,13 +67,17 @@ {% endif %} - +