From 1fbedd7df17c2b123c64cf6cd17898ef2663bedb Mon Sep 17 00:00:00 2001 From: Jennifer Richards Date: Wed, 5 Feb 2025 16:31:35 -0400 Subject: [PATCH 001/405] chore: typing fixes for factory-boy 3.3.3 (#8501) * chore: typing fixes for factory-boy 3.3.3 * chore: more comments --- ietf/doc/factories.py | 13 ++++++++----- ietf/ipr/tests.py | 2 +- ietf/meeting/tests_views.py | 3 ++- 3 files changed, 11 insertions(+), 7 deletions(-) diff --git a/ietf/doc/factories.py b/ietf/doc/factories.py index 50fba50c42..19aa9ecc9c 100644 --- a/ietf/doc/factories.py +++ b/ietf/doc/factories.py @@ -7,7 +7,7 @@ import factory.fuzzy import datetime -from typing import Optional # pyflakes:ignore +from typing import Any # pyflakes:ignore from django.conf import settings from django.utils import timezone @@ -37,13 +37,16 @@ class Meta: model = Document skip_postgeneration_save = True + # n.b., a few attributes are typed as Any so mypy won't complain when we override in subclasses title = factory.Faker('sentence',nb_words=5) - abstract = factory.Faker('paragraph', nb_sentences=5) + abstract: Any = factory.Faker('paragraph', nb_sentences=5) rev = '00' - std_level_id = None # type: Optional[str] + std_level_id: Any = None intended_std_level_id = None time = timezone.now() - expires = factory.LazyAttribute(lambda o: o.time+datetime.timedelta(days=settings.INTERNET_DRAFT_DAYS_TO_EXPIRE)) + expires: Any = factory.LazyAttribute( + lambda o: o.time+datetime.timedelta(days=settings.INTERNET_DRAFT_DAYS_TO_EXPIRE) + ) pages = factory.fuzzy.FuzzyInteger(2,400) @@ -282,7 +285,7 @@ class Meta: type = 'added_comment' by = factory.SubFactory('ietf.person.factories.PersonFactory') - doc = factory.SubFactory(DocumentFactory) + doc: Any = factory.SubFactory(DocumentFactory) # `Any` to appease mypy when a subclass overrides doc desc = factory.Faker('sentence',nb_words=6) @factory.lazy_attribute diff --git a/ietf/ipr/tests.py b/ietf/ipr/tests.py index beb4d7fc1c..e0d00b5d1a 100644 --- a/ietf/ipr/tests.py +++ 
b/ietf/ipr/tests.py @@ -726,7 +726,7 @@ def test_notify_generic(self): self.assertIn(f'{settings.IDTRACKER_BASE_URL}{urlreverse("ietf.ipr.views.showlist")}', get_payload_text(outbox[1]).replace('\n',' ')) def send_ipr_email_helper(self) -> tuple[str, IprEvent, HolderIprDisclosure]: - ipr = HolderIprDisclosureFactory() + ipr = HolderIprDisclosureFactory.create() # call create() explicitly so mypy sees correct type url = urlreverse('ietf.ipr.views.email',kwargs={ "id": ipr.id }) self.client.login(username="secretary", password="secretary+password") yesterday = date_today() - datetime.timedelta(1) diff --git a/ietf/meeting/tests_views.py b/ietf/meeting/tests_views.py index 3e948f8386..94f06dc89c 100644 --- a/ietf/meeting/tests_views.py +++ b/ietf/meeting/tests_views.py @@ -2124,7 +2124,8 @@ def create_timeslots_url(meeting): @staticmethod def create_bare_meeting(number=120) -> Meeting: """Create a basic IETF meeting""" - return MeetingFactory( + # Call create() explicitly so mypy sees the correct type + return MeetingFactory.create( type_id='ietf', number=number, date=date_today() + datetime.timedelta(days=10), From 060320d766803e479d860832a2ae9ebeb5a9d4d3 Mon Sep 17 00:00:00 2001 From: Jennifer Richards Date: Wed, 5 Feb 2025 18:29:32 -0400 Subject: [PATCH 002/405] feat: keep proceedings cache up to date via celery (#8449) * refactor: better control proceedings caching * refactor: move methods from views to utils * chore: revert accidental settings change * fix: eliminate circular import get_schedule() with name=None should perhaps be an anti-pattern * feat: task to recompute proceedings daily * chore: proceedings cache lifetime = 1 day * fix: ensure finalization is immediately reflected * chore: update beat comments in docker-compose * style: undo a couple whitespace changes * test: update / refactor tests * test: test task * refactor: disallow positional arg to task * refactor: add trivial test of old task --- docker-compose.yml | 1 + ietf/meeting/tasks.py | 32 ++ 
ietf/meeting/tests_tasks.py | 51 +++ ietf/meeting/tests_views.py | 91 ++++- ietf/meeting/utils.py | 171 +++++++- ietf/meeting/views.py | 157 +------- ietf/templates/meeting/proceedings.html | 380 ++++++++---------- .../meeting/proceedings_wrapper.html | 27 ++ 8 files changed, 542 insertions(+), 368 deletions(-) create mode 100644 ietf/meeting/tests_tasks.py create mode 100644 ietf/templates/meeting/proceedings_wrapper.html diff --git a/docker-compose.yml b/docker-compose.yml index 65b28f54fe..9988b10c2e 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -101,6 +101,7 @@ services: # stop_grace_period: 1m # volumes: # - .:/workspace +# - app-assets:/assets volumes: postgresdb-data: diff --git a/ietf/meeting/tasks.py b/ietf/meeting/tasks.py index 43cbb0a75f..2b7c2fca91 100644 --- a/ietf/meeting/tasks.py +++ b/ietf/meeting/tasks.py @@ -3,10 +3,42 @@ # Celery task definitions # from celery import shared_task +from django.utils import timezone +from ietf.utils import log +from .models import Meeting +from .utils import generate_proceedings_content from .views import generate_agenda_data @shared_task def agenda_data_refresh(): generate_agenda_data(force_refresh=True) + + +@shared_task +def proceedings_content_refresh_task(*, all=False): + """Refresh meeting proceedings cache + + If `all` is `False`, then refreshes the cache for meetings whose numbers modulo + 24 equal the current hour number (0-23). Scheduling the task once per hour will + then result in all proceedings being recomputed daily, with no more than two per + hour (now) or a few per hour in the next decade. That keeps the computation time + to under a couple minutes on our current production system. 
+ + If `all` is True, refreshes all meetings + """ + now = timezone.now() + + for meeting in Meeting.objects.filter(type_id="ietf").order_by("number"): + if meeting.proceedings_format_version == 1: + continue # skip v1 proceedings, they're stored externally + num = meeting.get_number() # convert str -> int + if num is None: + log.log( + f"Not refreshing proceedings for meeting {meeting.number}: " + f"type is 'ietf' but get_number() returned None" + ) + elif all or (num % 24 == now.hour): + log.log(f"Refreshing proceedings for meeting {meeting.number}...") + generate_proceedings_content(meeting, force_refresh=True) diff --git a/ietf/meeting/tests_tasks.py b/ietf/meeting/tests_tasks.py new file mode 100644 index 0000000000..c026a99835 --- /dev/null +++ b/ietf/meeting/tests_tasks.py @@ -0,0 +1,51 @@ +# Copyright The IETF Trust 2025, All Rights Reserved + +import datetime +from mock import patch, call +from ietf.utils.test_utils import TestCase +from .factories import MeetingFactory +from .tasks import proceedings_content_refresh_task, agenda_data_refresh + + +class TaskTests(TestCase): + @patch("ietf.meeting.tasks.generate_agenda_data") + def test_agenda_data_refresh(self, mock_generate): + agenda_data_refresh() + self.assertTrue(mock_generate.called) + self.assertEqual(mock_generate.call_args, call(force_refresh=True)) + + @patch("ietf.meeting.tasks.generate_proceedings_content") + def test_proceedings_content_refresh_task(self, mock_generate): + # Generate a couple of meetings + meeting120 = MeetingFactory(type_id="ietf", number="120") # 24 * 5 + meeting127 = MeetingFactory(type_id="ietf", number="127") # 24 * 5 + 7 + + # Times to be returned + now_utc = datetime.datetime.now(tz=datetime.timezone.utc) + hour_00_utc = now_utc.replace(hour=0) + hour_01_utc = now_utc.replace(hour=1) + hour_07_utc = now_utc.replace(hour=7) + + # hour 00 - should call meeting with number % 24 == 0 + with patch("ietf.meeting.tasks.timezone.now", return_value=hour_00_utc): + 
proceedings_content_refresh_task() + self.assertEqual(mock_generate.call_count, 1) + self.assertEqual(mock_generate.call_args, call(meeting120, force_refresh=True)) + mock_generate.reset_mock() + + # hour 01 - should call no meetings + with patch("ietf.meeting.tasks.timezone.now", return_value=hour_01_utc): + proceedings_content_refresh_task() + self.assertEqual(mock_generate.call_count, 0) + + # hour 07 - should call meeting with number % 24 == 7 + with patch("ietf.meeting.tasks.timezone.now", return_value=hour_07_utc): + proceedings_content_refresh_task() + self.assertEqual(mock_generate.call_count, 1) + self.assertEqual(mock_generate.call_args, call(meeting127, force_refresh=True)) + mock_generate.reset_mock() + + # With all=True, all should be called regardless of time. Reuse hour_01_utc which called none before + with patch("ietf.meeting.tasks.timezone.now", return_value=hour_01_utc): + proceedings_content_refresh_task(all=True) + self.assertEqual(mock_generate.call_count, 2) diff --git a/ietf/meeting/tests_views.py b/ietf/meeting/tests_views.py index 94f06dc89c..581725dbc8 100644 --- a/ietf/meeting/tests_views.py +++ b/ietf/meeting/tests_views.py @@ -32,6 +32,7 @@ from django.http import QueryDict, FileResponse from django.template import Context, Template from django.utils import timezone +from django.utils.safestring import mark_safe from django.utils.text import slugify import debug # pyflakes:ignore @@ -46,7 +47,7 @@ from ietf.meeting.helpers import send_interim_minutes_reminder, populate_important_dates, update_important_dates from ietf.meeting.models import Session, TimeSlot, Meeting, SchedTimeSessAssignment, Schedule, SessionPresentation, SlideSubmission, SchedulingEvent, Room, Constraint, ConstraintName from ietf.meeting.test_data import make_meeting_test_data, make_interim_meeting, make_interim_test_data -from ietf.meeting.utils import condition_slide_order +from ietf.meeting.utils import condition_slide_order, generate_proceedings_content from 
ietf.meeting.utils import add_event_info_to_session_qs, participants_for_meeting from ietf.meeting.utils import create_recording, delete_recording, get_next_sequence, bluesheet_data from ietf.meeting.views import session_draft_list, parse_agenda_filter_params, sessions_post_save, agenda_extract_schedule @@ -8296,8 +8297,7 @@ def _proceedings_file(): path = Path(settings.BASE_DIR) / 'meeting/test_procmat.pdf' return path.open('rb') - def _assertMeetingHostsDisplayed(self, response, meeting): - pq = PyQuery(response.content) + def _assertMeetingHostsDisplayed(self, pq: PyQuery, meeting): host_divs = pq('div.host-logo') self.assertEqual(len(host_divs), meeting.meetinghosts.count(), 'Should have a logo for every meeting host') self.assertEqual( @@ -8313,12 +8313,11 @@ def _assertMeetingHostsDisplayed(self, response, meeting): 'Correct image and name for each host should appear in the correct order' ) - def _assertProceedingsMaterialsDisplayed(self, response, meeting): + def _assertProceedingsMaterialsDisplayed(self, pq: PyQuery, meeting): """Checks that all (and only) active materials are linked with correct href and title""" expected_materials = [ m for m in meeting.proceedings_materials.order_by('type__order') if m.active() ] - pq = PyQuery(response.content) links = pq('div.proceedings-material a') self.assertEqual(len(links), len(expected_materials), 'Should have an entry for each active ProceedingsMaterial') self.assertEqual( @@ -8327,9 +8326,8 @@ def _assertProceedingsMaterialsDisplayed(self, response, meeting): 'Correct title and link for each ProceedingsMaterial should appear in the correct order' ) - def _assertGroupSessions(self, response, meeting): + def _assertGroupSessions(self, pq: PyQuery): """Checks that group/sessions are present""" - pq = PyQuery(response.content) sections = ["plenaries", "gen", "iab", "editorial", "irtf", "training"] for section in sections: self.assertEqual(len(pq(f"#{section}")), 1, f"{section} section should exists in proceedings") 
@@ -8337,10 +8335,9 @@ def _assertGroupSessions(self, response, meeting): def test_proceedings(self): """Proceedings should be displayed correctly - Currently only tests that the view responds with a 200 response code and checks the ProceedingsMaterials - at the top of the proceedings. Ought to actually test the display of the individual group/session - materials as well. + Proceedings contents are tested in detail when testing generate_proceedings_content. """ + # number must be >97 (settings.PROCEEDINGS_VERSION_CHANGES) meeting = make_meeting_test_data(meeting=MeetingFactory(type_id='ietf', number='100')) session = Session.objects.filter(meeting=meeting, group__acronym="mars").first() GroupEventFactory(group=session.group,type='status_update') @@ -8365,16 +8362,72 @@ def test_proceedings(self): self._create_proceedings_materials(meeting) url = urlreverse("ietf.meeting.views.proceedings", kwargs=dict(num=meeting.number)) - r = self.client.get(url) + cached_content = mark_safe("

Fake proceedings content

") + with patch("ietf.meeting.views.generate_proceedings_content") as mock_gpc: + mock_gpc.return_value = cached_content + r = self.client.get(url) self.assertEqual(r.status_code, 200) + self.assertIn(cached_content, r.content.decode()) + self.assertTemplateUsed(r, "meeting/proceedings_wrapper.html") + self.assertTemplateNotUsed(r, "meeting/proceedings.html") + # These are rendered in proceedings_wrapper.html, so test them here if len(meeting.city) > 0: self.assertContains(r, meeting.city) if len(meeting.venue_name) > 0: self.assertContains(r, meeting.venue_name) + self._assertMeetingHostsDisplayed(PyQuery(r.content), meeting) + + @patch("ietf.meeting.utils.caches") + def test_generate_proceedings_content(self, mock_caches): + # number must be >97 (settings.PROCEEDINGS_VERSION_CHANGES) + meeting = make_meeting_test_data(meeting=MeetingFactory(type_id='ietf', number='100')) + + # First, check that by default a value in the cache is used without doing any other computation + mock_default_cache = mock_caches["default"] + mock_default_cache.get.return_value = "a cached value" + result = generate_proceedings_content(meeting) + self.assertEqual(result, "a cached value") + self.assertFalse(mock_default_cache.set.called) + self.assertTrue(mock_default_cache.get.called) + cache_key = mock_default_cache.get.call_args.args[0] + mock_default_cache.get.reset_mock() + + # Now set up for actual computation of the proceedings content. 
+ session = Session.objects.filter(meeting=meeting, group__acronym="mars").first() + GroupEventFactory(group=session.group,type='status_update') + SessionPresentationFactory(document__type_id='recording',session=session) + SessionPresentationFactory(document__type_id='recording',session=session,document__title="Audio recording for tests") + + # Add various group sessions + groups = [] + parent_groups = [ + GroupFactory.create(type_id="area", acronym="gen"), + GroupFactory.create(acronym="iab"), + GroupFactory.create(acronym="irtf"), + ] + for parent in parent_groups: + groups.append(GroupFactory.create(parent=parent)) + for acronym in ["rsab", "edu"]: + groups.append(GroupFactory.create(acronym=acronym)) + for group in groups: + SessionFactory(meeting=meeting, group=group) + + self.write_materials_files(meeting, session) + self._create_proceedings_materials(meeting) + + # Now "empty" the mock cache and see that we compute the expected proceedings content. + mock_default_cache.get.return_value = None + proceedings_content = generate_proceedings_content(meeting) + self.assertTrue(mock_default_cache.get.called) + self.assertEqual(mock_default_cache.get.call_args.args[0], cache_key, "same cache key each time") + self.assertTrue(mock_default_cache.set.called) + self.assertEqual(mock_default_cache.set.call_args, call(cache_key, proceedings_content, timeout=86400)) + mock_default_cache.get.reset_mock() + mock_default_cache.set.reset_mock() # standard items on every proceedings - pq = PyQuery(r.content) + pq = PyQuery(proceedings_content) self.assertNotEqual( pq('a[href="{}"]'.format( urlreverse('ietf.meeting.views.proceedings_overview', kwargs=dict(num=meeting.number))) @@ -8405,9 +8458,17 @@ def test_proceedings(self): ) # configurable contents - self._assertMeetingHostsDisplayed(r, meeting) - self._assertProceedingsMaterialsDisplayed(r, meeting) - self._assertGroupSessions(r, meeting) + self._assertProceedingsMaterialsDisplayed(pq, meeting) + 
self._assertGroupSessions(pq) + + # Finally, repeat the first cache test, but now with force_refresh=True. The cached value + # should be ignored and we should recompute the proceedings as before. + mock_default_cache.get.return_value = "a cached value" + result = generate_proceedings_content(meeting, force_refresh=True) + self.assertEqual(result, proceedings_content) # should have recomputed the same thing + self.assertFalse(mock_default_cache.get.called, "don't bother reading cache when force_refresh is True") + self.assertTrue(mock_default_cache.set.called) + self.assertEqual(mock_default_cache.set.call_args, call(cache_key, proceedings_content, timeout=86400)) def test_named_session(self): """Session with a name should appear separately in the proceedings""" diff --git a/ietf/meeting/utils.py b/ietf/meeting/utils.py index 6e681fdc31..92bae5ac23 100644 --- a/ietf/meeting/utils.py +++ b/ietf/meeting/utils.py @@ -3,6 +3,8 @@ import datetime import itertools import os +from hashlib import sha384 + import pytz import subprocess @@ -11,8 +13,9 @@ from django.conf import settings from django.contrib import messages +from django.core.cache import caches from django.core.files.base import ContentFile -from django.db.models import OuterRef, Subquery, TextField, Q, Value +from django.db.models import OuterRef, Subquery, TextField, Q, Value, Max from django.db.models.functions import Coalesce from django.template.loader import render_to_string from django.utils import timezone @@ -995,3 +998,169 @@ def participants_for_meeting(meeting): sessions = meeting.session_set.filter(Q(type='plenary') | Q(group__type__in=['wg', 'rg'])) attended = Attended.objects.filter(session__in=sessions).values_list('person', flat=True).distinct() return (checked_in, attended) + + +def generate_proceedings_content(meeting, force_refresh=False): + """Render proceedings content for a meeting and update cache + + :meeting: meeting whose proceedings should be rendered + :force_refresh: true to force 
regeneration and cache refresh + """ + cache = caches["default"] + cache_version = Document.objects.filter(session__meeting__number=meeting.number).aggregate(Max('time'))["time__max"] + # Include proceedings_final in the bare_key so we'll always reflect that accurately, even at the cost of + # a recomputation in the view + bare_key = f"proceedings.{meeting.number}.{cache_version}.final={meeting.proceedings_final}" + cache_key = sha384(bare_key.encode("utf8")).hexdigest() + if not force_refresh: + cached_content = cache.get(cache_key, None) + if cached_content is not None: + return cached_content + + def area_and_group_acronyms_from_session(s): + area = s.group_parent_at_the_time() + if area == None: + area = s.group.parent + group = s.group_at_the_time() + return (area.acronym, group.acronym) + + schedule = meeting.schedule + sessions = ( + meeting.session_set.with_current_status() + .filter(Q(timeslotassignments__schedule__in=[schedule, schedule.base if schedule else None]) + | Q(current_status='notmeet')) + .select_related() + .order_by('-current_status') + ) + + plenaries, _ = organize_proceedings_sessions( + sessions.filter(name__icontains='plenary') + .exclude(current_status='notmeet') + ) + irtf_meeting, irtf_not_meeting = organize_proceedings_sessions( + sessions.filter(group__parent__acronym = 'irtf').order_by('group__acronym') + ) + # per Colin (datatracker #5010) - don't report not meeting rags + irtf_not_meeting = [item for item in irtf_not_meeting if item["group"].type_id != "rag"] + irtf = {"meeting_groups":irtf_meeting, "not_meeting_groups":irtf_not_meeting} + + training, _ = organize_proceedings_sessions( + sessions.filter(group__acronym__in=['edu','iaoc'], type_id__in=['regular', 'other',]) + .exclude(current_status='notmeet') + ) + iab, _ = organize_proceedings_sessions( + sessions.filter(group__parent__acronym = 'iab') + .exclude(current_status='notmeet') + ) + editorial, _ = organize_proceedings_sessions( + 
sessions.filter(group__acronym__in=['rsab','rswg']) + .exclude(current_status='notmeet') + ) + + ietf = sessions.filter(group__parent__type__slug = 'area').exclude(group__acronym__in=['edu','iepg','tools']) + ietf = list(ietf) + ietf.sort(key=lambda s: area_and_group_acronyms_from_session(s)) + ietf_areas = [] + for area, area_sessions in itertools.groupby(ietf, key=lambda s: s.group_parent_at_the_time()): + meeting_groups, not_meeting_groups = organize_proceedings_sessions(area_sessions) + ietf_areas.append((area, meeting_groups, not_meeting_groups)) + + with timezone.override(meeting.tz()): + rendered_content = render_to_string( + "meeting/proceedings.html", + { + 'meeting': meeting, + 'plenaries': plenaries, + 'training': training, + 'irtf': irtf, + 'iab': iab, + 'editorial': editorial, + 'ietf_areas': ietf_areas, + 'meetinghost_logo': { + 'max_height': settings.MEETINGHOST_LOGO_MAX_DISPLAY_HEIGHT, + 'max_width': settings.MEETINGHOST_LOGO_MAX_DISPLAY_WIDTH, + } + }, + ) + cache.set( + cache_key, + rendered_content, + timeout=86400, # one day, in seconds + ) + return rendered_content + + +def organize_proceedings_sessions(sessions): + # Collect sessions by Group, then bin by session name (including sessions with blank names). + # If all of a group's sessions are 'notmeet', the processed data goes in not_meeting_sessions. + # Otherwise, the data goes in meeting_sessions. 
+ meeting_groups = [] + not_meeting_groups = [] + for group_acronym, group_sessions in itertools.groupby(sessions, key=lambda s: s.group.acronym): + by_name = {} + is_meeting = False + all_canceled = True + group = None + for s in sorted( + group_sessions, + key=lambda gs: ( + gs.official_timeslotassignment().timeslot.time + if gs.official_timeslotassignment() else datetime.datetime(datetime.MAXYEAR, 1, 1) + ), + ): + group = s.group + if s.current_status != 'notmeet': + is_meeting = True + if s.current_status != 'canceled': + all_canceled = False + by_name.setdefault(s.name, []) + if s.current_status != 'notmeet' or s.presentations.exists(): + by_name[s.name].append(s) # for notmeet, only include sessions with materials + for sess_name, ss in by_name.items(): + session = ss[0] if ss else None + def _format_materials(items): + """Format session/material for template + + Input is a list of (session, materials) pairs. The materials value can be a single value or a list. + """ + material_times = {} # key is material, value is first timestamp it appeared + for s, mats in items: + tsa = s.official_timeslotassignment() + timestamp = tsa.timeslot.time if tsa else None + if not isinstance(mats, list): + mats = [mats] + for mat in mats: + if mat and mat not in material_times: + material_times[mat] = timestamp + n_mats = len(material_times) + result = [] + if n_mats == 1: + result.append({'material': list(material_times)[0]}) # no 'time' when only a single material + elif n_mats > 1: + for mat, timestamp in material_times.items(): + result.append({'material': mat, 'time': timestamp}) + return result + + entry = { + 'group': group, + 'name': sess_name, + 'session': session, + 'canceled': all_canceled, + 'has_materials': s.presentations.exists(), + 'agendas': _format_materials((s, s.agenda()) for s in ss), + 'minutes': _format_materials((s, s.minutes()) for s in ss), + 'bluesheets': _format_materials((s, s.bluesheets()) for s in ss), + 'recordings': _format_materials((s, 
s.recordings()) for s in ss), + 'meetecho_recordings': _format_materials((s, [s.session_recording_url()]) for s in ss), + 'chatlogs': _format_materials((s, s.chatlogs()) for s in ss), + 'slides': _format_materials((s, s.slides()) for s in ss), + 'drafts': _format_materials((s, s.drafts()) for s in ss), + 'last_update': session.last_update if hasattr(session, 'last_update') else None + } + if session and session.meeting.type_id == 'ietf' and not session.meeting.proceedings_final: + entry['attendances'] = _format_materials((s, s) for s in ss if Attended.objects.filter(session=s).exists()) + if is_meeting: + meeting_groups.append(entry) + else: + not_meeting_groups.append(entry) + return meeting_groups, not_meeting_groups diff --git a/ietf/meeting/views.py b/ietf/meeting/views.py index 2f2464028b..1226e30d60 100644 --- a/ietf/meeting/views.py +++ b/ietf/meeting/views.py @@ -75,7 +75,13 @@ from ietf.meeting.helpers import send_interim_approval from ietf.meeting.helpers import send_interim_approval_request from ietf.meeting.helpers import send_interim_announcement_request, sessions_post_cancel -from ietf.meeting.utils import finalize, sort_accept_tuple, condition_slide_order +from ietf.meeting.utils import ( + condition_slide_order, + finalize, + generate_proceedings_content, + organize_proceedings_sessions, + sort_accept_tuple, +) from ietf.meeting.utils import add_event_info_to_session_qs from ietf.meeting.utils import session_time_for_sorting from ietf.meeting.utils import session_requested_by, SaveMaterialsError @@ -4128,93 +4134,10 @@ def upcoming_json(request): response = HttpResponse(json.dumps(data, indent=2, sort_keys=False), content_type='application/json;charset=%s'%settings.DEFAULT_CHARSET) return response -def organize_proceedings_sessions(sessions): - # Collect sessions by Group, then bin by session name (including sessions with blank names). - # If all of a group's sessions are 'notmeet', the processed data goes in not_meeting_sessions. 
- # Otherwise, the data goes in meeting_sessions. - meeting_groups = [] - not_meeting_groups = [] - for group_acronym, group_sessions in itertools.groupby(sessions, key=lambda s: s.group.acronym): - by_name = {} - is_meeting = False - all_canceled = True - group = None - for s in sorted( - group_sessions, - key=lambda gs: ( - gs.official_timeslotassignment().timeslot.time - if gs.official_timeslotassignment() else datetime.datetime(datetime.MAXYEAR, 1, 1) - ), - ): - group = s.group - if s.current_status != 'notmeet': - is_meeting = True - if s.current_status != 'canceled': - all_canceled = False - by_name.setdefault(s.name, []) - if s.current_status != 'notmeet' or s.presentations.exists(): - by_name[s.name].append(s) # for notmeet, only include sessions with materials - for sess_name, ss in by_name.items(): - session = ss[0] if ss else None - def _format_materials(items): - """Format session/material for template - - Input is a list of (session, materials) pairs. The materials value can be a single value or a list. 
- """ - material_times = {} # key is material, value is first timestamp it appeared - for s, mats in items: - tsa = s.official_timeslotassignment() - timestamp = tsa.timeslot.time if tsa else None - if not isinstance(mats, list): - mats = [mats] - for mat in mats: - if mat and mat not in material_times: - material_times[mat] = timestamp - n_mats = len(material_times) - result = [] - if n_mats == 1: - result.append({'material': list(material_times)[0]}) # no 'time' when only a single material - elif n_mats > 1: - for mat, timestamp in material_times.items(): - result.append({'material': mat, 'time': timestamp}) - return result - - entry = { - 'group': group, - 'name': sess_name, - 'session': session, - 'canceled': all_canceled, - 'has_materials': s.presentations.exists(), - 'agendas': _format_materials((s, s.agenda()) for s in ss), - 'minutes': _format_materials((s, s.minutes()) for s in ss), - 'bluesheets': _format_materials((s, s.bluesheets()) for s in ss), - 'recordings': _format_materials((s, s.recordings()) for s in ss), - 'meetecho_recordings': _format_materials((s, [s.session_recording_url()]) for s in ss), - 'chatlogs': _format_materials((s, s.chatlogs()) for s in ss), - 'slides': _format_materials((s, s.slides()) for s in ss), - 'drafts': _format_materials((s, s.drafts()) for s in ss), - 'last_update': session.last_update if hasattr(session, 'last_update') else None - } - if session and session.meeting.type_id == 'ietf' and not session.meeting.proceedings_final: - entry['attendances'] = _format_materials((s, s) for s in ss if Attended.objects.filter(session=s).exists()) - if is_meeting: - meeting_groups.append(entry) - else: - not_meeting_groups.append(entry) - return meeting_groups, not_meeting_groups - def proceedings(request, num=None): - - def area_and_group_acronyms_from_session(s): - area = s.group_parent_at_the_time() - if area == None: - area = s.group.parent - group = s.group_at_the_time() - return (area.acronym, group.acronym) - meeting = 
get_meeting(num) - + # Early proceedings were hosted on www.ietf.org rather than the datatracker if meeting.proceedings_format_version == 1: return HttpResponseRedirect(settings.PROCEEDINGS_V1_BASE_URL.format(meeting=meeting)) @@ -4225,72 +4148,12 @@ def area_and_group_acronyms_from_session(s): kwargs['num'] = num return redirect('ietf.meeting.views.materials', **kwargs) - begin_date = meeting.get_submission_start_date() - cut_off_date = meeting.get_submission_cut_off_date() - cor_cut_off_date = meeting.get_submission_correction_date() - today_utc = date_today(datetime.timezone.utc) - - schedule = get_schedule(meeting, None) - sessions = ( - meeting.session_set.with_current_status() - .filter(Q(timeslotassignments__schedule__in=[schedule, schedule.base if schedule else None]) - | Q(current_status='notmeet')) - .select_related() - .order_by('-current_status') - ) - - plenaries, _ = organize_proceedings_sessions( - sessions.filter(name__icontains='plenary') - .exclude(current_status='notmeet') - ) - irtf_meeting, irtf_not_meeting = organize_proceedings_sessions( - sessions.filter(group__parent__acronym = 'irtf').order_by('group__acronym') - ) - # per Colin (datatracker #5010) - don't report not meeting rags - irtf_not_meeting = [item for item in irtf_not_meeting if item["group"].type_id != "rag"] - irtf = {"meeting_groups":irtf_meeting, "not_meeting_groups":irtf_not_meeting} - - training, _ = organize_proceedings_sessions( - sessions.filter(group__acronym__in=['edu','iaoc'], type_id__in=['regular', 'other',]) - .exclude(current_status='notmeet') - ) - iab, _ = organize_proceedings_sessions( - sessions.filter(group__parent__acronym = 'iab') - .exclude(current_status='notmeet') - ) - editorial, _ = organize_proceedings_sessions( - sessions.filter(group__acronym__in=['rsab','rswg']) - .exclude(current_status='notmeet') - ) - - ietf = sessions.filter(group__parent__type__slug = 'area').exclude(group__acronym__in=['edu','iepg','tools']) - ietf = list(ietf) - 
ietf.sort(key=lambda s: area_and_group_acronyms_from_session(s)) - ietf_areas = [] - for area, area_sessions in itertools.groupby(ietf, key=lambda s: s.group_parent_at_the_time()): - meeting_groups, not_meeting_groups = organize_proceedings_sessions(area_sessions) - ietf_areas.append((area, meeting_groups, not_meeting_groups)) - - cache_version = Document.objects.filter(session__meeting__number=meeting.number).aggregate(Max('time'))["time__max"] with timezone.override(meeting.tz()): - return render(request, "meeting/proceedings.html", { + return render(request, "meeting/proceedings_wrapper.html", { 'meeting': meeting, - 'plenaries': plenaries, - 'training': training, - 'irtf': irtf, - 'iab': iab, - 'editorial': editorial, - 'ietf_areas': ietf_areas, - 'cut_off_date': cut_off_date, - 'cor_cut_off_date': cor_cut_off_date, - 'submission_started': today_utc > begin_date, - 'cache_version': cache_version, 'attendance': meeting.get_attendance(), - 'meetinghost_logo': { - 'max_height': settings.MEETINGHOST_LOGO_MAX_DISPLAY_HEIGHT, - 'max_width': settings.MEETINGHOST_LOGO_MAX_DISPLAY_WIDTH, - } + 'proceedings_content': generate_proceedings_content(meeting), }) @role_required('Secretariat') diff --git a/ietf/templates/meeting/proceedings.html b/ietf/templates/meeting/proceedings.html index b5d4a6198a..0aa8197fe9 100644 --- a/ietf/templates/meeting/proceedings.html +++ b/ietf/templates/meeting/proceedings.html @@ -1,184 +1,160 @@ -{% extends "base.html" %} -{# Copyright The IETF Trust 2015, All Rights Reserved #} -{% load origin %} -{% load ietf_filters static %} -{% block pagehead %} - -{% endblock %} -{% block title %} - IETF {{ meeting.number }} - {% if not meeting.proceedings_final %}Draft{% endif %} - Proceedings -{% endblock %} -{% block content %} - {% origin %} - {% include 'meeting/proceedings/title.html' with meeting=meeting attendance=attendance only %} - {% if user|has_role:"Secretariat" and not meeting.proceedings_final %} - - Finalize proceedings - - {% endif 
%} - {# cache for 15 minutes, as long as there's no proceedings activity. takes 4-8 seconds to generate. #} - {% load cache %} - {% cache 900 ietf_meeting_proceedings meeting.number cache_version %} - {% include 'meeting/proceedings/introduction.html' with meeting=meeting only %} - - {% if plenaries %} -

Plenaries

- - - - - - - - - - - - {% for entry in plenaries %} - {% include "meeting/group_proceedings.html" with entry=entry meeting=meeting show_agenda=True only %} - {% endfor %} - -
GroupArtifactsRecordingsSlidesInternet-Drafts
- {% endif %} - - {% for area, meeting_groups, not_meeting_groups in ietf_areas %} -

- {{ area.acronym|upper }} {{ area.name }} -

- {% if meeting_groups %} - - - - - - - - - - - - {% for entry in meeting_groups %} - {% include "meeting/group_proceedings.html" with entry=entry meeting=meeting show_agenda=True only %} - {% endfor %} - -
GroupArtifactsRecordingsSlidesInternet-Drafts
- {% endif %} - {% if not_meeting_groups %} -

- {{ area.name }} groups not meeting: - {% for entry in not_meeting_groups %} - {% if entry.name == "" %}{# do not show named sessions in this list #} - - {{ entry.group.acronym }} - {% if not forloop.last %},{% endif %} - {% endif %} - {% endfor %} -

- - - - - - - - - - - - {% for entry in not_meeting_groups %}{% if entry.has_materials %} - {% include "meeting/group_proceedings.html" with entry=entry meeting=meeting show_agenda=True only %} - {% endif %}{% endfor %} - -
- {% endif %} - {% endfor %} - - {% if training %} -

Training

+{% include 'meeting/proceedings/introduction.html' with meeting=meeting only %} + +{% if plenaries %} +

Plenaries

+ + + + + + + + + + + + {% for entry in plenaries %} + {% include "meeting/group_proceedings.html" with entry=entry meeting=meeting show_agenda=True only %} + {% endfor %} + +
GroupArtifactsRecordingsSlidesInternet-Drafts
+{% endif %} + +{% for area, meeting_groups, not_meeting_groups in ietf_areas %} +

+ {{ area.acronym|upper }} {{ area.name }} +

+ {% if meeting_groups %} - - - - - - - + + + + + + + - {% for entry in training %} - {% include "meeting/group_proceedings.html" with entry=entry meeting=meeting show_agenda=False only %} - {% endfor %} + {% for entry in meeting_groups %} + {% include "meeting/group_proceedings.html" with entry=entry meeting=meeting show_agenda=True only %} + {% endfor %}
GroupArtifactsRecordingsSlidesInternet-Drafts
GroupArtifactsRecordingsSlidesInternet-Drafts
{% endif %} - - {% if iab %} -

- IAB Internet Architecture Board -

- + {% if not_meeting_groups %} +

+ {{ area.name }} groups not meeting: + {% for entry in not_meeting_groups %} + {% if entry.name == "" %}{# do not show named sessions in this list #} + + {{ entry.group.acronym }} + {% if not forloop.last %},{% endif %} + {% endif %} + {% endfor %} +

+
- - - - - - - + + + + + + + - {% for entry in iab %} - {% include "meeting/group_proceedings.html" with entry=entry meeting=meeting show_agenda=True only %} - {% endfor %} + {% for entry in not_meeting_groups %}{% if entry.has_materials %} + {% include "meeting/group_proceedings.html" with entry=entry meeting=meeting show_agenda=True only %} + {% endif %}{% endfor %}
- Group - - Artifacts - - Recordings - - Slides - - Internet-Drafts -
{% endif %} - - {% if irtf.meeting_groups %} -

- IRTF Internet Research Task Force -

- - - - - - - - - - - - {% for entry in irtf.meeting_groups %} - {% include "meeting/group_proceedings.html" with entry=entry meeting=meeting show_agenda=True only %} - {% endfor %} - -
- Group - - Artifacts - - Recordings - - Slides - - Internet-Drafts -
- {% if irtf.not_meeting_groups %} +{% endfor %} + +{% if training %} +

Training

+ + + + + + + + + + + + {% for entry in training %} + {% include "meeting/group_proceedings.html" with entry=entry meeting=meeting show_agenda=False only %} + {% endfor %} + +
GroupArtifactsRecordingsSlidesInternet-Drafts
+{% endif %} + +{% if iab %} +

+ IAB Internet Architecture Board +

+ + + + + + + + + + + + {% for entry in iab %} + {% include "meeting/group_proceedings.html" with entry=entry meeting=meeting show_agenda=True only %} + {% endfor %} + +
+ Group + + Artifacts + + Recordings + + Slides + + Internet-Drafts +
+{% endif %} + +{% if irtf.meeting_groups %} +

+ IRTF Internet Research Task Force +

+ + + + + + + + + + + + {% for entry in irtf.meeting_groups %} + {% include "meeting/group_proceedings.html" with entry=entry meeting=meeting show_agenda=True only %} + {% endfor %} + +
+ Group + + Artifacts + + Recordings + + Slides + + Internet-Drafts +
+ {% if irtf.not_meeting_groups %}

IRTF groups not meeting: {% for entry in irtf.not_meeting_groups %} @@ -191,18 +167,18 @@

- - - - - - - + + + + + + + - {% for entry in irtf.not_meeting %}{% if entry.has_materials %} - {% include "meeting/group_proceedings.html" with entry=entry meeting=meeting show_agenda=True only %} - {% endif %}{% endfor %} + {% for entry in irtf.not_meeting %}{% if entry.has_materials %} + {% include "meeting/group_proceedings.html" with entry=entry meeting=meeting show_agenda=True only %} + {% endif %}{% endfor %}
{% endif %} @@ -211,35 +187,29 @@

Editorial Stream

- - - - - - - + + + + + + + - {% for entry in editorial %} - {% include "meeting/group_proceedings.html" with entry=entry meeting=meeting show_agenda=True only %} - {% endfor %} + {% for entry in editorial %} + {% include "meeting/group_proceedings.html" with entry=entry meeting=meeting show_agenda=True only %} + {% endfor %}
- Group - - Artifacts - - Recordings - - Slides - - Internet-Drafts -
+ Group + + Artifacts + + Recordings + + Slides + + Internet-Drafts +
{% endif %} - {% endif %} -{% endcache %} -{% endblock %} -{% block js %} - -{% endblock %} +{% endif %} diff --git a/ietf/templates/meeting/proceedings_wrapper.html b/ietf/templates/meeting/proceedings_wrapper.html new file mode 100644 index 0000000000..a20291a693 --- /dev/null +++ b/ietf/templates/meeting/proceedings_wrapper.html @@ -0,0 +1,27 @@ +{% extends "base.html" %} +{# Copyright The IETF Trust 2015, All Rights Reserved #} +{% load origin %} +{% load ietf_filters static %} +{% block pagehead %} + +{% endblock %} +{% block title %} + IETF {{ meeting.number }} + {% if not meeting.proceedings_final %}Draft{% endif %} + Proceedings +{% endblock %} +{% block content %} + {% origin %} + {% include 'meeting/proceedings/title.html' with meeting=meeting attendance=attendance only %} + {% if user|has_role:"Secretariat" and not meeting.proceedings_final %} + + Finalize proceedings + + {% endif %} + {{ proceedings_content }} +{% endblock %} +{% block js %} + +{% endblock %} From ed19b54d8a39fca72a292181bdb2c12e9b63a5b6 Mon Sep 17 00:00:00 2001 From: Jennifer Richards Date: Thu, 6 Feb 2025 14:35:07 -0400 Subject: [PATCH 003/405] fix: deal with "GMT" time zone in iCalendar files (#8506) * fix: omit vtimezone when None * fix: fix ics time format for tz=GMT --- ietf/doc/templatetags/ietf_filters.py | 5 ++++- ietf/templates/meeting/agenda.ics | 10 +++++----- 2 files changed, 9 insertions(+), 6 deletions(-) diff --git a/ietf/doc/templatetags/ietf_filters.py b/ietf/doc/templatetags/ietf_filters.py index e1a80a26b2..86507eeaaa 100644 --- a/ietf/doc/templatetags/ietf_filters.py +++ b/ietf/doc/templatetags/ietf_filters.py @@ -533,11 +533,14 @@ def ics_date_time(dt, tzname): >>> ics_date_time(datetime.datetime(2022,1,2,3,4,5), 'UTC') ':20220102T030405Z' + >>> ics_date_time(datetime.datetime(2022,1,2,3,4,5), 'GmT') + ':20220102T030405Z' + >>> ics_date_time(datetime.datetime(2022,1,2,3,4,5), 'America/Los_Angeles') ';TZID=America/Los_Angeles:20220102T030405' """ timestamp = 
dt.strftime('%Y%m%dT%H%M%S') - if tzname.lower() == 'utc': + if tzname.lower() in ('gmt', 'utc'): return f':{timestamp}Z' else: return f';TZID={ics_esc(tzname)}:{timestamp}' diff --git a/ietf/templates/meeting/agenda.ics b/ietf/templates/meeting/agenda.ics index 72b7ab806c..8bc8222bbe 100644 --- a/ietf/templates/meeting/agenda.ics +++ b/ietf/templates/meeting/agenda.ics @@ -1,15 +1,15 @@ -{% load humanize tz %}{% autoescape off %}{% timezone schedule.meeting.tz %}{% load ietf_filters textfilters %}{% load cache %}{% cache 1800 ietf_meeting_agenda_ics schedule.meeting.number request.path request.GET %}BEGIN:VCALENDAR +{% load humanize tz %}{% autoescape off %}{% timezone schedule.meeting.tz %}{% with tzname=schedule.meeting.time_zone|lower %}{% load ietf_filters textfilters %}{% load cache %}{% cache 1800 ietf_meeting_agenda_ics schedule.meeting.number request.path request.GET %}BEGIN:VCALENDAR VERSION:2.0 METHOD:PUBLISH PRODID:-//IETF//datatracker.ietf.org ical agenda//EN -{{schedule.meeting.vtimezone}}{% for item in assignments %}BEGIN:VEVENT +{% if tzname != "utc" and tzname != "gmt" %}{% firstof schedule.meeting.vtimezone "" %}{% endif %}{% for item in assignments %}BEGIN:VEVENT UID:ietf-{{schedule.meeting.number}}-{{item.timeslot.pk}}-{{item.session.group.acronym}} SUMMARY:{% if item.session.name %}{{item.session.name|ics_esc}}{% else %}{{item.session.group_at_the_time.acronym|lower}} - {{item.session.group_at_the_time.name}}{%endif%}{% if item.session.agenda_note %} ({{item.session.agenda_note}}){% endif %} {% if item.timeslot.show_location %}LOCATION:{{item.timeslot.get_location}} {% endif %}STATUS:{{item.session.ical_status}} CLASS:PUBLIC -DTSTART{% ics_date_time item.timeslot.local_start_time schedule.meeting.time_zone %} -DTEND{% ics_date_time item.timeslot.local_end_time schedule.meeting.time_zone %} +DTSTART{% ics_date_time item.timeslot.local_start_time tzname %} +DTEND{% ics_date_time item.timeslot.local_end_time tzname %} DTSTAMP{% ics_date_time 
item.timeslot.modified|utc 'utc' %}{% if item.session.agenda %} URL:{{item.session.agenda.get_versionless_href}}{% endif %} DESCRIPTION:{{item.timeslot.name|ics_esc}}\n{% if item.session.agenda_note %} @@ -29,4 +29,4 @@ DESCRIPTION:{{item.timeslot.name|ics_esc}}\n{% if item.session.agenda_note %} \n{# link agenda for ietf meetings #} See in schedule: {% absurl 'agenda' num=schedule.meeting.number %}#row-{{ item.slug }}\n{% endif %} END:VEVENT -{% endfor %}END:VCALENDAR{% endcache %}{% endtimezone %}{% endautoescape %} +{% endfor %}END:VCALENDAR{% endcache %}{% endwith %}{% endtimezone %}{% endautoescape %} From d1ec7378da3ecb66d3e492219db75fda69681df4 Mon Sep 17 00:00:00 2001 From: Nicolas Giard Date: Fri, 14 Feb 2025 18:14:03 -0500 Subject: [PATCH 004/405] fix: use groupAcronym to determine hackathon icon in agenda (#8540) * fix: use groupAcronym to determine hackathon icon in agenda * test: fix agenda tests related to hackathon --- client/agenda/AgendaScheduleList.vue | 4 ++-- playwright/helpers/meeting.js | 2 +- playwright/tests/meeting/agenda.spec.js | 6 +++--- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/client/agenda/AgendaScheduleList.vue b/client/agenda/AgendaScheduleList.vue index e7c14797ee..905677b4da 100644 --- a/client/agenda/AgendaScheduleList.vue +++ b/client/agenda/AgendaScheduleList.vue @@ -307,7 +307,7 @@ const meetingEvents = computed(() => { }) } // -> Point to Wiki for Hackathon sessions, HedgeDocs otherwise - if (item.name.toLowerCase().includes('hackathon')) { + if (item.groupAcronym === 'hackathon') { links.push({ id: `lnk-${item.id}-wiki`, label: 'Wiki', @@ -461,7 +461,7 @@ const meetingEvents = computed(() => { case 'other': if (item.name.toLowerCase().indexOf('office hours') >= 0) { icon = 'bi-building' - } else if (item.name.toLowerCase().indexOf('hackathon') >= 0) { + } else if (item.groupAcronym === 'hackathon') { icon = 'bi-command bi-pink' } break diff --git a/playwright/helpers/meeting.js 
b/playwright/helpers/meeting.js index 52bc331fd6..9722ffc68b 100644 --- a/playwright/helpers/meeting.js +++ b/playwright/helpers/meeting.js @@ -395,7 +395,7 @@ module.exports = { name: 'Hackathon Kickoff', startDateTime: day1.set({ hour: 10, minute: 30 }), duration: '30m', - ...findAreaGroup('hackathon-kickoff', categories[2]), + ...findAreaGroup('hackathon', categories[2]), showAgenda: true, hasAgenda: true, hasRecordings: true, diff --git a/playwright/tests/meeting/agenda.spec.js b/playwright/tests/meeting/agenda.spec.js index d31dbd5c2c..e6c6e17031 100644 --- a/playwright/tests/meeting/agenda.spec.js +++ b/playwright/tests/meeting/agenda.spec.js @@ -238,7 +238,7 @@ test.describe('past - desktop', () => { // Name column // ----------- // Event icon - if (['break', 'plenary'].includes(event.type) || (event.type === 'other' && ['office hours', 'hackathon'].some(s => event.name.toLowerCase().indexOf(s) >= 0))) { + if (['break', 'plenary'].includes(event.type) || (event.type === 'other' && event.name.toLowerCase().indexOf('office hours') >= 0)) { await expect(row.locator('.agenda-table-cell-name > i.bi')).toBeVisible() } // Name link @@ -286,7 +286,7 @@ test.describe('past - desktop', () => { // No meeting materials yet warning badge await expect(eventButtons.locator('.no-meeting-materials')).toBeVisible() } - if (event.name.toLowerCase().includes('hackathon')) { + if (event.groupAcronym === 'hackathon') { // Hackathon Wiki button const hackathonWikiLink = `https://wiki.ietf.org/meeting/${meetingData.meeting.number}/hackathon` await expect(eventButtons.locator(`#btn-lnk-${event.id}-wiki`)).toHaveAttribute('href', hackathonWikiLink) @@ -1169,7 +1169,7 @@ test.describe('future - desktop', () => { // No meeting materials yet warning badge await expect(eventButtons.locator('.no-meeting-materials')).toBeVisible() } - if (event.name.toLowerCase().includes('hackathon')) { + if (event.groupAcronym === 'hackathon') { // Hackathon Wiki button const hackathonWikiLink = 
`https://wiki.ietf.org/meeting/${meetingData.meeting.number}/hackathon` await expect(eventButtons.locator(`#btn-lnk-${event.id}-wiki`)).toHaveAttribute('href', hackathonWikiLink) From 853de88546b25d1a59477e31580014351c9f0913 Mon Sep 17 00:00:00 2001 From: Matthew Holloway Date: Sat, 15 Feb 2025 12:30:24 +1300 Subject: [PATCH 005/405] feat: CSS other-session-selected. Fixes #8525 (#8529) --- ietf/static/css/ietf.scss | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/ietf/static/css/ietf.scss b/ietf/static/css/ietf.scss index f1f2b94a19..aee93dcfe1 100644 --- a/ietf/static/css/ietf.scss +++ b/ietf/static/css/ietf.scss @@ -695,8 +695,8 @@ td.position-empty { } .edit-meeting-schedule .session.other-session-selected { - outline: var(--bs-info) solid 0.2em; - /* width matches margin on .session */ + outline: 0.3em solid var(--bs-info); + box-shadow: 0 0 1em var(--bs-info); z-index: 2; /* render above timeslot outlines */ } From 04952d58fa05f8b47884f621d9c1c510df0eb9a6 Mon Sep 17 00:00:00 2001 From: Jennifer Richards Date: Tue, 18 Feb 2025 12:33:33 -0400 Subject: [PATCH 006/405] fix: remove all staging files for submission (#8535) --- ietf/submit/utils.py | 12 ++++-------- 1 file changed, 4 insertions(+), 8 deletions(-) diff --git a/ietf/submit/utils.py b/ietf/submit/utils.py index 49f28c4196..fe52f1c774 100644 --- a/ietf/submit/utils.py +++ b/ietf/submit/utils.py @@ -671,20 +671,16 @@ def move_files_to_repository(submission): raise ValueError("Intended to move '%s' to '%s', but found source and destination missing.") -def remove_staging_files(name, rev, exts=None): - """Remove staging files corresponding to a submission - - exts is a list of extensions to be removed. If None, defaults to settings.IDSUBMIT_FILE_TYPES. 
- """ - if exts is None: - exts = [f'.{ext}' for ext in settings.IDSUBMIT_FILE_TYPES] +def remove_staging_files(name, rev): + """Remove staging files corresponding to a submission""" basename = pathlib.Path(settings.IDSUBMIT_STAGING_PATH) / f'{name}-{rev}' + exts = [f'.{ext}' for ext in settings.IDSUBMIT_FILE_TYPES] for ext in exts: basename.with_suffix(ext).unlink(missing_ok=True) def remove_submission_files(submission): - remove_staging_files(submission.name, submission.rev, submission.file_types.split(',')) + remove_staging_files(submission.name, submission.rev) def approvable_submissions_for_user(user): From e71272fd2f2047d092fca76ad56b1ebe7899a27c Mon Sep 17 00:00:00 2001 From: Robert Sparks Date: Wed, 19 Feb 2025 15:09:17 -0600 Subject: [PATCH 007/405] chore: mark a function unreachable (#8553) --- ietf/submit/utils.py | 1 + 1 file changed, 1 insertion(+) diff --git a/ietf/submit/utils.py b/ietf/submit/utils.py index fe52f1c774..f19f2384c8 100644 --- a/ietf/submit/utils.py +++ b/ietf/submit/utils.py @@ -646,6 +646,7 @@ def cancel_submission(submission): def rename_submission_files(submission, prev_rev, new_rev): + log.unreachable("2025-2-19") for ext in settings.IDSUBMIT_FILE_TYPES: staging_path = Path(settings.IDSUBMIT_STAGING_PATH) source = staging_path / f"{submission.name}-{prev_rev}.{ext}" From 997239a2eaf70d39a95c1bb465f4106c2ab1d544 Mon Sep 17 00:00:00 2001 From: Robert Sparks Date: Wed, 19 Feb 2025 17:41:10 -0600 Subject: [PATCH 008/405] feat: write objects to blob storage (#8557) * feat: basic blobstore infrastructure for dev * refactor: (broken) attempt to put minio console behind nginx * feat: initialize blobstore with boto3 * fix: abandon attempt to proxy minio. Use docker compose instead. 
* feat: beginning of blob writes * feat: storage utilities * feat: test buckets * chore: black * chore: remove unused import * chore: avoid f string when not needed * fix: inform all settings files about blobstores * fix: declare types for some settings * ci: point to new target base * ci: adjust test workflow * fix: give the tests debug environment a blobstore * fix: "better" name declarations * ci: use devblobstore container * chore: identify places to write to blobstorage * chore: remove unreachable code * feat: store materials * feat: store statements * feat: store status changes * feat: store liaison attachments * feat: store agendas provided with Interim session requests * chore: capture TODOs * feat: store polls and chatlogs * chore: remove unneeded TODO * feat: store drafts on submit and post * fix: handle storage during doc expiration and resurrection * fix: mirror an unlink * chore: add/refine TODOs * feat: store slide submissions * fix: structure slide test correctly * fix: correct sense of existence check * feat: store some indexes * feat: BlobShadowFileSystemStorage * feat: shadow floorplans / host logos to the blob * chore: remove unused import * feat: strip path from blob shadow names * feat: shadow photos / thumbs * refactor: combine photo and photothumb blob kinds The photos / thumbs were already dropped in the same directory, so let's not add a distinction at this point. 
* style: whitespace * refactor: use kwargs consistently * chore: migrations * refactor: better deconstruct(); rebuild migrations * fix: use new class in mack patch * chore: add TODO * feat: store group index documents * chore: identify more TODO * feat: store reviews * fix: repair merge * chore: remove unnecessary TODO * feat: StoredObject metadata * fix: deburr some debugging code * fix: only set the deleted timestamp once * chore: correct typo * fix: get_or_create vs get and test * fix: avoid the questionable is_seekable helper * chore: capture future design consideration * chore: blob store cfg for k8s * chore: black * chore: copyright * ci: bucket name prefix option + run Black Adds/uses DATATRACKER_BLOB_STORE_BUCKET_PREFIX option. Other changes are just Black styling. * ci: fix typo in bucket name expression * chore: parameters in app-configure-blobstore Allows use with other blob stores. * ci: remove verify=False option * fix: don't return value from __init__ * feat: option to log timing of S3Storage calls * chore: units * fix: deleted->null when storing a file * style: Black * feat: log as JSON; refactor to share code; handle exceptions * ci: add ietf_log_blob_timing option for k8s * test: --no-manage-blobstore option for running tests * test: use blob store settings from env, if set * test: actually set a couple more storage opts * feat: offswitch (#8541) * feat: offswitch * fix: apply ENABLE_BLOBSTORAGE to BlobShadowFileSystemStorage behavior * chore: log timing of blob reads * chore: import Config from botocore.config * chore(deps): import boto3-stubs / botocore botocore is implicitly imported, but make it explicit since we refer to it directly * chore: drop type annotation that mypy loudly ignores * refactor: add storage methods via mixin Shares code between Document and DocHistory without putting it in the base DocumentInfo class, which lacks the name field. Also makes mypy happy. 
* feat: add timeout / retry limit to boto client * ci: let k8s config the timeouts via env * chore: repair merge resolution typo * chore: tweak settings imports * chore: simplify k8s/settings_local.py imports --------- Co-authored-by: Jennifer Richards --- .devcontainer/docker-compose.extend.yml | 4 + .github/workflows/tests.yml | 2 + README.md | 17 ++ dev/deploy-to-container/settings_local.py | 23 ++- dev/diff/settings_local.py | 23 ++- dev/tests/docker-compose.debug.yml | 3 + dev/tests/settings_local.py | 23 ++- docker-compose.yml | 10 + docker/app.Dockerfile | 4 +- docker/configs/settings_local.py | 27 ++- docker/docker-compose.extend.yml | 4 + docker/scripts/app-configure-blobstore.py | 28 +++ docker/scripts/app-init.sh | 5 + ietf/api/tests.py | 5 + ietf/doc/admin.py | 8 +- ietf/doc/expire.py | 14 ++ ...ject_storedobject_unique_name_per_store.py | 66 ++++++ ietf/doc/models.py | 85 +++++++- ietf/doc/resources.py | 25 ++- ietf/doc/storage_backends.py | 192 ++++++++++++++++++ ietf/doc/storage_utils.py | 103 ++++++++++ ietf/doc/tasks.py | 4 +- ietf/doc/tests_bofreq.py | 3 + ietf/doc/tests_charter.py | 6 + ietf/doc/tests_conflict_review.py | 2 + ietf/doc/tests_draft.py | 13 ++ ietf/doc/tests_material.py | 6 + ietf/doc/tests_review.py | 5 + ietf/doc/tests_statement.py | 17 ++ ietf/doc/tests_status_change.py | 18 +- ietf/doc/utils.py | 2 +- ietf/doc/views_bofreq.py | 2 + ietf/doc/views_charter.py | 8 +- ietf/doc/views_conflict_review.py | 6 +- ietf/doc/views_draft.py | 6 + ietf/doc/views_material.py | 2 + ietf/doc/views_review.py | 1 + ietf/doc/views_statement.py | 13 +- ietf/doc/views_status_change.py | 6 +- ietf/group/tasks.py | 11 + ietf/group/tests_info.py | 35 ++-- ietf/idindex/tasks.py | 4 + ietf/idindex/tests.py | 5 + ietf/liaisons/forms.py | 2 + ietf/liaisons/tests.py | 38 +++- ietf/meeting/factories.py | 5 + ietf/meeting/forms.py | 1 + ietf/meeting/helpers.py | 5 + ..._floorplan_image_alter_meetinghost_logo.py | 56 +++++ ietf/meeting/models.py | 15 +- 
ietf/meeting/tests_views.py | 126 ++++++++++-- ietf/meeting/utils.py | 17 +- ietf/meeting/views.py | 17 +- ietf/nomcom/models.py | 1 + ...r_person_photo_alter_person_photo_thumb.py | 38 ++++ ietf/person/models.py | 16 +- ietf/settings.py | 38 ++++ ietf/settings_test.py | 30 ++- ietf/submit/tests.py | 94 ++++++++- ietf/submit/utils.py | 20 +- ietf/utils/storage.py | 56 ++++- ietf/utils/test_runner.py | 70 ++++++- k8s/settings_local.py | 103 ++++++++-- requirements.txt | 6 +- 64 files changed, 1484 insertions(+), 116 deletions(-) create mode 100755 docker/scripts/app-configure-blobstore.py create mode 100644 ietf/doc/migrations/0025_storedobject_storedobject_unique_name_per_store.py create mode 100644 ietf/doc/storage_backends.py create mode 100644 ietf/doc/storage_utils.py create mode 100644 ietf/meeting/migrations/0010_alter_floorplan_image_alter_meetinghost_logo.py create mode 100644 ietf/person/migrations/0004_alter_person_photo_alter_person_photo_thumb.py diff --git a/.devcontainer/docker-compose.extend.yml b/.devcontainer/docker-compose.extend.yml index fa9a412cf2..286eefb29c 100644 --- a/.devcontainer/docker-compose.extend.yml +++ b/.devcontainer/docker-compose.extend.yml @@ -14,6 +14,10 @@ services: # - datatracker-vscode-ext:/root/.vscode-server/extensions # Runs app on the same network as the database container, allows "forwardPorts" in devcontainer.json function. 
network_mode: service:db + blobstore: + ports: + - '9000' + - '9001' volumes: datatracker-vscode-ext: diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 5457415f59..f10c1db9a3 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -28,6 +28,8 @@ jobs: services: db: image: ghcr.io/ietf-tools/datatracker-db:latest + blobstore: + image: ghcr.io/ietf-tools/datatracker-devblobstore:latest steps: - uses: actions/checkout@v4 diff --git a/README.md b/README.md index ee9865ba21..0ece0eb03b 100644 --- a/README.md +++ b/README.md @@ -106,6 +106,23 @@ Nightly database dumps of the datatracker are available as Docker images: `ghcr. > Note that to update the database in your dev environment to the latest version, you should run the `docker/cleandb` script. +### Blob storage for dev/test + +The dev and test environments use [minio](https://github.com/minio/minio) to provide local blob storage. See the settings files for how the app container communicates with the blobstore container. If you need to work with minio directly from outside the containers (to interact with its api or console), use `docker compose` from the top level directory of your clone to expose it at an ephemeral port. + +``` +$ docker compose port blobstore 9001 +0.0.0.0: + +$ curl -I http://localhost: +HTTP/1.1 200 OK +... 
+``` + + +The minio container exposes the minio api at port 9000 and the minio console at port 9001 + + ### Frontend Development #### Intro diff --git a/dev/deploy-to-container/settings_local.py b/dev/deploy-to-container/settings_local.py index 0a991ae9fe..e878206bd5 100644 --- a/dev/deploy-to-container/settings_local.py +++ b/dev/deploy-to-container/settings_local.py @@ -1,7 +1,9 @@ # Copyright The IETF Trust 2007-2019, All Rights Reserved # -*- coding: utf-8 -*- -from ietf.settings import * # pyflakes:ignore +from ietf.settings import * # pyflakes:ignore +from ietf.settings import STORAGES, MORE_STORAGE_NAMES, BLOBSTORAGE_CONNECT_TIMEOUT, BLOBSTORAGE_READ_TIMEOUT, BLOBSTORAGE_MAX_ATTEMPTS +import botocore.config ALLOWED_HOSTS = ['*'] @@ -79,3 +81,22 @@ # OIDC configuration SITE_URL = 'https://__HOSTNAME__' + +for storagename in MORE_STORAGE_NAMES: + STORAGES[storagename] = { + "BACKEND": "ietf.doc.storage_backends.CustomS3Storage", + "OPTIONS": dict( + endpoint_url="http://blobstore:9000", + access_key="minio_root", + secret_key="minio_pass", + security_token=None, + client_config=botocore.config.Config( + signature_version="s3v4", + connect_timeout=BLOBSTORAGE_CONNECT_TIMEOUT, + read_timeout=BLOBSTORAGE_READ_TIMEOUT, + retries={"total_max_attempts": BLOBSTORAGE_MAX_ATTEMPTS}, + ), + verify=False, + bucket_name=f"test-{storagename}", + ), + } diff --git a/dev/diff/settings_local.py b/dev/diff/settings_local.py index 95d1e481c9..9e0806a8a6 100644 --- a/dev/diff/settings_local.py +++ b/dev/diff/settings_local.py @@ -1,7 +1,9 @@ # Copyright The IETF Trust 2007-2019, All Rights Reserved # -*- coding: utf-8 -*- -from ietf.settings import * # pyflakes:ignore +from ietf.settings import * # pyflakes:ignore +from ietf.settings import STORAGES, MORE_STORAGE_NAMES, BLOBSTORAGE_CONNECT_TIMEOUT, BLOBSTORAGE_READ_TIMEOUT, BLOBSTORAGE_MAX_ATTEMPTS +import botocore.config ALLOWED_HOSTS = ['*'] @@ -66,3 +68,22 @@ SLIDE_STAGING_PATH = 'test/staging/' DE_GFM_BINARY = 
'/usr/local/bin/de-gfm' + +for storagename in MORE_STORAGE_NAMES: + STORAGES[storagename] = { + "BACKEND": "ietf.doc.storage_backends.CustomS3Storage", + "OPTIONS": dict( + endpoint_url="http://blobstore:9000", + access_key="minio_root", + secret_key="minio_pass", + security_token=None, + client_config=botocore.config.Config( + signature_version="s3v4", + connect_timeout=BLOBSTORAGE_CONNECT_TIMEOUT, + read_timeout=BLOBSTORAGE_READ_TIMEOUT, + retries={"total_max_attempts": BLOBSTORAGE_MAX_ATTEMPTS}, + ), + verify=False, + bucket_name=f"test-{storagename}", + ), + } diff --git a/dev/tests/docker-compose.debug.yml b/dev/tests/docker-compose.debug.yml index 8d939e0ea2..8117b92375 100644 --- a/dev/tests/docker-compose.debug.yml +++ b/dev/tests/docker-compose.debug.yml @@ -28,5 +28,8 @@ services: volumes: - postgresdb-data:/var/lib/postgresql/data + blobstore: + image: ghcr.io/ietf-tools/datatracker-devblobstore:latest + volumes: postgresdb-data: diff --git a/dev/tests/settings_local.py b/dev/tests/settings_local.py index 7b10bee06a..f2166053a7 100644 --- a/dev/tests/settings_local.py +++ b/dev/tests/settings_local.py @@ -1,7 +1,9 @@ # Copyright The IETF Trust 2007-2019, All Rights Reserved # -*- coding: utf-8 -*- -from ietf.settings import * # pyflakes:ignore +from ietf.settings import * # pyflakes:ignore +from ietf.settings import STORAGES, MORE_STORAGE_NAMES, BLOBSTORAGE_CONNECT_TIMEOUT, BLOBSTORAGE_READ_TIMEOUT, BLOBSTORAGE_MAX_ATTEMPTS +import botocore.config ALLOWED_HOSTS = ['*'] @@ -65,3 +67,22 @@ SLIDE_STAGING_PATH = 'test/staging/' DE_GFM_BINARY = '/usr/local/bin/de-gfm' + +for storagename in MORE_STORAGE_NAMES: + STORAGES[storagename] = { + "BACKEND": "ietf.doc.storage_backends.CustomS3Storage", + "OPTIONS": dict( + endpoint_url="http://blobstore:9000", + access_key="minio_root", + secret_key="minio_pass", + security_token=None, + client_config=botocore.config.Config( + signature_version="s3v4", + connect_timeout=BLOBSTORAGE_CONNECT_TIMEOUT, + 
read_timeout=BLOBSTORAGE_READ_TIMEOUT, + retries={"total_max_attempts": BLOBSTORAGE_MAX_ATTEMPTS}, + ), + verify=False, + bucket_name=f"test-{storagename}", + ), + } diff --git a/docker-compose.yml b/docker-compose.yml index 9988b10c2e..30ce8ba4d2 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -15,6 +15,7 @@ services: depends_on: - db - mq + - blobstore ipc: host @@ -83,6 +84,14 @@ services: - .:/workspace - app-assets:/assets + blobstore: + image: ghcr.io/ietf-tools/datatracker-devblobstore:latest + restart: unless-stopped + volumes: + - "minio-data:/data" + + + # Celery Beat is a periodic task runner. It is not normally needed for development, # but can be enabled by uncommenting the following. # @@ -106,3 +115,4 @@ services: volumes: postgresdb-data: app-assets: + minio-data: diff --git a/docker/app.Dockerfile b/docker/app.Dockerfile index b7dd44b6f1..fee3833733 100644 --- a/docker/app.Dockerfile +++ b/docker/app.Dockerfile @@ -43,8 +43,8 @@ RUN rm -rf /tmp/library-scripts # Copy the startup file COPY docker/scripts/app-init.sh /docker-init.sh COPY docker/scripts/app-start.sh /docker-start.sh -RUN sed -i 's/\r$//' /docker-init.sh && chmod +x /docker-init.sh -RUN sed -i 's/\r$//' /docker-start.sh && chmod +x /docker-start.sh +RUN sed -i 's/\r$//' /docker-init.sh && chmod +rx /docker-init.sh +RUN sed -i 's/\r$//' /docker-start.sh && chmod +rx /docker-start.sh # Fix user UID / GID to match host RUN groupmod --gid $USER_GID $USERNAME \ diff --git a/docker/configs/settings_local.py b/docker/configs/settings_local.py index 5df5d15e82..46833451c1 100644 --- a/docker/configs/settings_local.py +++ b/docker/configs/settings_local.py @@ -1,11 +1,13 @@ -# Copyright The IETF Trust 2007-2019, All Rights Reserved +# Copyright The IETF Trust 2007-2025, All Rights Reserved # -*- coding: utf-8 -*- -from ietf.settings import * # pyflakes:ignore +from ietf.settings import * # pyflakes:ignore +from ietf.settings import STORAGES, MORE_STORAGE_NAMES, 
BLOBSTORAGE_CONNECT_TIMEOUT, BLOBSTORAGE_READ_TIMEOUT, BLOBSTORAGE_MAX_ATTEMPTS +import botocore.config ALLOWED_HOSTS = ['*'] -from ietf.settings_postgresqldb import DATABASES # pyflakes:ignore +from ietf.settings_postgresqldb import DATABASES # pyflakes:ignore IDSUBMIT_IDNITS_BINARY = "/usr/local/bin/idnits" IDSUBMIT_STAGING_PATH = "/assets/www6s/staging/" @@ -37,6 +39,25 @@ # DEV_TEMPLATE_CONTEXT_PROCESSORS = [ # 'ietf.context_processors.sql_debug', # ] +for storagename in MORE_STORAGE_NAMES: + STORAGES[storagename] = { + "BACKEND": "ietf.doc.storage_backends.CustomS3Storage", + "OPTIONS": dict( + endpoint_url="http://blobstore:9000", + access_key="minio_root", + secret_key="minio_pass", + security_token=None, + client_config=botocore.config.Config( + signature_version="s3v4", + connect_timeout=BLOBSTORAGE_CONNECT_TIMEOUT, + read_timeout=BLOBSTORAGE_READ_TIMEOUT, + retries={"total_max_attempts": BLOBSTORAGE_MAX_ATTEMPTS}, + ), + verify=False, + bucket_name=storagename, + ), + } + DOCUMENT_PATH_PATTERN = '/assets/ietfdata/doc/{doc.type_id}/' INTERNET_DRAFT_PATH = '/assets/ietf-ftp/internet-drafts/' diff --git a/docker/docker-compose.extend.yml b/docker/docker-compose.extend.yml index 0538c0d3e9..a69a453110 100644 --- a/docker/docker-compose.extend.yml +++ b/docker/docker-compose.extend.yml @@ -16,6 +16,10 @@ services: pgadmin: ports: - '5433' + blobstore: + ports: + - '9000' + - '9001' celery: volumes: - .:/workspace diff --git a/docker/scripts/app-configure-blobstore.py b/docker/scripts/app-configure-blobstore.py new file mode 100755 index 0000000000..7b5ce962eb --- /dev/null +++ b/docker/scripts/app-configure-blobstore.py @@ -0,0 +1,28 @@ +#!/usr/bin/env python +# Copyright The IETF Trust 2024, All Rights Reserved + +import boto3 +import os +import sys + +from ietf.settings import MORE_STORAGE_NAMES + + +def init_blobstore(): + blobstore = boto3.resource( + "s3", + endpoint_url=os.environ.get("BLOB_STORE_ENDPOINT_URL", "http://blobstore:9000"), + 
aws_access_key_id=os.environ.get("BLOB_STORE_ACCESS_KEY", "minio_root"), + aws_secret_access_key=os.environ.get("BLOB_STORE_SECRET_KEY", "minio_pass"), + aws_session_token=None, + config=botocore.config.Config(signature_version="s3v4"), + verify=False, + ) + for bucketname in MORE_STORAGE_NAMES: + blobstore.create_bucket( + Bucket=f"{os.environ.get('BLOB_STORE_BUCKET_PREFIX', '')}{bucketname}".strip() + ) + + +if __name__ == "__main__": + sys.exit(init_blobstore()) diff --git a/docker/scripts/app-init.sh b/docker/scripts/app-init.sh index b96b88f1f5..e970398ac2 100755 --- a/docker/scripts/app-init.sh +++ b/docker/scripts/app-init.sh @@ -73,6 +73,11 @@ echo "Creating data directories..." chmod +x ./docker/scripts/app-create-dirs.sh ./docker/scripts/app-create-dirs.sh +# Configure the development blobstore + +echo "Configuring blobstore..." +PYTHONPATH=/workspace python ./docker/scripts/app-configure-blobstore.py + # Download latest coverage results file echo "Downloading latest coverage results file..." 
diff --git a/ietf/api/tests.py b/ietf/api/tests.py index d9af457e95..ac0b37a608 100644 --- a/ietf/api/tests.py +++ b/ietf/api/tests.py @@ -25,6 +25,7 @@ import debug # pyflakes:ignore import ietf +from ietf.doc.storage_utils import retrieve_str from ietf.doc.utils import get_unicode_document_content from ietf.doc.models import RelatedDocument, State from ietf.doc.factories import IndividualDraftFactory, WgDraftFactory, WgRfcFactory @@ -553,6 +554,10 @@ def test_api_upload_polls_and_chatlog(self): newdoc = session.presentations.get(document__type_id=type_id).document newdoccontent = get_unicode_document_content(newdoc.name, Path(session.meeting.get_materials_path()) / type_id / newdoc.uploaded_filename) self.assertEqual(json.loads(content), json.loads(newdoccontent)) + self.assertEqual( + json.loads(retrieve_str(type_id, newdoc.uploaded_filename)), + json.loads(content) + ) def test_api_upload_bluesheet(self): url = urlreverse("ietf.meeting.views.api_upload_bluesheet") diff --git a/ietf/doc/admin.py b/ietf/doc/admin.py index 301d32d7cc..db3b24b2d2 100644 --- a/ietf/doc/admin.py +++ b/ietf/doc/admin.py @@ -12,7 +12,7 @@ TelechatDocEvent, BallotPositionDocEvent, ReviewRequestDocEvent, InitialReviewDocEvent, AddedMessageEvent, SubmissionDocEvent, DeletedEvent, EditedAuthorsDocEvent, DocumentURL, ReviewAssignmentDocEvent, IanaExpertDocEvent, IRSGBallotDocEvent, DocExtResource, DocumentActionHolder, - BofreqEditorDocEvent, BofreqResponsibleDocEvent ) + BofreqEditorDocEvent, BofreqResponsibleDocEvent, StoredObject ) from ietf.utils.validators import validate_external_resource_value @@ -218,3 +218,9 @@ class DocExtResourceAdmin(admin.ModelAdmin): search_fields = ['doc__name', 'value', 'display_name', 'name__slug',] raw_id_fields = ['doc', ] admin.site.register(DocExtResource, DocExtResourceAdmin) + +class StoredObjectAdmin(admin.ModelAdmin): + list_display = ['store', 'name', 'modified', 'deleted'] + list_filter = ['deleted'] + search_fields = ['store', 'name', 'doc_name', 
'doc_rev', 'deleted'] +admin.site.register(StoredObject, StoredObjectAdmin) diff --git a/ietf/doc/expire.py b/ietf/doc/expire.py index 98554bae0e..bf8523aa98 100644 --- a/ietf/doc/expire.py +++ b/ietf/doc/expire.py @@ -13,6 +13,7 @@ from typing import List, Optional # pyflakes:ignore +from ietf.doc.storage_utils import exists_in_storage, remove_from_storage from ietf.doc.utils import update_action_holders from ietf.utils import log from ietf.utils.mail import send_mail @@ -156,11 +157,17 @@ def remove_ftp_copy(f): if mark.exists(): mark.unlink() + def remove_from_active_draft_storage(file): + # Assumes the glob will never find a file with no suffix + ext = file.suffix[1:] + remove_from_storage("active-draft", f"{ext}/{file.name}", warn_if_missing=False) + # Note that the object is already in the "draft" storage. src_dir = Path(settings.INTERNET_DRAFT_PATH) for file in src_dir.glob("%s-%s.*" % (doc.name, rev)): move_file(str(file.name)) remove_ftp_copy(str(file.name)) + remove_from_active_draft_storage(file) def expire_draft(doc): # clean up files @@ -218,6 +225,13 @@ def move_file_to(subdir): mark = Path(settings.FTP_DIR) / "internet-drafts" / basename if mark.exists(): mark.unlink() + if ext: + # Note that we're not moving these strays anywhere - the assumption + # is that the active-draft blobstore will not get strays. 
+ # See, however, the note about "major system failures" at "unknown_ids" + blobname = f"{ext[1:]}/{basename}" + if exists_in_storage("active-draft", blobname): + remove_from_storage("active-draft", blobname) try: doc = Document.objects.get(name=filename, rev=revision) diff --git a/ietf/doc/migrations/0025_storedobject_storedobject_unique_name_per_store.py b/ietf/doc/migrations/0025_storedobject_storedobject_unique_name_per_store.py new file mode 100644 index 0000000000..e948ca3011 --- /dev/null +++ b/ietf/doc/migrations/0025_storedobject_storedobject_unique_name_per_store.py @@ -0,0 +1,66 @@ +# Copyright The IETF Trust 2025, All Rights Reserved + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("doc", "0024_remove_ad_is_watching_states"), + ] + + operations = [ + migrations.CreateModel( + name="StoredObject", + fields=[ + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ("store", models.CharField(max_length=256)), + ("name", models.CharField(max_length=1024)), + ("sha384", models.CharField(max_length=96)), + ("len", models.PositiveBigIntegerField()), + ( + "store_created", + models.DateTimeField( + help_text="The instant the object ws first placed in the store" + ), + ), + ( + "created", + models.DateTimeField( + help_text="Instant object became known. May not be the same as the storage's created value for the instance. It will hold ctime for objects imported from older disk storage" + ), + ), + ( + "modified", + models.DateTimeField( + help_text="Last instant object was modified. May not be the same as the storage's modified value for the instance. 
It will hold mtime for objects imported from older disk storage unless they've actually been overwritten more recently" + ), + ), + ("doc_name", models.CharField(blank=True, max_length=255, null=True)), + ("doc_rev", models.CharField(blank=True, max_length=16, null=True)), + ("deleted", models.DateTimeField(null=True)), + ], + options={ + "indexes": [ + models.Index( + fields=["doc_name", "doc_rev"], + name="doc_storedo_doc_nam_d04465_idx", + ) + ], + }, + ), + migrations.AddConstraint( + model_name="storedobject", + constraint=models.UniqueConstraint( + fields=("store", "name"), name="unique_name_per_store" + ), + ), + ] diff --git a/ietf/doc/models.py b/ietf/doc/models.py index 03698c80c3..55da70972c 100644 --- a/ietf/doc/models.py +++ b/ietf/doc/models.py @@ -9,14 +9,16 @@ import django.db import rfc2html +from io import BufferedReader from pathlib import Path from lxml import etree -from typing import Optional, TYPE_CHECKING +from typing import Optional, Protocol, TYPE_CHECKING, Union from weasyprint import HTML as wpHTML from weasyprint.text.fonts import FontConfiguration from django.db import models from django.core import checks +from django.core.files.base import File from django.core.cache import caches from django.core.validators import URLValidator, RegexValidator from django.urls import reverse as urlreverse @@ -30,6 +32,11 @@ import debug # pyflakes:ignore from ietf.group.models import Group +from ietf.doc.storage_utils import ( + store_str as utils_store_str, + store_bytes as utils_store_bytes, + store_file as utils_store_file +) from ietf.name.models import ( DocTypeName, DocTagName, StreamName, IntendedStdLevelName, StdLevelName, DocRelationshipName, DocReminderTypeName, BallotPositionName, ReviewRequestStateName, ReviewAssignmentStateName, FormalLanguageName, DocUrlTagName, ExtResourceName) @@ -714,10 +721,52 @@ def referenced_by_rfcs_as_rfc_or_draft(self): if self.type_id == "rfc" and self.came_from_draft(): refs_to |= 
self.came_from_draft().referenced_by_rfcs() return refs_to - + class Meta: abstract = True + +class HasNameRevAndTypeIdProtocol(Protocol): + """Typing Protocol describing a class that has name, rev, and type_id properties""" + @property + def name(self) -> str: ... + @property + def rev(self) -> str: ... + @property + def type_id(self) -> str: ... + + +class StorableMixin: + """Mixin that adds storage helpers to a DocumentInfo subclass""" + def store_str( + self: HasNameRevAndTypeIdProtocol, + name: str, + content: str, + allow_overwrite: bool = False + ) -> None: + return utils_store_str(self.type_id, name, content, allow_overwrite, self.name, self.rev) + + def store_bytes( + self: HasNameRevAndTypeIdProtocol, + name: str, + content: bytes, + allow_overwrite: bool = False, + doc_name: Optional[str] = None, + doc_rev: Optional[str] = None + ) -> None: + return utils_store_bytes(self.type_id, name, content, allow_overwrite, self.name, self.rev) + + def store_file( + self: HasNameRevAndTypeIdProtocol, + name: str, + file: Union[File, BufferedReader], + allow_overwrite: bool = False, + doc_name: Optional[str] = None, + doc_rev: Optional[str] = None + ) -> None: + return utils_store_file(self.type_id, name, file, allow_overwrite, self.name, self.rev) + + STATUSCHANGE_RELATIONS = ('tops','tois','tohist','toinf','tobcp','toexp') class RelatedDocument(models.Model): @@ -870,7 +919,7 @@ def role_for_doc(self): 'invalid' ) -class Document(DocumentInfo): +class Document(StorableMixin, DocumentInfo): name = models.CharField(max_length=255, validators=[validate_docname,], unique=True) # immutable action_holders = models.ManyToManyField(Person, through=DocumentActionHolder, blank=True) @@ -1192,7 +1241,7 @@ class DocHistoryAuthor(DocumentAuthorInfo): def __str__(self): return u"%s %s (%s)" % (self.document.doc.name, self.person, self.order) -class DocHistory(DocumentInfo): +class DocHistory(StorableMixin, DocumentInfo): doc = ForeignKey(Document, related_name="history_set") 
name = models.CharField(max_length=255) @@ -1538,3 +1587,31 @@ class BofreqEditorDocEvent(DocEvent): class BofreqResponsibleDocEvent(DocEvent): """ Capture the responsible leadership (IAB and IESG members) for a BOF Request """ responsible = models.ManyToManyField('person.Person', blank=True) + +class StoredObject(models.Model): + """Hold metadata about objects placed in object storage""" + + store = models.CharField(max_length=256) + name = models.CharField(max_length=1024, null=False, blank=False) # N.B. the 1024 limit on name comes from S3 + sha384 = models.CharField(max_length=96) + len = models.PositiveBigIntegerField() + store_created = models.DateTimeField(help_text="The instant the object ws first placed in the store") + created = models.DateTimeField( + null=False, + help_text="Instant object became known. May not be the same as the storage's created value for the instance. It will hold ctime for objects imported from older disk storage" + ) + modified = models.DateTimeField( + null=False, + help_text="Last instant object was modified. May not be the same as the storage's modified value for the instance. 
It will hold mtime for objects imported from older disk storage unless they've actually been overwritten more recently" + ) + doc_name = models.CharField(max_length=255, null=True, blank=True) + doc_rev = models.CharField(max_length=16, null=True, blank=True) + deleted = models.DateTimeField(null=True) + + class Meta: + constraints = [ + models.UniqueConstraint(fields=['store', 'name'], name='unique_name_per_store'), + ] + indexes = [ + models.Index(fields=["doc_name", "doc_rev"]), + ] diff --git a/ietf/doc/resources.py b/ietf/doc/resources.py index bba57013b9..157a3ad556 100644 --- a/ietf/doc/resources.py +++ b/ietf/doc/resources.py @@ -18,7 +18,7 @@ RelatedDocHistory, BallotPositionDocEvent, AddedMessageEvent, SubmissionDocEvent, ReviewRequestDocEvent, ReviewAssignmentDocEvent, EditedAuthorsDocEvent, DocumentURL, IanaExpertDocEvent, IRSGBallotDocEvent, DocExtResource, DocumentActionHolder, - BofreqEditorDocEvent,BofreqResponsibleDocEvent) + BofreqEditorDocEvent, BofreqResponsibleDocEvent, StoredObject) from ietf.name.resources import BallotPositionNameResource, DocTypeNameResource class BallotTypeResource(ModelResource): @@ -842,3 +842,26 @@ class Meta: "responsible": ALL_WITH_RELATIONS, } api.doc.register(BofreqResponsibleDocEventResource()) + + +class StoredObjectResource(ModelResource): + class Meta: + queryset = StoredObject.objects.all() + serializer = api.Serializer() + cache = SimpleCache() + #resource_name = 'storedobject' + ordering = ['id', ] + filtering = { + "id": ALL, + "store": ALL, + "name": ALL, + "sha384": ALL, + "len": ALL, + "store_created": ALL, + "created": ALL, + "modified": ALL, + "doc_name": ALL, + "doc_rev": ALL, + "deleted": ALL, + } +api.doc.register(StoredObjectResource()) diff --git a/ietf/doc/storage_backends.py b/ietf/doc/storage_backends.py new file mode 100644 index 0000000000..5eeab040e5 --- /dev/null +++ b/ietf/doc/storage_backends.py @@ -0,0 +1,192 @@ +# Copyright The IETF Trust 2025, All Rights Reserved + +import debug # 
pyflakes:ignore +import json + +from contextlib import contextmanager +from hashlib import sha384 +from io import BufferedReader +from storages.backends.s3 import S3Storage +from typing import Optional, Union + +from django.core.files.base import File + +from ietf.doc.models import StoredObject +from ietf.utils.log import log +from ietf.utils.timezone import timezone + + +@contextmanager +def maybe_log_timing(enabled, op, **kwargs): + """If enabled, log elapsed time and additional data from kwargs + + Emits log even if an exception occurs + """ + before = timezone.now() + exception = None + try: + yield + except Exception as err: + exception = err + raise + finally: + if enabled: + dt = timezone.now() - before + log( + json.dumps( + { + "log": "S3Storage_timing", + "seconds": dt.total_seconds(), + "op": op, + "exception": "" if exception is None else repr(exception), + **kwargs, + } + ) + ) + + +# TODO-BLOBSTORE +# Consider overriding save directly so that +# we capture metadata for, e.g., ImageField objects +class CustomS3Storage(S3Storage): + + def __init__(self, **settings): + self.in_flight_custom_metadata = {} # type is Dict[str, Dict[str, str]] + super().__init__(**settings) + + def get_default_settings(self): + # add a default for the ietf_log_blob_timing boolean + return super().get_default_settings() | {"ietf_log_blob_timing": False} + + def _save(self, name, content): + with maybe_log_timing( + self.ietf_log_blob_timing, "_save", bucket_name=self.bucket_name, name=name + ): + return super()._save(name, content) + + def _open(self, name, mode="rb"): + with maybe_log_timing( + self.ietf_log_blob_timing, + "_open", + bucket_name=self.bucket_name, + name=name, + mode=mode, + ): + return super()._open(name, mode) + + def delete(self, name): + with maybe_log_timing( + self.ietf_log_blob_timing, "delete", bucket_name=self.bucket_name, name=name + ): + super().delete(name) + + def store_file( + self, + kind: str, + name: str, + file: Union[File, BufferedReader], 
+ allow_overwrite: bool = False, + doc_name: Optional[str] = None, + doc_rev: Optional[str] = None, + ): + is_new = not self.exists_in_storage(kind, name) + # debug.show('f"Asked to store {name} in {kind}: is_new={is_new}, allow_overwrite={allow_overwrite}"') + if not allow_overwrite and not is_new: + log(f"Failed to save {kind}:{name} - name already exists in store") + debug.show('f"Failed to save {kind}:{name} - name already exists in store"') + # raise Exception("Not ignoring overwrite attempts while testing") + else: + try: + new_name = self.save(name, file) + now = timezone.now() + record, created = StoredObject.objects.get_or_create( + store=kind, + name=name, + defaults=dict( + sha384=self.in_flight_custom_metadata[name]["sha384"], + len=int(self.in_flight_custom_metadata[name]["len"]), + store_created=now, + created=now, + modified=now, + doc_name=doc_name, # Note that these are assumed to be invariant + doc_rev=doc_rev, # for a given name + ), + ) + if not created: + record.sha384 = self.in_flight_custom_metadata[name]["sha384"] + record.len = int(self.in_flight_custom_metadata[name]["len"]) + record.modified = now + record.deleted = None + record.save() + if new_name != name: + complaint = f"Error encountered saving '{name}' - results stored in '{new_name}' instead." + log(complaint) + debug.show("complaint") + # Note that we are otherwise ignoring this condition - it should become an error later. + except Exception as e: + # Log and then swallow the exception while we're learning. + # Don't let failure pass so quietly when these are the autoritative bits. 
+ complaint = f"Failed to save {kind}:{name}" + log(complaint, e) + debug.show('f"{complaint}: {e}"') + finally: + del self.in_flight_custom_metadata[name] + return None + + def exists_in_storage(self, kind: str, name: str) -> bool: + try: + # open is realized with a HEAD + # See https://github.com/jschneier/django-storages/blob/b79ea310201e7afd659fe47e2882fe59aae5b517/storages/backends/s3.py#L528 + with self.open(name): + return True + except FileNotFoundError: + return False + + def remove_from_storage( + self, kind: str, name: str, warn_if_missing: bool = True + ) -> None: + now = timezone.now() + try: + with self.open(name): + pass + self.delete(name) + # debug.show('f"deleted {name} from {kind} storage"') + except FileNotFoundError: + if warn_if_missing: + complaint = ( + f"WARNING: Asked to delete non-existent {name} from {kind} storage" + ) + log(complaint) + debug.show("complaint") + existing_record = StoredObject.objects.filter(store=kind, name=name) + if not existing_record.exists() and warn_if_missing: + complaint = f"WARNING: Asked to delete {name} from {kind} storage, but there was no matching StorageObject" + log(complaint) + debug.show("complaint") + else: + # Note that existing_record is a queryset that will have one matching object + existing_record.filter(deleted__isnull=True).update(deleted=now) + + def _get_write_parameters(self, name, content=None): + # debug.show('f"getting write parameters for {name}"') + params = super()._get_write_parameters(name, content) + if "Metadata" not in params: + params["Metadata"] = {} + try: + content.seek(0) + except AttributeError: # TODO-BLOBSTORE + debug.say("Encountered Non-Seekable content") + raise NotImplementedError("cannot handle unseekable content") + content_bytes = content.read() + if not isinstance( + content_bytes, bytes + ): # TODO-BLOBSTORE: This is sketch-development only -remove before committing + raise Exception(f"Expected bytes - got {type(content_bytes)}") + content.seek(0) + metadata = { + 
"len": f"{len(content_bytes)}", + "sha384": f"{sha384(content_bytes).hexdigest()}", + } + params["Metadata"].update(metadata) + self.in_flight_custom_metadata[name] = metadata + return params diff --git a/ietf/doc/storage_utils.py b/ietf/doc/storage_utils.py new file mode 100644 index 0000000000..4f0516339a --- /dev/null +++ b/ietf/doc/storage_utils.py @@ -0,0 +1,103 @@ +# Copyright The IETF Trust 2025, All Rights Reserved + +from io import BufferedReader +from typing import Optional, Union +import debug # pyflakes ignore + +from django.conf import settings +from django.core.files.base import ContentFile, File +from django.core.files.storage import storages + + +# TODO-BLOBSTORE (Future, maybe after leaving 3.9) : add a return type +def _get_storage(kind: str): + + if kind in settings.MORE_STORAGE_NAMES: + # TODO-BLOBSTORE - add a checker that verifies configuration will only return CustomS3Storages + return storages[kind] + else: + debug.say(f"Got into not-implemented looking for {kind}") + raise NotImplementedError(f"Don't know how to store {kind}") + + +def exists_in_storage(kind: str, name: str) -> bool: + if settings.ENABLE_BLOBSTORAGE: + store = _get_storage(kind) + return store.exists_in_storage(kind, name) + else: + return False + + +def remove_from_storage(kind: str, name: str, warn_if_missing: bool = True) -> None: + if settings.ENABLE_BLOBSTORAGE: + store = _get_storage(kind) + store.remove_from_storage(kind, name, warn_if_missing) + return None + + +# TODO-BLOBSTORE: Try to refactor `kind` out of the signature of the methods already on the custom store (which knows its kind) +def store_file( + kind: str, + name: str, + file: Union[File, BufferedReader], + allow_overwrite: bool = False, + doc_name: Optional[str] = None, + doc_rev: Optional[str] = None, +) -> None: + # debug.show('f"asked to store {name} into {kind}"') + if settings.ENABLE_BLOBSTORAGE: + store = _get_storage(kind) + store.store_file(kind, name, file, allow_overwrite, doc_name, doc_rev) + 
return None + + +def store_bytes( + kind: str, + name: str, + content: bytes, + allow_overwrite: bool = False, + doc_name: Optional[str] = None, + doc_rev: Optional[str] = None, +) -> None: + if settings.ENABLE_BLOBSTORAGE: + store_file(kind, name, ContentFile(content), allow_overwrite) + return None + + +def store_str( + kind: str, + name: str, + content: str, + allow_overwrite: bool = False, + doc_name: Optional[str] = None, + doc_rev: Optional[str] = None, +) -> None: + if settings.ENABLE_BLOBSTORAGE: + content_bytes = content.encode("utf-8") + store_bytes(kind, name, content_bytes, allow_overwrite) + return None + + +def retrieve_bytes(kind: str, name: str) -> bytes: + from ietf.doc.storage_backends import maybe_log_timing + content = b"" + if settings.ENABLE_BLOBSTORAGE: + store = _get_storage(kind) + with store.open(name) as f: + with maybe_log_timing( + hasattr(store, "ietf_log_blob_timing") and store.ietf_log_blob_timing, + "read", + bucket_name=store.bucket_name if hasattr(store, "bucket_name") else "", + name=name, + ): + content = f.read() + return content + + +def retrieve_str(kind: str, name: str) -> str: + content = "" + if settings.ENABLE_BLOBSTORAGE: + content_bytes = retrieve_bytes(kind, name) + # TODO-BLOBSTORE: try to decode all the different ways doc.text() does + content = content_bytes.decode("utf-8") + return content diff --git a/ietf/doc/tasks.py b/ietf/doc/tasks.py index 6eb901e6c7..e24c58e1e7 100644 --- a/ietf/doc/tasks.py +++ b/ietf/doc/tasks.py @@ -84,7 +84,7 @@ def generate_idnits2_rfc_status_task(): outpath = Path(settings.DERIVED_DIR) / "idnits2-rfc-status" blob = generate_idnits2_rfc_status() try: - outpath.write_text(blob, encoding="utf8") + outpath.write_text(blob, encoding="utf8") # TODO-BLOBSTORE except Exception as e: log.log(f"failed to write idnits2-rfc-status: {e}") @@ -94,7 +94,7 @@ def generate_idnits2_rfcs_obsoleted_task(): outpath = Path(settings.DERIVED_DIR) / "idnits2-rfcs-obsoleted" blob = 
generate_idnits2_rfcs_obsoleted() try: - outpath.write_text(blob, encoding="utf8") + outpath.write_text(blob, encoding="utf8") # TODO-BLOBSTORE except Exception as e: log.log(f"failed to write idnits2-rfcs-obsoleted: {e}") diff --git a/ietf/doc/tests_bofreq.py b/ietf/doc/tests_bofreq.py index 2e27efd627..6a7c9393ef 100644 --- a/ietf/doc/tests_bofreq.py +++ b/ietf/doc/tests_bofreq.py @@ -16,6 +16,7 @@ from django.template.loader import render_to_string from django.utils import timezone +from ietf.doc.storage_utils import retrieve_str from ietf.group.factories import RoleFactory from ietf.doc.factories import BofreqFactory, NewRevisionDocEventFactory from ietf.doc.models import State, Document, NewRevisionDocEvent @@ -340,6 +341,7 @@ def test_submit(self): doc = reload_db_objects(doc) self.assertEqual('%02d'%(int(rev)+1) ,doc.rev) self.assertEqual(f'# {username}', doc.text()) + self.assertEqual(f'# {username}', retrieve_str('bofreq',doc.get_base_name())) self.assertEqual(docevent_count+1, doc.docevent_set.count()) self.assertEqual(1, len(outbox)) rev = doc.rev @@ -379,6 +381,7 @@ def test_start_new_bofreq(self): self.assertEqual(list(bofreq_editors(bofreq)), [nobody]) self.assertEqual(bofreq.latest_event(NewRevisionDocEvent).rev, '00') self.assertEqual(bofreq.text_or_error(), 'some stuff') + self.assertEqual(retrieve_str('bofreq',bofreq.get_base_name()), 'some stuff') self.assertEqual(len(outbox),1) finally: os.unlink(file.name) diff --git a/ietf/doc/tests_charter.py b/ietf/doc/tests_charter.py index e0207fe842..62e49559e2 100644 --- a/ietf/doc/tests_charter.py +++ b/ietf/doc/tests_charter.py @@ -16,6 +16,7 @@ from ietf.doc.factories import CharterFactory, NewRevisionDocEventFactory, TelechatDocEventFactory from ietf.doc.models import ( Document, State, BallotDocEvent, BallotType, NewRevisionDocEvent, TelechatDocEvent, WriteupDocEvent ) +from ietf.doc.storage_utils import retrieve_str from ietf.doc.utils_charter import ( next_revision, default_review_text, 
default_action_text, charter_name_for_group ) from ietf.doc.utils import close_open_ballots @@ -519,6 +520,11 @@ def test_submit_charter(self): ftp_charter_path = Path(settings.FTP_DIR) / "charter" / charter_path.name self.assertTrue(ftp_charter_path.exists()) self.assertTrue(charter_path.samefile(ftp_charter_path)) + blobstore_contents = retrieve_str("charter", charter.get_base_name()) + self.assertEqual( + blobstore_contents, + "Windows line\nMac line\nUnix line\n" + utf_8_snippet.decode("utf-8"), + ) def test_submit_initial_charter(self): diff --git a/ietf/doc/tests_conflict_review.py b/ietf/doc/tests_conflict_review.py index d2f94922b2..791db17f5a 100644 --- a/ietf/doc/tests_conflict_review.py +++ b/ietf/doc/tests_conflict_review.py @@ -16,6 +16,7 @@ from ietf.doc.factories import IndividualDraftFactory, ConflictReviewFactory, RgDraftFactory from ietf.doc.models import Document, DocEvent, NewRevisionDocEvent, BallotPositionDocEvent, TelechatDocEvent, State, DocTagName +from ietf.doc.storage_utils import retrieve_str from ietf.doc.utils import create_ballot_if_not_open from ietf.doc.views_conflict_review import default_approval_text from ietf.group.models import Person @@ -422,6 +423,7 @@ def test_initial_submission(self): f.close() self.assertTrue(ftp_path.exists()) self.assertTrue( "submission-00" in doc.latest_event(NewRevisionDocEvent).desc) + self.assertEqual(retrieve_str("conflrev",basename), "Some initial review text\n") def test_subsequent_submission(self): doc = Document.objects.get(name='conflict-review-imaginary-irtf-submission') diff --git a/ietf/doc/tests_draft.py b/ietf/doc/tests_draft.py index 2405806682..4753c4ff0c 100644 --- a/ietf/doc/tests_draft.py +++ b/ietf/doc/tests_draft.py @@ -24,6 +24,7 @@ from ietf.doc.models import ( Document, DocReminder, DocEvent, ConsensusDocEvent, LastCallDocEvent, RelatedDocument, State, TelechatDocEvent, WriteupDocEvent, DocRelationshipName, IanaExpertDocEvent ) +from ietf.doc.storage_utils import 
exists_in_storage, store_str from ietf.doc.utils import get_tags_for_stream_id, create_ballot_if_not_open from ietf.doc.views_draft import AdoptDraftForm from ietf.name.models import DocTagName, RoleName @@ -577,6 +578,11 @@ def setUp(self): def write_draft_file(self, name, size): with (Path(settings.INTERNET_DRAFT_PATH) / name).open('w') as f: f.write("a" * size) + _, ext = os.path.splitext(name) + if ext: + ext=ext[1:] + store_str("active-draft", f"{ext}/{name}", "a"*size, allow_overwrite=True) + store_str("draft", f"{ext}/{name}", "a"*size, allow_overwrite=True) class ResurrectTests(DraftFileMixin, TestCase): @@ -649,6 +655,7 @@ def test_resurrect(self): # ensure file restored from archive directory self.assertTrue(os.path.exists(os.path.join(settings.INTERNET_DRAFT_PATH, txt))) self.assertTrue(not os.path.exists(os.path.join(settings.INTERNET_DRAFT_ARCHIVE_DIR, txt))) + self.assertTrue(exists_in_storage("active-draft",f"txt/{txt}")) class ExpireIDsTests(DraftFileMixin, TestCase): @@ -775,6 +782,7 @@ def test_expire_drafts(self): self.assertEqual(draft.action_holders.count(), 0) self.assertIn('Removed all action holders', draft.latest_event(type='changed_action_holders').desc) self.assertTrue(not os.path.exists(os.path.join(settings.INTERNET_DRAFT_PATH, txt))) + self.assertFalse(exists_in_storage("active-draft", f"txt/{txt}")) self.assertTrue(os.path.exists(os.path.join(settings.INTERNET_DRAFT_ARCHIVE_DIR, txt))) draft.delete() @@ -798,6 +806,7 @@ def test_clean_up_draft_files(self): clean_up_draft_files() self.assertTrue(not os.path.exists(os.path.join(settings.INTERNET_DRAFT_PATH, unknown))) + self.assertFalse(exists_in_storage("active-draft", f"txt/{unknown}")) self.assertTrue(os.path.exists(os.path.join(settings.INTERNET_DRAFT_ARCHIVE_DIR, "unknown_ids", unknown))) @@ -808,6 +817,7 @@ def test_clean_up_draft_files(self): clean_up_draft_files() self.assertTrue(not os.path.exists(os.path.join(settings.INTERNET_DRAFT_PATH, malformed))) + 
self.assertFalse(exists_in_storage("active-draft", f"txt/{malformed}")) self.assertTrue(os.path.exists(os.path.join(settings.INTERNET_DRAFT_ARCHIVE_DIR, "unknown_ids", malformed))) @@ -822,9 +832,11 @@ def test_clean_up_draft_files(self): clean_up_draft_files() self.assertTrue(not os.path.exists(os.path.join(settings.INTERNET_DRAFT_PATH, txt))) + self.assertFalse(exists_in_storage("active-draft", f"txt/{txt}")) self.assertTrue(os.path.exists(os.path.join(settings.INTERNET_DRAFT_ARCHIVE_DIR, txt))) self.assertTrue(not os.path.exists(os.path.join(settings.INTERNET_DRAFT_PATH, pdf))) + self.assertFalse(exists_in_storage("active-draft", f"pdf/{pdf}")) self.assertTrue(os.path.exists(os.path.join(settings.INTERNET_DRAFT_ARCHIVE_DIR, pdf))) # expire draft @@ -843,6 +855,7 @@ def test_clean_up_draft_files(self): clean_up_draft_files() self.assertTrue(not os.path.exists(os.path.join(settings.INTERNET_DRAFT_PATH, txt))) + self.assertFalse(exists_in_storage("active-draft", f"txt/{txt}")) self.assertTrue(os.path.exists(os.path.join(settings.INTERNET_DRAFT_ARCHIVE_DIR, txt))) diff --git a/ietf/doc/tests_material.py b/ietf/doc/tests_material.py index aaea8fec3d..c87341c95b 100644 --- a/ietf/doc/tests_material.py +++ b/ietf/doc/tests_material.py @@ -18,6 +18,7 @@ from django.utils import timezone from ietf.doc.models import Document, State, NewRevisionDocEvent +from ietf.doc.storage_utils import retrieve_str from ietf.group.factories import RoleFactory from ietf.group.models import Group from ietf.meeting.factories import MeetingFactory, SessionFactory, SessionPresentationFactory @@ -123,6 +124,9 @@ def test_upload_slides(self): ftp_filepath=Path(settings.FTP_DIR) / "slides" / basename with ftp_filepath.open() as f: self.assertEqual(f.read(), content) + # This test is very sloppy wrt the actual file content. + # Working with/around that for the moment. 
+ self.assertEqual(retrieve_str("slides", basename), content) # check that posting same name is prevented test_file.seek(0) @@ -237,4 +241,6 @@ def test_revise(self, mock_slides_manager_cls): with io.open(os.path.join(doc.get_file_path(), doc.name + "-" + doc.rev + ".txt")) as f: self.assertEqual(f.read(), content) + self.assertEqual(retrieve_str("slides", f"{doc.name}-{doc.rev}.txt"), content) + diff --git a/ietf/doc/tests_review.py b/ietf/doc/tests_review.py index a956fd3287..e93bc02181 100644 --- a/ietf/doc/tests_review.py +++ b/ietf/doc/tests_review.py @@ -20,6 +20,7 @@ import debug # pyflakes:ignore +from ietf.doc.storage_utils import retrieve_str import ietf.review.mailarch from ietf.doc.factories import ( NewRevisionDocEventFactory, IndividualDraftFactory, WgDraftFactory, @@ -63,6 +64,10 @@ def verify_review_files_were_written(self, assignment, expected_content = "This review_file = Path(self.review_subdir) / f"{assignment.review.name}.txt" content = review_file.read_text() self.assertEqual(content, expected_content) + self.assertEqual( + retrieve_str("review", review_file.name), + expected_content + ) review_ftp_file = Path(settings.FTP_DIR) / "review" / review_file.name self.assertTrue(review_file.samefile(review_ftp_file)) diff --git a/ietf/doc/tests_statement.py b/ietf/doc/tests_statement.py index 2071018b10..fea42b97d6 100644 --- a/ietf/doc/tests_statement.py +++ b/ietf/doc/tests_statement.py @@ -14,6 +14,7 @@ from ietf.doc.factories import StatementFactory, DocEventFactory from ietf.doc.models import Document, State, NewRevisionDocEvent +from ietf.doc.storage_utils import retrieve_str from ietf.group.models import Group from ietf.person.factories import PersonFactory from ietf.utils.mail import outbox, empty_outbox @@ -185,8 +186,16 @@ def test_submit(self): self.assertEqual("%02d" % (int(rev) + 1), doc.rev) if postdict["statement_submission"] == "enter": self.assertEqual(f"# {username}", doc.text()) + self.assertEqual( + retrieve_str("statement", 
f"{doc.name}-{doc.rev}.md"), + f"# {username}" + ) else: self.assertEqual("not valid pdf", doc.text()) + self.assertEqual( + retrieve_str("statement", f"{doc.name}-{doc.rev}.pdf"), + "not valid pdf" + ) self.assertEqual(docevent_count + 1, doc.docevent_set.count()) self.assertEqual(0, len(outbox)) rev = doc.rev @@ -255,8 +264,16 @@ def test_start_new_statement(self): self.assertIsNotNone(statement.history_set.last().latest_event(type="published_statement")) if postdict["statement_submission"] == "enter": self.assertEqual(statement.text_or_error(), "some stuff") + self.assertEqual( + retrieve_str("statement", statement.uploaded_filename), + "some stuff" + ) else: self.assertTrue(statement.uploaded_filename.endswith("pdf")) + self.assertEqual( + retrieve_str("statement", f"{statement.name}-{statement.rev}.pdf"), + "not valid pdf" + ) self.assertEqual(len(outbox), 0) existing_statement = StatementFactory() diff --git a/ietf/doc/tests_status_change.py b/ietf/doc/tests_status_change.py index bd4da4c092..cbdc1a049a 100644 --- a/ietf/doc/tests_status_change.py +++ b/ietf/doc/tests_status_change.py @@ -19,6 +19,7 @@ WgRfcFactory, DocEventFactory, WgDraftFactory ) from ietf.doc.models import ( Document, State, DocEvent, BallotPositionDocEvent, NewRevisionDocEvent, TelechatDocEvent, WriteupDocEvent ) +from ietf.doc.storage_utils import retrieve_str from ietf.doc.utils import create_ballot_if_not_open from ietf.doc.views_status_change import default_approval_text from ietf.group.models import Person @@ -71,7 +72,7 @@ def test_start_review(self): statchg_relation_row_blah="tois") ) self.assertEqual(r.status_code, 302) - status_change = Document.objects.get(name='status-change-imaginary-new') + status_change = Document.objects.get(name='status-change-imaginary-new') self.assertEqual(status_change.get_state('statchg').slug,'adrev') self.assertEqual(status_change.rev,'00') self.assertEqual(status_change.ad.name,'Areað Irector') @@ -563,6 +564,8 @@ def 
test_initial_submission(self): ftp_filepath = Path(settings.FTP_DIR) / "status-changes" / basename self.assertFalse(filepath.exists()) self.assertFalse(ftp_filepath.exists()) + with self.assertRaises(FileNotFoundError): + retrieve_str("statchg",basename) r = self.client.post(url,dict(content="Some initial review text\n",submit_response="1")) self.assertEqual(r.status_code,302) doc = Document.objects.get(name='status-change-imaginary-mid-review') @@ -571,6 +574,10 @@ def test_initial_submission(self): self.assertEqual(f.read(),"Some initial review text\n") with ftp_filepath.open() as f: self.assertEqual(f.read(),"Some initial review text\n") + self.assertEqual( + retrieve_str("statchg", basename), + "Some initial review text\n" + ) self.assertTrue( "mid-review-00" in doc.latest_event(NewRevisionDocEvent).desc) def test_subsequent_submission(self): @@ -607,7 +614,8 @@ def test_subsequent_submission(self): self.assertContains(r, "does not appear to be a text file") # sane post uploading a file - test_file = StringIO("This is a new proposal.") + test_content = "This is a new proposal." 
+ test_file = StringIO(test_content) test_file.name = "unnamed" r = self.client.post(url,dict(txt=test_file,submit_response="1")) self.assertEqual(r.status_code, 302) @@ -615,8 +623,12 @@ def test_subsequent_submission(self): self.assertEqual(doc.rev,'01') path = os.path.join(settings.STATUS_CHANGE_PATH, '%s-%s.txt' % (doc.name, doc.rev)) with io.open(path) as f: - self.assertEqual(f.read(),"This is a new proposal.") + self.assertEqual(f.read(), test_content) f.close() + self.assertEqual( + retrieve_str("statchg", f"{doc.name}-{doc.rev}.txt"), + test_content + ) self.assertTrue( "mid-review-01" in doc.latest_event(NewRevisionDocEvent).desc) # verify reset text button works diff --git a/ietf/doc/utils.py b/ietf/doc/utils.py index 10fe9ff2d7..3ddd904c75 100644 --- a/ietf/doc/utils.py +++ b/ietf/doc/utils.py @@ -1510,7 +1510,7 @@ def update_or_create_draft_bibxml_file(doc, rev): existing_bibxml = "" if normalized_bibxml.strip() != existing_bibxml.strip(): log.log(f"Writing {ref_rev_file_path}") - ref_rev_file_path.write_text(normalized_bibxml, encoding="utf8") + ref_rev_file_path.write_text(normalized_bibxml, encoding="utf8") # TODO-BLOBSTORE def ensure_draft_bibxml_path_exists(): diff --git a/ietf/doc/views_bofreq.py b/ietf/doc/views_bofreq.py index 3bd10287b2..71cbe30491 100644 --- a/ietf/doc/views_bofreq.py +++ b/ietf/doc/views_bofreq.py @@ -101,6 +101,7 @@ def submit(request, name): content = form.cleaned_data['bofreq_content'] with io.open(bofreq.get_file_name(), 'w', encoding='utf-8') as destination: destination.write(content) + bofreq.store_str(bofreq.get_base_name(), content) email_bofreq_new_revision(request, bofreq) return redirect('ietf.doc.views_doc.document_main', name=bofreq.name) @@ -175,6 +176,7 @@ def new_bof_request(request): content = form.cleaned_data['bofreq_content'] with io.open(bofreq.get_file_name(), 'w', encoding='utf-8') as destination: destination.write(content) + bofreq.store_str(bofreq.get_base_name(), content) 
email_bofreq_new_revision(request, bofreq) return redirect('ietf.doc.views_doc.document_main', name=bofreq.name) diff --git a/ietf/doc/views_charter.py b/ietf/doc/views_charter.py index f8748d2126..e899f59227 100644 --- a/ietf/doc/views_charter.py +++ b/ietf/doc/views_charter.py @@ -441,9 +441,10 @@ def submit(request, name, option=None): ) # update rev with charter_filename.open("w", encoding="utf-8") as destination: if form.cleaned_data["txt"]: - destination.write(form.cleaned_data["txt"]) + content=form.cleaned_data["txt"] else: - destination.write(form.cleaned_data["content"]) + content=form.cleaned_data["content"] + destination.write(content) # Also provide a copy to the legacy ftp source directory, which is served by rsync # This replaces the hardlink copy that ghostlink has made in the past # Still using a hardlink as long as these are on the same filesystem. @@ -454,7 +455,8 @@ def submit(request, name, option=None): log( "There was an error creating a hardlink at %s pointing to %s" % (ftp_filename, charter_filename) - ) + ) + charter.store_str(charter_filename.name, content) if option in ["initcharter", "recharter"] and charter.ad == None: diff --git a/ietf/doc/views_conflict_review.py b/ietf/doc/views_conflict_review.py index e55661ccdf..159f1340a4 100644 --- a/ietf/doc/views_conflict_review.py +++ b/ietf/doc/views_conflict_review.py @@ -186,9 +186,10 @@ def save(self, review): filepath = Path(settings.CONFLICT_REVIEW_PATH) / basename with filepath.open('w', encoding='utf-8') as destination: if self.cleaned_data['txt']: - destination.write(self.cleaned_data['txt']) + content = self.cleaned_data['txt'] else: - destination.write(self.cleaned_data['content']) + content = self.cleaned_data['content'] + destination.write(content) ftp_filepath = Path(settings.FTP_DIR) / "conflict-reviews" / basename try: os.link(filepath, ftp_filepath) # Path.hardlink_to is not available until 3.10 @@ -197,6 +198,7 @@ def save(self, review): "There was an error creating a 
hardlink at %s pointing to %s: %s" % (ftp_filepath, filepath, e) ) + review.store_str(basename, content) #This is very close to submit on charter - can we get better reuse? @role_required('Area Director','Secretariat') diff --git a/ietf/doc/views_draft.py b/ietf/doc/views_draft.py index 34104b2005..c80537afb3 100644 --- a/ietf/doc/views_draft.py +++ b/ietf/doc/views_draft.py @@ -32,6 +32,7 @@ generate_publication_request, email_adopted, email_intended_status_changed, email_iesg_processing_document, email_ad_approved_doc, email_iana_expert_review_state_changed ) +from ietf.doc.storage_utils import retrieve_bytes, store_bytes from ietf.doc.utils import ( add_state_change_event, can_adopt_draft, can_unadopt_draft, get_tags_for_stream_id, nice_consensus, update_action_holders, update_reminder, update_telechat, make_notify_changed_event, get_initial_notify, @@ -897,6 +898,11 @@ def restore_draft_file(request, draft): except shutil.Error as ex: messages.warning(request, 'There was an error restoring the Internet-Draft file: {} ({})'.format(file, ex)) log.log(" Exception %s when attempting to move %s" % (ex, file)) + _, ext = os.path.splitext(os.path.basename(file)) + if ext: + ext = ext[1:] + blobname = f"{ext}/{basename}.{ext}" + store_bytes("active-draft", blobname, retrieve_bytes("draft", blobname)) class ShepherdWriteupUploadForm(forms.Form): diff --git a/ietf/doc/views_material.py b/ietf/doc/views_material.py index 361bf5f1e2..6f8b8a8f12 100644 --- a/ietf/doc/views_material.py +++ b/ietf/doc/views_material.py @@ -167,6 +167,8 @@ def edit_material(request, name=None, acronym=None, action=None, doc_type=None): with filepath.open('wb+') as dest: for chunk in f.chunks(): dest.write(chunk) + f.seek(0) + doc.store_file(basename, f) if not doc.meeting_related(): log.assertion('doc.type_id == "slides"') ftp_filepath = Path(settings.FTP_DIR) / doc.type_id / basename diff --git a/ietf/doc/views_review.py b/ietf/doc/views_review.py index bb9e56742d..1f23c435fa 100644 --- 
a/ietf/doc/views_review.py +++ b/ietf/doc/views_review.py @@ -805,6 +805,7 @@ def complete_review(request, name, assignment_id=None, acronym=None): review_path = Path(review.get_file_path()) / f"{review.name}.txt" review_path.write_text(content) + review.store_str(f"{review.name}.txt", content, allow_overwrite=True) # We have a bug that review revisions dont create a new version! review_ftp_path = Path(settings.FTP_DIR) / "review" / review_path.name # See https://github.com/ietf-tools/datatracker/issues/6941 - when that's # addressed, making this link should not be conditional diff --git a/ietf/doc/views_statement.py b/ietf/doc/views_statement.py index bf9f47ddfe..9dc8c8ad69 100644 --- a/ietf/doc/views_statement.py +++ b/ietf/doc/views_statement.py @@ -137,12 +137,15 @@ def submit(request, name): mode="wb" if writing_pdf else "w" ) as destination: if writing_pdf: - for chunk in form.cleaned_data["statement_file"].chunks(): + f = form.cleaned_data["statement_file"] + for chunk in f.chunks(): destination.write(chunk) + f.seek(0) + statement.store_file(statement.uploaded_filename, f) else: destination.write(markdown_content) + statement.store_str(statement.uploaded_filename, markdown_content) return redirect("ietf.doc.views_doc.document_main", name=statement.name) - else: if statement.uploaded_filename.endswith("pdf"): text = CONST_PDF_REV_NOTICE @@ -254,10 +257,14 @@ def new_statement(request): mode="wb" if writing_pdf else "w" ) as destination: if writing_pdf: - for chunk in form.cleaned_data["statement_file"].chunks(): + f = form.cleaned_data["statement_file"] + for chunk in f.chunks(): destination.write(chunk) + f.seek(0) + statement.store_file(statement.uploaded_filename, f) else: destination.write(markdown_content) + statement.store_str(statement.uploaded_filename, markdown_content) return redirect("ietf.doc.views_doc.document_main", name=statement.name) else: diff --git a/ietf/doc/views_status_change.py b/ietf/doc/views_status_change.py index 
33b822348a..2bccc213c4 100644 --- a/ietf/doc/views_status_change.py +++ b/ietf/doc/views_status_change.py @@ -160,9 +160,11 @@ def save(self, doc): filename = Path(settings.STATUS_CHANGE_PATH) / basename with io.open(filename, 'w', encoding='utf-8') as destination: if self.cleaned_data['txt']: - destination.write(self.cleaned_data['txt']) + content = self.cleaned_data['txt'] else: - destination.write(self.cleaned_data['content']) + content = self.cleaned_data['content'] + destination.write(content) + doc.store_str(basename, content) try: ftp_filename = Path(settings.FTP_DIR) / "status-changes" / basename os.link(filename, ftp_filename) # Path.hardlink is not available until 3.10 diff --git a/ietf/group/tasks.py b/ietf/group/tasks.py index 8b4c994ba1..693aafb385 100644 --- a/ietf/group/tasks.py +++ b/ietf/group/tasks.py @@ -10,6 +10,7 @@ from django.conf import settings from django.template.loader import render_to_string +from ietf.doc.storage_utils import store_file from ietf.utils import log from .models import Group @@ -43,6 +44,11 @@ def generate_wg_charters_files_task(): encoding="utf8", ) + with charters_file.open("rb") as f: + store_file("indexes", "1wg-charters.txt", f, allow_overwrite=True) + with charters_by_acronym_file.open("rb") as f: + store_file("indexes", "1wg-charters-by-acronym.txt", f, allow_overwrite=True) + charter_copy_dests = [ getattr(settings, "CHARTER_COPY_PATH", None), getattr(settings, "CHARTER_COPY_OTHER_PATH", None), @@ -102,3 +108,8 @@ def generate_wg_summary_files_task(): ), encoding="utf8", ) + + with summary_file.open("rb") as f: + store_file("indexes", "1wg-summary.txt", f, allow_overwrite=True) + with summary_by_acronym_file.open("rb") as f: + store_file("indexes", "1wg-summary-by-acronym.txt", f, allow_overwrite=True) diff --git a/ietf/group/tests_info.py b/ietf/group/tests_info.py index 32d919c779..aaf937ee43 100644 --- a/ietf/group/tests_info.py +++ b/ietf/group/tests_info.py @@ -29,6 +29,7 @@ from ietf.community.utils import 
reset_name_contains_index_for_rule from ietf.doc.factories import WgDraftFactory, IndividualDraftFactory, CharterFactory, BallotDocEventFactory from ietf.doc.models import Document, DocEvent, State +from ietf.doc.storage_utils import retrieve_str from ietf.doc.utils_charter import charter_name_for_group from ietf.group.admin import GroupForm as AdminGroupForm from ietf.group.factories import (GroupFactory, RoleFactory, GroupEventFactory, @@ -303,20 +304,26 @@ def test_generate_wg_summary_files_task(self): generate_wg_summary_files_task() - summary_by_area_contents = ( - Path(settings.GROUP_SUMMARY_PATH) / "1wg-summary.txt" - ).read_text(encoding="utf8") - self.assertIn(group.parent.name, summary_by_area_contents) - self.assertIn(group.acronym, summary_by_area_contents) - self.assertIn(group.name, summary_by_area_contents) - self.assertIn(chair.address, summary_by_area_contents) - - summary_by_acronym_contents = ( - Path(settings.GROUP_SUMMARY_PATH) / "1wg-summary-by-acronym.txt" - ).read_text(encoding="utf8") - self.assertIn(group.acronym, summary_by_acronym_contents) - self.assertIn(group.name, summary_by_acronym_contents) - self.assertIn(chair.address, summary_by_acronym_contents) + for summary_by_area_contents in [ + ( + Path(settings.GROUP_SUMMARY_PATH) / "1wg-summary.txt" + ).read_text(encoding="utf8"), + retrieve_str("indexes", "1wg-summary.txt") + ]: + self.assertIn(group.parent.name, summary_by_area_contents) + self.assertIn(group.acronym, summary_by_area_contents) + self.assertIn(group.name, summary_by_area_contents) + self.assertIn(chair.address, summary_by_area_contents) + + for summary_by_acronym_contents in [ + ( + Path(settings.GROUP_SUMMARY_PATH) / "1wg-summary-by-acronym.txt" + ).read_text(encoding="utf8"), + retrieve_str("indexes", "1wg-summary-by-acronym.txt") + ]: + self.assertIn(group.acronym, summary_by_acronym_contents) + self.assertIn(group.name, summary_by_acronym_contents) + self.assertIn(chair.address, summary_by_acronym_contents) def 
test_chartering_groups(self): group = CharterFactory(group__type_id='wg',group__parent=GroupFactory(type_id='area'),states=[('charter','intrev')]).group diff --git a/ietf/idindex/tasks.py b/ietf/idindex/tasks.py index 5e7e193bba..2f5f1871d7 100644 --- a/ietf/idindex/tasks.py +++ b/ietf/idindex/tasks.py @@ -15,6 +15,8 @@ from django.conf import settings +from ietf.doc.storage_utils import store_file + from .index import all_id_txt, all_id2_txt, id_index_txt @@ -38,6 +40,8 @@ def move_into_place(self, src_path: Path, dest_path: Path, hardlink_dirs: List[P target = path / dest_path.name target.unlink(missing_ok=True) os.link(dest_path, target) # until python>=3.10 + with dest_path.open("rb") as f: + store_file("indexes", dest_path.name, f, allow_overwrite=True) def cleanup(self): for tf_path in self.cleanup_list: diff --git a/ietf/idindex/tests.py b/ietf/idindex/tests.py index 44abf805f0..5cc7a7b3bb 100644 --- a/ietf/idindex/tests.py +++ b/ietf/idindex/tests.py @@ -15,6 +15,7 @@ from ietf.doc.factories import WgDraftFactory, RfcFactory from ietf.doc.models import Document, RelatedDocument, State, LastCallDocEvent, NewRevisionDocEvent +from ietf.doc.storage_utils import retrieve_str from ietf.group.factories import GroupFactory from ietf.name.models import DocRelationshipName from ietf.idindex.index import all_id_txt, all_id2_txt, id_index_txt @@ -203,5 +204,9 @@ def test_temp_file_manager(self): self.assertFalse(path2.exists()) # left behind # check destination contents and permissions self.assertEqual(dest.read_text(), "yay") + self.assertEqual( + retrieve_str("indexes", "yay.txt"), + "yay" + ) self.assertEqual(dest.stat().st_mode & 0o777, 0o644) self.assertTrue(dest.samefile(other_path / "yay.txt")) diff --git a/ietf/liaisons/forms.py b/ietf/liaisons/forms.py index 1d91041b25..1af29044b3 100644 --- a/ietf/liaisons/forms.py +++ b/ietf/liaisons/forms.py @@ -379,6 +379,8 @@ def save_attachments(self): attach_file = io.open(os.path.join(settings.LIAISON_ATTACH_PATH, 
attach.name + extension), 'wb') attach_file.write(attached_file.read()) attach_file.close() + attached_file.seek(0) + attach.store_file(attach.uploaded_filename, attached_file) if not self.is_new: # create modified event diff --git a/ietf/liaisons/tests.py b/ietf/liaisons/tests.py index a0186f6a01..1742687f14 100644 --- a/ietf/liaisons/tests.py +++ b/ietf/liaisons/tests.py @@ -19,6 +19,7 @@ from io import StringIO from pyquery import PyQuery +from ietf.doc.storage_utils import retrieve_str from ietf.utils.test_utils import TestCase, login_testing_unauthorized from ietf.utils.mail import outbox @@ -414,7 +415,8 @@ def test_edit_liaison(self): # edit attachments_before = liaison.attachments.count() - test_file = StringIO("hello world") + test_content = "hello world" + test_file = StringIO(test_content) test_file.name = "unnamed" r = self.client.post(url, dict(from_groups=str(from_group.pk), @@ -452,9 +454,12 @@ def test_edit_liaison(self): self.assertEqual(attachment.title, "attachment") with (Path(settings.LIAISON_ATTACH_PATH) / attachment.uploaded_filename).open() as f: written_content = f.read() + self.assertEqual(written_content, test_content) + self.assertEqual( + retrieve_str(attachment.type_id, attachment.uploaded_filename), + test_content, + ) - test_file.seek(0) - self.assertEqual(written_content, test_file.read()) def test_incoming_access(self): '''Ensure only Secretariat, Liaison Managers, and Authorized Individuals @@ -704,7 +709,8 @@ def test_add_incoming_liaison(self): # add new mailbox_before = len(outbox) - test_file = StringIO("hello world") + test_content = "hello world" + test_file = StringIO(test_content) test_file.name = "unnamed" from_groups = [ str(g.pk) for g in Group.objects.filter(type="sdo") ] to_group = Group.objects.get(acronym="mars") @@ -756,6 +762,11 @@ def test_add_incoming_liaison(self): self.assertEqual(attachment.title, "attachment") with (Path(settings.LIAISON_ATTACH_PATH) / attachment.uploaded_filename).open() as f: 
written_content = f.read() + self.assertEqual(written_content, test_content) + self.assertEqual( + retrieve_str(attachment.type_id, attachment.uploaded_filename), + test_content + ) test_file.seek(0) self.assertEqual(written_content, test_file.read()) @@ -783,7 +794,8 @@ def test_add_outgoing_liaison(self): # add new mailbox_before = len(outbox) - test_file = StringIO("hello world") + test_content = "hello world" + test_file = StringIO(test_content) test_file.name = "unnamed" from_group = Group.objects.get(acronym="mars") to_group = Group.objects.filter(type="sdo")[0] @@ -835,9 +847,11 @@ def test_add_outgoing_liaison(self): self.assertEqual(attachment.title, "attachment") with (Path(settings.LIAISON_ATTACH_PATH) / attachment.uploaded_filename).open() as f: written_content = f.read() - - test_file.seek(0) - self.assertEqual(written_content, test_file.read()) + self.assertEqual(written_content, test_content) + self.assertEqual( + retrieve_str(attachment.type_id, attachment.uploaded_filename), + test_content + ) self.assertEqual(len(outbox), mailbox_before + 1) self.assertTrue("Liaison Statement" in outbox[-1]["Subject"]) @@ -882,7 +896,8 @@ def test_liaison_add_attachment(self): # get minimum edit post data - file = StringIO('dummy file') + test_data = "dummy file" + file = StringIO(test_data) file.name = "upload.txt" post_data = dict( from_groups = ','.join([ str(x.pk) for x in liaison.from_groups.all() ]), @@ -909,6 +924,11 @@ def test_liaison_add_attachment(self): self.assertEqual(liaison.attachments.count(),1) event = liaison.liaisonstatementevent_set.order_by('id').last() self.assertTrue(event.desc.startswith('Added attachment')) + attachment = liaison.attachments.get() + self.assertEqual( + retrieve_str(attachment.type_id, attachment.uploaded_filename), + test_data + ) def test_liaison_edit_attachment(self): diff --git a/ietf/meeting/factories.py b/ietf/meeting/factories.py index 69c1f0421b..eb36e9e756 100644 --- a/ietf/meeting/factories.py +++ 
b/ietf/meeting/factories.py @@ -9,6 +9,7 @@ from django.core.files.base import ContentFile from django.db.models import Q +from ietf.doc.storage_utils import store_str from ietf.meeting.models import (Attended, Meeting, Session, SchedulingEvent, Schedule, TimeSlot, SessionPresentation, FloorPlan, Room, SlideSubmission, Constraint, MeetingHost, ProceedingsMaterial) @@ -239,6 +240,10 @@ class Meta: make_file = factory.PostGeneration( lambda obj, create, extracted, **kwargs: open(obj.staged_filepath(),'a').close() ) + + store_submission = factory.PostGeneration( + lambda obj, create, extracted, **kwargs: store_str("staging", obj.filename, "") + ) class ConstraintFactory(factory.django.DjangoModelFactory): class Meta: diff --git a/ietf/meeting/forms.py b/ietf/meeting/forms.py index 3b66d2cd29..e1d1e90b8d 100644 --- a/ietf/meeting/forms.py +++ b/ietf/meeting/forms.py @@ -361,6 +361,7 @@ def save_agenda(self): os.makedirs(directory) with io.open(path, "w", encoding='utf-8') as file: file.write(self.cleaned_data['agenda']) + doc.store_str(doc.uploaded_filename, self.cleaned_data['agenda']) class InterimAnnounceForm(forms.ModelForm): diff --git a/ietf/meeting/helpers.py b/ietf/meeting/helpers.py index 7f1c85990e..39d271ae6b 100644 --- a/ietf/meeting/helpers.py +++ b/ietf/meeting/helpers.py @@ -649,6 +649,11 @@ def read_session_file(type, num, doc): def read_agenda_file(num, doc): return read_session_file('agenda', num, doc) +# TODO-BLOBSTORE: this is _yet another_ draft derived variant created when users +# ask for drafts from the meeting agenda page. Consider whether to refactor this +# now to not call out to external binaries, and consider whether we need this extra +# format at all in the draft blobstore. 
if so, it would probably be stored under +# something like plainpdf/ def convert_draft_to_pdf(doc_name): inpath = os.path.join(settings.IDSUBMIT_REPOSITORY_PATH, doc_name + ".txt") outpath = os.path.join(settings.INTERNET_DRAFT_PDF_PATH, doc_name + ".pdf") diff --git a/ietf/meeting/migrations/0010_alter_floorplan_image_alter_meetinghost_logo.py b/ietf/meeting/migrations/0010_alter_floorplan_image_alter_meetinghost_logo.py new file mode 100644 index 0000000000..594a1a4048 --- /dev/null +++ b/ietf/meeting/migrations/0010_alter_floorplan_image_alter_meetinghost_logo.py @@ -0,0 +1,56 @@ +# Copyright The IETF Trust 2025, All Rights Reserved + +from django.db import migrations, models +import ietf.meeting.models +import ietf.utils.fields +import ietf.utils.storage +import ietf.utils.validators + + +class Migration(migrations.Migration): + + dependencies = [ + ("meeting", "0009_session_meetecho_recording_name"), + ] + + operations = [ + migrations.AlterField( + model_name="floorplan", + name="image", + field=models.ImageField( + blank=True, + default=None, + storage=ietf.utils.storage.BlobShadowFileSystemStorage( + kind="", location=None + ), + upload_to=ietf.meeting.models.floorplan_path, + ), + ), + migrations.AlterField( + model_name="meetinghost", + name="logo", + field=ietf.utils.fields.MissingOkImageField( + height_field="logo_height", + storage=ietf.utils.storage.BlobShadowFileSystemStorage( + kind="", location=None + ), + upload_to=ietf.meeting.models._host_upload_path, + validators=[ + ietf.utils.validators.MaxImageSizeValidator(400, 400), + ietf.utils.validators.WrappedValidator( + ietf.utils.validators.validate_file_size, True + ), + ietf.utils.validators.WrappedValidator( + ietf.utils.validators.validate_file_extension, + [".png", ".jpg", ".jpeg"], + ), + ietf.utils.validators.WrappedValidator( + ietf.utils.validators.validate_mime_type, + ["image/jpeg", "image/png"], + True, + ), + ], + width_field="logo_width", + ), + ), + ] diff --git 
a/ietf/meeting/models.py b/ietf/meeting/models.py index 8c6fb97413..5284420731 100644 --- a/ietf/meeting/models.py +++ b/ietf/meeting/models.py @@ -39,7 +39,7 @@ from ietf.person.models import Person from ietf.utils.decorators import memoize from ietf.utils.history import find_history_replacements_active_at, find_history_active_at -from ietf.utils.storage import NoLocationMigrationFileSystemStorage +from ietf.utils.storage import BlobShadowFileSystemStorage from ietf.utils.text import xslugify from ietf.utils.timezone import datetime_from_date, date_today from ietf.utils.models import ForeignKey @@ -527,7 +527,12 @@ class FloorPlan(models.Model): modified= models.DateTimeField(auto_now=True) meeting = ForeignKey(Meeting) order = models.SmallIntegerField() - image = models.ImageField(storage=NoLocationMigrationFileSystemStorage(), upload_to=floorplan_path, blank=True, default=None) + image = models.ImageField( + storage=BlobShadowFileSystemStorage(kind="floorplan"), + upload_to=floorplan_path, + blank=True, + default=None, + ) # class Meta: ordering = ['-id',] @@ -1431,8 +1436,12 @@ class MeetingHost(models.Model): """Meeting sponsor""" meeting = ForeignKey(Meeting, related_name='meetinghosts') name = models.CharField(max_length=255, blank=False) + # TODO-BLOBSTORE - capture these logos and look for other ImageField like model fields. 
logo = MissingOkImageField( - storage=NoLocationMigrationFileSystemStorage(location=settings.MEETINGHOST_LOGO_PATH), + storage=BlobShadowFileSystemStorage( + kind="meetinghostlogo", + location=settings.MEETINGHOST_LOGO_PATH, + ), upload_to=_host_upload_path, width_field='logo_width', height_field='logo_height', diff --git a/ietf/meeting/tests_views.py b/ietf/meeting/tests_views.py index 581725dbc8..848c9b7723 100644 --- a/ietf/meeting/tests_views.py +++ b/ietf/meeting/tests_views.py @@ -38,6 +38,7 @@ import debug # pyflakes:ignore from ietf.doc.models import Document, NewRevisionDocEvent +from ietf.doc.storage_utils import exists_in_storage, remove_from_storage, retrieve_bytes, retrieve_str from ietf.group.models import Group, Role, GroupFeatures from ietf.group.utils import can_manage_group from ietf.person.models import Person @@ -55,6 +56,7 @@ from ietf.name.models import SessionStatusName, ImportantDateName, RoleName, ProceedingsMaterialTypeName from ietf.utils.decorators import skip_coverage from ietf.utils.mail import outbox, empty_outbox, get_payload_text +from ietf.utils.test_runner import TestBlobstoreManager from ietf.utils.test_utils import TestCase, login_testing_unauthorized, unicontent from ietf.utils.timezone import date_today, time_now @@ -112,7 +114,7 @@ def setUp(self): # files will upload to the locations specified in settings.py. 
# Note that this will affect any use of the storage class in # meeting.models - i.e., FloorPlan.image and MeetingHost.logo - self.patcher = patch('ietf.meeting.models.NoLocationMigrationFileSystemStorage.base_location', + self.patcher = patch('ietf.meeting.models.BlobShadowFileSystemStorage.base_location', new_callable=PropertyMock) mocked = self.patcher.start() mocked.return_value = self.storage_dir @@ -5228,6 +5230,7 @@ def test_interim_request_options(self): def do_interim_request_single_virtual(self, emails_expected): make_meeting_test_data() + TestBlobstoreManager().emptyTestBlobstores() group = Group.objects.get(acronym='mars') date = date_today() + datetime.timedelta(days=30) time = time_now().replace(microsecond=0,second=0) @@ -5278,6 +5281,12 @@ def do_interim_request_single_virtual(self, emails_expected): doc = session.materials.first() path = os.path.join(doc.get_file_path(),doc.filename_with_rev()) self.assertTrue(os.path.exists(path)) + with Path(path).open() as f: + self.assertEqual(f.read(), agenda) + self.assertEqual( + retrieve_str("agenda",doc.uploaded_filename), + agenda + ) # check notices to secretariat and chairs self.assertEqual(len(outbox), length_before + emails_expected) return meeting @@ -5299,6 +5308,7 @@ def test_interim_request_single_virtual_settings_approval_not_required(self): def test_interim_request_single_in_person(self): make_meeting_test_data() + TestBlobstoreManager().emptyTestBlobstores() group = Group.objects.get(acronym='mars') date = date_today() + datetime.timedelta(days=30) time = time_now().replace(microsecond=0,second=0) @@ -5345,6 +5355,10 @@ def test_interim_request_single_in_person(self): timeslot = session.official_timeslotassignment().timeslot self.assertEqual(timeslot.time,dt) self.assertEqual(timeslot.duration,duration) + self.assertEqual( + retrieve_str("agenda",session.agenda().uploaded_filename), + agenda + ) def test_interim_request_multi_day(self): make_meeting_test_data() @@ -5412,6 +5426,11 @@ def 
test_interim_request_multi_day(self): self.assertEqual(timeslot.time,dt2) self.assertEqual(timeslot.duration,duration) self.assertEqual(session.agenda_note,agenda_note) + for session in meeting.session_set.all(): + self.assertEqual( + retrieve_str("agenda",session.agenda().uploaded_filename), + agenda + ) def test_interim_request_multi_day_non_consecutive(self): make_meeting_test_data() @@ -5474,6 +5493,7 @@ def test_interim_request_multi_day_cancel(self): def test_interim_request_series(self): make_meeting_test_data() + TestBlobstoreManager().emptyTestBlobstores() meeting_count_before = Meeting.objects.filter(type='interim').count() date = date_today() + datetime.timedelta(days=30) if (date.month, date.day) == (12, 31): @@ -5561,6 +5581,11 @@ def test_interim_request_series(self): self.assertEqual(timeslot.time,dt2) self.assertEqual(timeslot.duration,duration) self.assertEqual(session.agenda_note,agenda_note) + for session in meeting.session_set.all(): + self.assertEqual( + retrieve_str("agenda",session.agenda().uploaded_filename), + agenda + ) # test_interim_pending subsumed by test_appears_on_pending @@ -6099,6 +6124,7 @@ def strfdelta(self, tdelta, fmt): def test_interim_request_edit_agenda_updates_doc(self): """Updating the agenda through the request edit form should update the doc correctly""" make_interim_test_data() + TestBlobstoreManager().emptyTestBlobstores() meeting = add_event_info_to_session_qs(Session.objects.filter(meeting__type='interim', group__acronym='mars')).filter(current_status='sched').first().meeting group = meeting.session_set.first().group url = urlreverse('ietf.meeting.views.interim_request_edit', kwargs={'number': meeting.number}) @@ -6134,6 +6160,10 @@ def test_interim_request_edit_agenda_updates_doc(self): self.assertNotEqual(agenda_doc.uploaded_filename, uploaded_filename_before, 'Uploaded filename should be updated') with (Path(agenda_doc.get_file_path()) / agenda_doc.uploaded_filename).open() as f: self.assertEqual(f.read(), 
'modified agenda contents', 'New agenda contents should be saved') + self.assertEqual( + retrieve_str(agenda_doc.type_id, agenda_doc.uploaded_filename), + "modified agenda contents" + ) def test_interim_request_details_permissions(self): make_interim_test_data() @@ -6354,12 +6384,14 @@ def test_upload_bluesheets(self): q = PyQuery(r.content) self.assertIn('Upload', str(q("title"))) self.assertFalse(session.presentations.exists()) - test_file = StringIO('%PDF-1.4\n%âãÏÓ\nthis is some text for a test') + test_content = '%PDF-1.4\n%âãÏÓ\nthis is some text for a test' + test_file = StringIO(test_content) test_file.name = "not_really.pdf" r = self.client.post(url,dict(file=test_file)) self.assertEqual(r.status_code, 302) bs_doc = session.presentations.filter(document__type_id='bluesheets').first().document self.assertEqual(bs_doc.rev,'00') + self.assertEqual(retrieve_str("bluesheets", f"{bs_doc.name}-{bs_doc.rev}.pdf"), test_content) r = self.client.get(url) self.assertEqual(r.status_code, 200) q = PyQuery(r.content) @@ -6389,12 +6421,14 @@ def test_upload_bluesheets_interim(self): q = PyQuery(r.content) self.assertIn('Upload', str(q("title"))) self.assertFalse(session.presentations.exists()) - test_file = StringIO('%PDF-1.4\n%âãÏÓ\nthis is some text for a test') + test_content = '%PDF-1.4\n%âãÏÓ\nthis is some text for a test' + test_file = StringIO(test_content) test_file.name = "not_really.pdf" r = self.client.post(url,dict(file=test_file)) self.assertEqual(r.status_code, 302) bs_doc = session.presentations.filter(document__type_id='bluesheets').first().document self.assertEqual(bs_doc.rev,'00') + self.assertEqual(retrieve_str("bluesheets", f"{bs_doc.name}-{bs_doc.rev}.pdf"), test_content) def test_upload_bluesheets_interim_chair_access(self): make_meeting_test_data() @@ -6467,27 +6501,36 @@ def test_upload_minutes_agenda(self): text = doc.text() self.assertIn('Some text', text) self.assertNotIn('
', text) - + text = retrieve_str(doctype, f"{doc.name}-{doc.rev}.html") + self.assertIn('Some text', text) + self.assertNotIn('
', text) + # txt upload - test_file = BytesIO(b'This is some text for a test, with the word\nvirtual at the beginning of a line.') + test_bytes = b'This is some text for a test, with the word\nvirtual at the beginning of a line.' + test_file = BytesIO(test_bytes) test_file.name = "some.txt" r = self.client.post(url,dict(submission_method="upload",file=test_file,apply_to_all=False)) self.assertEqual(r.status_code, 302) doc = session.presentations.filter(document__type_id=doctype).first().document self.assertEqual(doc.rev,'01') self.assertFalse(session2.presentations.filter(document__type_id=doctype)) + retrieved_bytes = retrieve_bytes(doctype, f"{doc.name}-{doc.rev}.txt") + self.assertEqual(retrieved_bytes, test_bytes) r = self.client.get(url) self.assertEqual(r.status_code, 200) q = PyQuery(r.content) self.assertIn('Revise', str(q("Title"))) - test_file = BytesIO(b'this is some different text for a test') + test_bytes = b'this is some different text for a test' + test_file = BytesIO(test_bytes) test_file.name = "also_some.txt" r = self.client.post(url,dict(submission_method="upload",file=test_file,apply_to_all=True)) self.assertEqual(r.status_code, 302) doc = Document.objects.get(pk=doc.pk) self.assertEqual(doc.rev,'02') self.assertTrue(session2.presentations.filter(document__type_id=doctype)) + retrieved_bytes = retrieve_bytes(doctype, f"{doc.name}-{doc.rev}.txt") + self.assertEqual(retrieved_bytes, test_bytes) # Test bad encoding test_file = BytesIO('

Title

Some\x93text
'.encode('latin1')) @@ -6540,12 +6583,15 @@ def test_upload_minutes_agenda_interim(self): q = PyQuery(r.content) self.assertIn('Upload', str(q("title"))) self.assertFalse(session.presentations.filter(document__type_id=doctype)) - test_file = BytesIO(b'this is some text for a test') + test_bytes = b'this is some text for a test' + test_file = BytesIO(test_bytes) test_file.name = "not_really.txt" r = self.client.post(url,dict(submission_method="upload",file=test_file)) self.assertEqual(r.status_code, 302) doc = session.presentations.filter(document__type_id=doctype).first().document self.assertEqual(doc.rev,'00') + retrieved_bytes = retrieve_bytes(doctype, f"{doc.name}-{doc.rev}.txt") + self.assertEqual(retrieved_bytes, test_bytes) # Verify that we don't have dead links url = urlreverse('ietf.meeting.views.session_details', kwargs={'num':session.meeting.number, 'acronym': session.group.acronym}) @@ -6567,12 +6613,15 @@ def test_upload_narrativeminutes(self): q = PyQuery(r.content) self.assertIn('Upload', str(q("title"))) self.assertFalse(session.presentations.filter(document__type_id=doctype)) - test_file = BytesIO(b'this is some text for a test') + test_bytes = b'this is some text for a test' + test_file = BytesIO(test_bytes) test_file.name = "not_really.txt" r = self.client.post(url,dict(submission_method="upload",file=test_file)) self.assertEqual(r.status_code, 302) doc = session.presentations.filter(document__type_id=doctype).first().document self.assertEqual(doc.rev,'00') + retrieved_bytes = retrieve_bytes(doctype, f"{doc.name}-{doc.rev}.txt") + self.assertEqual(retrieved_bytes, test_bytes) # Verify that we don't have dead links url = urlreverse('ietf.meeting.views.session_details', kwargs={'num':session.meeting.number, 'acronym': session.group.acronym}) @@ -6597,18 +6646,22 @@ def test_enter_agenda(self): self.assertRedirects(r, redirect_url) doc = session.presentations.filter(document__type_id='agenda').first().document self.assertEqual(doc.rev,'00') + 
self.assertEqual(retrieve_str("agenda",f"{doc.name}-{doc.rev}.md"), test_text) r = self.client.get(url) self.assertEqual(r.status_code, 200) q = PyQuery(r.content) self.assertIn('Revise', str(q("Title"))) - test_file = BytesIO(b'Upload after enter') + test_bytes = b'Upload after enter' + test_file = BytesIO(test_bytes) test_file.name = "some.txt" r = self.client.post(url,dict(submission_method="upload",file=test_file)) self.assertRedirects(r, redirect_url) doc = Document.objects.get(pk=doc.pk) self.assertEqual(doc.rev,'01') + retrieved_bytes = retrieve_bytes("agenda", f"{doc.name}-{doc.rev}.txt") + self.assertEqual(retrieved_bytes, test_bytes) r = self.client.get(url) self.assertEqual(r.status_code, 200) @@ -6620,6 +6673,8 @@ def test_enter_agenda(self): self.assertRedirects(r, redirect_url) doc = Document.objects.get(pk=doc.pk) self.assertEqual(doc.rev,'02') + self.assertEqual(retrieve_str("agenda",f"{doc.name}-{doc.rev}.md"), test_text) + @override_settings(MEETECHO_API_CONFIG="fake settings") # enough to trigger API calls @patch("ietf.meeting.views.SlidesManager") @@ -6635,7 +6690,8 @@ def test_upload_slides(self, mock_slides_manager_cls): q = PyQuery(r.content) self.assertIn('Upload', str(q("title"))) self.assertFalse(session1.presentations.filter(document__type_id='slides')) - test_file = BytesIO(b'this is not really a slide') + test_bytes = b'this is not really a slide' + test_file = BytesIO(test_bytes) test_file.name = 'not_really.txt' r = self.client.post(url,dict(file=test_file,title='a test slide file',apply_to_all=True,approved=True)) self.assertEqual(r.status_code, 302) @@ -6647,6 +6703,7 @@ def test_upload_slides(self, mock_slides_manager_cls): self.assertEqual(mock_slides_manager_cls.call_count, 1) self.assertEqual(mock_slides_manager_cls.call_args, call(api_config="fake settings")) self.assertEqual(mock_slides_manager_cls.return_value.add.call_count, 2) + self.assertEqual(retrieve_bytes("slides", f"{sp.document.name}-{sp.document.rev}.txt"), 
test_bytes) # don't care which order they were called in, just that both sessions were updated self.assertCountEqual( mock_slides_manager_cls.return_value.add.call_args_list, @@ -6658,7 +6715,8 @@ def test_upload_slides(self, mock_slides_manager_cls): mock_slides_manager_cls.reset_mock() url = urlreverse('ietf.meeting.views.upload_session_slides',kwargs={'num':session2.meeting.number,'session_id':session2.id}) - test_file = BytesIO(b'some other thing still not slidelike') + test_bytes = b'some other thing still not slidelike' + test_file = BytesIO(test_bytes) test_file.name = 'also_not_really.txt' r = self.client.post(url,dict(file=test_file,title='a different slide file',apply_to_all=False,approved=True)) self.assertEqual(r.status_code, 302) @@ -6671,6 +6729,7 @@ def test_upload_slides(self, mock_slides_manager_cls): self.assertEqual(mock_slides_manager_cls.call_count, 1) self.assertEqual(mock_slides_manager_cls.call_args, call(api_config="fake settings")) self.assertEqual(mock_slides_manager_cls.return_value.add.call_count, 1) + self.assertEqual(retrieve_bytes("slides", f"{sp.document.name}-{sp.document.rev}.txt"), test_bytes) self.assertEqual( mock_slides_manager_cls.return_value.add.call_args, call(session=session2, slides=sp.document, order=2), @@ -6682,7 +6741,8 @@ def test_upload_slides(self, mock_slides_manager_cls): self.assertTrue(r.status_code, 200) q = PyQuery(r.content) self.assertIn('Revise', str(q("title"))) - test_file = BytesIO(b'new content for the second slide deck') + test_bytes = b'new content for the second slide deck' + test_file = BytesIO(test_bytes) test_file.name = 'doesnotmatter.txt' r = self.client.post(url,dict(file=test_file,title='rename the presentation',apply_to_all=False, approved=True)) self.assertEqual(r.status_code, 302) @@ -6692,6 +6752,7 @@ def test_upload_slides(self, mock_slides_manager_cls): self.assertEqual(replacement_sp.rev,'01') self.assertEqual(replacement_sp.document.rev,'01') 
self.assertEqual(mock_slides_manager_cls.call_count, 1) + self.assertEqual(retrieve_bytes("slides", f"{replacement_sp.document.name}-{replacement_sp.document.rev}.txt"), test_bytes) self.assertEqual(mock_slides_manager_cls.call_args, call(api_config="fake settings")) self.assertEqual(mock_slides_manager_cls.return_value.revise.call_count, 1) self.assertEqual( @@ -6771,7 +6832,6 @@ def test_remove_sessionpresentation(self, mock_slides_manager_cls): self.assertEqual(2, agenda.docevent_set.count()) self.assertFalse(mock_slides_manager_cls.called) - def test_propose_session_slides(self): for type_id in ['ietf','interim']: session = SessionFactory(meeting__type_id=type_id) @@ -6798,7 +6858,8 @@ def test_propose_session_slides(self): login_testing_unauthorized(self,newperson.user.username,upload_url) r = self.client.get(upload_url) self.assertEqual(r.status_code,200) - test_file = BytesIO(b'this is not really a slide') + test_bytes = b'this is not really a slide' + test_file = BytesIO(test_bytes) test_file.name = 'not_really.txt' empty_outbox() r = self.client.post(upload_url,dict(file=test_file,title='a test slide file',apply_to_all=True,approved=False)) @@ -6806,6 +6867,10 @@ def test_propose_session_slides(self): session = Session.objects.get(pk=session.pk) self.assertEqual(session.slidesubmission_set.count(),1) self.assertEqual(len(outbox),1) + self.assertEqual( + retrieve_bytes("staging", session.slidesubmission_set.get().filename), + test_bytes + ) r = self.client.get(session_overview_url) self.assertEqual(r.status_code, 200) @@ -6825,13 +6890,20 @@ def test_propose_session_slides(self): login_testing_unauthorized(self,chair.user.username,upload_url) r = self.client.get(upload_url) self.assertEqual(r.status_code,200) - test_file = BytesIO(b'this is not really a slide either') + test_bytes = b'this is not really a slide either' + test_file = BytesIO(test_bytes) test_file.name = 'again_not_really.txt' empty_outbox() r = 
self.client.post(upload_url,dict(file=test_file,title='a selfapproved test slide file',apply_to_all=True,approved=True)) self.assertEqual(r.status_code, 302) self.assertEqual(len(outbox),0) self.assertEqual(session.slidesubmission_set.count(),2) + sp = session.presentations.get(document__title__contains="selfapproved") + self.assertFalse(exists_in_storage("staging", sp.document.uploaded_filename)) + self.assertEqual( + retrieve_bytes("slides", sp.document.uploaded_filename), + test_bytes + ) self.client.logout() self.client.login(username=chair.user.username, password=chair.user.username+"+password") @@ -6854,6 +6926,8 @@ def test_disapprove_proposed_slides(self): self.assertEqual(r.status_code,302) self.assertEqual(SlideSubmission.objects.filter(status__slug = 'rejected').count(), 1) self.assertEqual(SlideSubmission.objects.filter(status__slug = 'pending').count(), 0) + if submission.filename is not None and submission.filename != "": + self.assertFalse(exists_in_storage("staging", submission.filename)) r = self.client.get(url) self.assertEqual(r.status_code, 200) self.assertRegex(r.content.decode(), r"These\s+slides\s+have\s+already\s+been\s+rejected") @@ -6872,6 +6946,7 @@ def test_approve_proposed_slides(self, mock_slides_manager_cls): r = self.client.get(url) self.assertEqual(r.status_code,200) empty_outbox() + self.assertTrue(exists_in_storage("staging", submission.filename)) r = self.client.post(url,dict(title='different title',approve='approve')) self.assertEqual(r.status_code,302) self.assertEqual(SlideSubmission.objects.filter(status__slug = 'pending').count(), 0) @@ -6881,6 +6956,8 @@ def test_approve_proposed_slides(self, mock_slides_manager_cls): self.assertIsNotNone(submission.doc) self.assertEqual(session.presentations.count(),1) self.assertEqual(session.presentations.first().document.title,'different title') + self.assertTrue(exists_in_storage("slides", submission.doc.uploaded_filename)) + self.assertFalse(exists_in_storage("staging", 
submission.filename)) self.assertEqual(mock_slides_manager_cls.call_count, 1) self.assertEqual(mock_slides_manager_cls.call_args, call(api_config="fake settings")) self.assertEqual(mock_slides_manager_cls.return_value.add.call_count, 1) @@ -6900,6 +6977,7 @@ def test_approve_proposed_slides(self, mock_slides_manager_cls): @override_settings(MEETECHO_API_CONFIG="fake settings") # enough to trigger API calls @patch("ietf.meeting.views.SlidesManager") def test_approve_proposed_slides_multisession_apply_one(self, mock_slides_manager_cls): + TestBlobstoreManager().emptyTestBlobstores() submission = SlideSubmissionFactory(session__meeting__type_id='ietf') session1 = submission.session session2 = SessionFactory(group=submission.session.group, meeting=submission.session.meeting) @@ -6928,6 +7006,7 @@ def test_approve_proposed_slides_multisession_apply_one(self, mock_slides_manage @override_settings(MEETECHO_API_CONFIG="fake settings") # enough to trigger API calls @patch("ietf.meeting.views.SlidesManager") def test_approve_proposed_slides_multisession_apply_all(self, mock_slides_manager_cls): + TestBlobstoreManager().emptyTestBlobstores() submission = SlideSubmissionFactory(session__meeting__type_id='ietf') session1 = submission.session session2 = SessionFactory(group=submission.session.group, meeting=submission.session.meeting) @@ -6972,12 +7051,15 @@ def test_submit_and_approve_multiple_versions(self, mock_slides_manager_cls): submission = SlideSubmission.objects.get(session=session) + self.assertTrue(exists_in_storage("staging", submission.filename)) approve_url = urlreverse('ietf.meeting.views.approve_proposed_slides', kwargs={'slidesubmission_id':submission.pk,'num':submission.session.meeting.number}) login_testing_unauthorized(self, chair.user.username, approve_url) r = self.client.post(approve_url,dict(title=submission.title,approve='approve')) submission.refresh_from_db() self.assertEqual(r.status_code,302) self.client.logout() + 
self.assertFalse(exists_in_storage("staging", submission.filename)) + self.assertTrue(exists_in_storage("slides", submission.doc.uploaded_filename)) self.assertEqual(mock_slides_manager_cls.call_count, 1) self.assertEqual(mock_slides_manager_cls.call_args, call(api_config="fake settings")) self.assertEqual(mock_slides_manager_cls.return_value.add.call_count, 1) @@ -7003,11 +7085,16 @@ def test_submit_and_approve_multiple_versions(self, mock_slides_manager_cls): (first_submission, second_submission) = SlideSubmission.objects.filter(session=session, status__slug = 'pending').order_by('id') + self.assertTrue(exists_in_storage("staging", first_submission.filename)) + self.assertTrue(exists_in_storage("staging", second_submission.filename)) approve_url = urlreverse('ietf.meeting.views.approve_proposed_slides', kwargs={'slidesubmission_id':second_submission.pk,'num':second_submission.session.meeting.number}) login_testing_unauthorized(self, chair.user.username, approve_url) r = self.client.post(approve_url,dict(title=submission.title,approve='approve')) first_submission.refresh_from_db() second_submission.refresh_from_db() + self.assertTrue(exists_in_storage("staging", first_submission.filename)) + self.assertFalse(exists_in_storage("staging", second_submission.filename)) + self.assertTrue(exists_in_storage("slides", second_submission.doc.uploaded_filename)) self.assertEqual(r.status_code,302) self.assertEqual(mock_slides_manager_cls.call_count, 1) self.assertEqual(mock_slides_manager_cls.call_args, call(api_config="fake settings")) @@ -7024,6 +7111,7 @@ def test_submit_and_approve_multiple_versions(self, mock_slides_manager_cls): self.assertEqual(r.status_code,302) self.client.logout() self.assertFalse(mock_slides_manager_cls.called) + self.assertFalse(exists_in_storage("staging", first_submission.filename)) self.assertEqual(SlideSubmission.objects.filter(status__slug = 'pending').count(),0) self.assertEqual(SlideSubmission.objects.filter(status__slug = 
'rejected').count(),1) @@ -7114,6 +7202,10 @@ def test_imports_previewed_text(self): minutes_path = Path(self.meeting.get_materials_path()) / 'minutes' with (minutes_path / self.session.minutes().uploaded_filename).open() as f: self.assertEqual(f.read(), 'original markdown text') + self.assertEqual( + retrieve_str("minutes", self.session.minutes().uploaded_filename), + 'original markdown text' + ) def test_refuses_identical_import(self): """Should not be able to import text identical to the current revision""" @@ -7173,7 +7265,9 @@ def test_handles_missing_previous_revision_file(self): # remove the file uploaded for the first rev minutes_docs = self.session.presentations.filter(document__type='minutes') self.assertEqual(minutes_docs.count(), 1) - Path(minutes_docs.first().document.get_file_name()).unlink() + to_remove = Path(minutes_docs.first().document.get_file_name()) + to_remove.unlink() + remove_from_storage("minutes", to_remove.name) self.assertEqual(r.status_code, 302) with requests_mock.Mocker() as mock: diff --git a/ietf/meeting/utils.py b/ietf/meeting/utils.py index 92bae5ac23..9603278399 100644 --- a/ietf/meeting/utils.py +++ b/ietf/meeting/utils.py @@ -24,6 +24,7 @@ import debug # pyflakes:ignore from ietf.dbtemplate.models import DBTemplate +from ietf.doc.storage_utils import store_bytes, store_str from ietf.meeting.models import (Session, SchedulingEvent, TimeSlot, Constraint, SchedTimeSessAssignment, SessionPresentation, Attended) from ietf.doc.models import Document, State, NewRevisionDocEvent, StateDocEvent @@ -772,7 +773,12 @@ def handle_upload_file(file, filename, meeting, subdir, request=None, encoding=N # Whole file sanitization; add back what's missing from a complete # document (sanitize will remove these). 
clean = clean_html(text) - destination.write(clean.encode("utf8")) + clean_bytes = clean.encode('utf8') + destination.write(clean_bytes) + # Assumes contents of subdir are always document type ids + # TODO-BLOBSTORE: see if we can refactor this so that the connection to the document isn't lost + # In the meantime, consider faking it by parsing filename (shudder). + store_bytes(subdir, filename.name, clean_bytes) if request and clean != text: messages.warning(request, ( @@ -783,6 +789,11 @@ def handle_upload_file(file, filename, meeting, subdir, request=None, encoding=N else: for chunk in chunks: destination.write(chunk) + file.seek(0) + if hasattr(file, "chunks"): + chunks = file.chunks() + # TODO-BLOBSTORE: See above question about refactoring + store_bytes(subdir, filename.name, b"".join(chunks)) return None @@ -809,13 +820,15 @@ def new_doc_for_session(type_id, session): session.presentations.create(document=doc,rev='00') return doc +# TODO-BLOBSTORE - consider adding doc to this signature and factoring away type_id def write_doc_for_session(session, type_id, filename, contents): filename = Path(filename) path = Path(session.meeting.get_materials_path()) / type_id path.mkdir(parents=True, exist_ok=True) with open(path / filename, "wb") as file: file.write(contents.encode('utf-8')) - return + store_str(type_id, filename.name, contents) + return None def create_recording(session, url, title=None, user=None): ''' diff --git a/ietf/meeting/views.py b/ietf/meeting/views.py index 1226e30d60..3fa605ed7e 100644 --- a/ietf/meeting/views.py +++ b/ietf/meeting/views.py @@ -52,6 +52,7 @@ from ietf.doc.fields import SearchableDocumentsField from ietf.doc.models import Document, State, DocEvent, NewRevisionDocEvent +from ietf.doc.storage_utils import remove_from_storage, retrieve_bytes, store_file from ietf.group.models import Group from ietf.group.utils import can_manage_session_materials, can_manage_some_groups, can_manage_group from ietf.person.models import Person, User 
@@ -3091,6 +3092,8 @@ def upload_session_slides(request, session_id, num, name=None): for chunk in file.chunks(): destination.write(chunk) destination.close() + file.seek(0) + store_file("staging", filename, file) submission.filename = filename submission.save() @@ -4645,7 +4648,6 @@ def err(code, text): save_err = save_bluesheet(request, session, file) if save_err: return err(400, save_err) - return HttpResponse("Done", status=200, content_type='text/plain') @@ -4957,6 +4959,8 @@ def approve_proposed_slides(request, slidesubmission_id, num): if not os.path.exists(path): os.makedirs(path) shutil.move(submission.staged_filepath(), os.path.join(path, target_filename)) + doc.store_bytes(target_filename, retrieve_bytes("staging", submission.filename)) + remove_from_storage("staging", submission.filename) post_process(doc) DocEvent.objects.create(type="approved_slides", doc=doc, rev=doc.rev, by=request.user.person, desc="Slides approved") @@ -4994,11 +4998,14 @@ def approve_proposed_slides(request, slidesubmission_id, num): # in a SlideSubmission object without a file. Handle # this case and keep processing the 'disapprove' even if # the filename doesn't exist. 
- try: - if submission.filename != None and submission.filename != '': + + if submission.filename != None and submission.filename != '': + try: os.unlink(submission.staged_filepath()) - except (FileNotFoundError, IsADirectoryError): - pass + except (FileNotFoundError, IsADirectoryError): + pass + remove_from_storage("staging", submission.filename) + acronym = submission.session.group.acronym submission.status = SlideSubmissionStatusName.objects.get(slug='rejected') submission.save() diff --git a/ietf/nomcom/models.py b/ietf/nomcom/models.py index 2ed1124c5c..c206e467bd 100644 --- a/ietf/nomcom/models.py +++ b/ietf/nomcom/models.py @@ -42,6 +42,7 @@ class ReminderDates(models.Model): class NomCom(models.Model): + # TODO-BLOBSTORE: migrate this to a database field instead of a FileField and update code accordingly public_key = models.FileField(storage=NoLocationMigrationFileSystemStorage(location=settings.NOMCOM_PUBLIC_KEYS_DIR), upload_to=upload_path_handler, blank=True, null=True) diff --git a/ietf/person/migrations/0004_alter_person_photo_alter_person_photo_thumb.py b/ietf/person/migrations/0004_alter_person_photo_alter_person_photo_thumb.py new file mode 100644 index 0000000000..f34382fa70 --- /dev/null +++ b/ietf/person/migrations/0004_alter_person_photo_alter_person_photo_thumb.py @@ -0,0 +1,38 @@ +# Copyright The IETF Trust 2025, All Rights Reserved + +from django.db import migrations, models +import ietf.utils.storage + + +class Migration(migrations.Migration): + + dependencies = [ + ("person", "0003_alter_personalapikey_endpoint"), + ] + + operations = [ + migrations.AlterField( + model_name="person", + name="photo", + field=models.ImageField( + blank=True, + default=None, + storage=ietf.utils.storage.BlobShadowFileSystemStorage( + kind="", location=None + ), + upload_to="photo", + ), + ), + migrations.AlterField( + model_name="person", + name="photo_thumb", + field=models.ImageField( + blank=True, + default=None, + 
storage=ietf.utils.storage.BlobShadowFileSystemStorage( + kind="", location=None + ), + upload_to="photo", + ), + ), + ] diff --git a/ietf/person/models.py b/ietf/person/models.py index 85989acfc1..93364478ae 100644 --- a/ietf/person/models.py +++ b/ietf/person/models.py @@ -29,7 +29,7 @@ from ietf.name.models import ExtResourceName from ietf.person.name import name_parts, initials, plain_name from ietf.utils.mail import send_mail_preformatted -from ietf.utils.storage import NoLocationMigrationFileSystemStorage +from ietf.utils.storage import BlobShadowFileSystemStorage from ietf.utils.mail import formataddr from ietf.person.name import unidecode_name from ietf.utils import log @@ -60,8 +60,18 @@ class Person(models.Model): pronouns_selectable = jsonfield.JSONCharField("Pronouns", max_length=120, blank=True, null=True, default=list ) pronouns_freetext = models.CharField(" ", max_length=30, null=True, blank=True, help_text="Optionally provide your personal pronouns. These will be displayed on your public profile page and alongside your name in Meetecho and, in future, other systems. Select any number of the checkboxes OR provide a custom string up to 30 characters.") biography = models.TextField(blank=True, help_text="Short biography for use on leadership pages. 
Use plain text or reStructuredText markup.") - photo = models.ImageField(storage=NoLocationMigrationFileSystemStorage(), upload_to=settings.PHOTOS_DIRNAME, blank=True, default=None) - photo_thumb = models.ImageField(storage=NoLocationMigrationFileSystemStorage(), upload_to=settings.PHOTOS_DIRNAME, blank=True, default=None) + photo = models.ImageField( + storage=BlobShadowFileSystemStorage(kind="photo"), + upload_to=settings.PHOTOS_DIRNAME, + blank=True, + default=None, + ) + photo_thumb = models.ImageField( + storage=BlobShadowFileSystemStorage(kind="photo"), + upload_to=settings.PHOTOS_DIRNAME, + blank=True, + default=None, + ) name_from_draft = models.CharField("Full Name (from submission)", null=True, max_length=255, editable=False, help_text="Name as found in an Internet-Draft submission.") def __str__(self): diff --git a/ietf/settings.py b/ietf/settings.py index 125127ba15..faee42237c 100644 --- a/ietf/settings.py +++ b/ietf/settings.py @@ -183,6 +183,12 @@ # Server-side static.ietf.org URL (used in pdfized) STATIC_IETF_ORG_INTERNAL = STATIC_IETF_ORG +ENABLE_BLOBSTORAGE = True + +BLOBSTORAGE_MAX_ATTEMPTS = 1 +BLOBSTORAGE_CONNECT_TIMEOUT = 2 +BLOBSTORAGE_READ_TIMEOUT = 2 + WSGI_APPLICATION = "ietf.wsgi.application" AUTHENTICATION_BACKENDS = ( 'ietf.ietfauth.backends.CaseInsensitiveModelBackend', ) @@ -736,6 +742,38 @@ def skip_unreadable_post(record): "schedule_name": r"(?P[A-Za-z0-9-:_]+)", } +STORAGES: dict[str, Any] = { + "default": {"BACKEND": "django.core.files.storage.FileSystemStorage"}, + "staticfiles": {"BACKEND": "django.contrib.staticfiles.storage.StaticFilesStorage"}, +} + +# settings_local will need to configure storages for these names +MORE_STORAGE_NAMES: list[str] = [ + "bofreq", + "charter", + "conflrev", + "active-draft", + "draft", + "slides", + "minutes", + "agenda", + "bluesheets", + "procmaterials", + "narrativeminutes", + "statement", + "statchg", + "liai-att", + "chatlog", + "polls", + "staging", + "bibxml-ids", + "indexes", + 
"floorplan", + "meetinghostlogo", + "photo", + "review", +] + # Override this in settings_local.py if needed # *_PATH variables ends with a slash/ . diff --git a/ietf/settings_test.py b/ietf/settings_test.py index 94ca22c71b..fe77152d42 100755 --- a/ietf/settings_test.py +++ b/ietf/settings_test.py @@ -14,7 +14,8 @@ import shutil import tempfile from ietf.settings import * # pyflakes:ignore -from ietf.settings import TEST_CODE_COVERAGE_CHECKER +from ietf.settings import STORAGES, TEST_CODE_COVERAGE_CHECKER, MORE_STORAGE_NAMES, BLOBSTORAGE_CONNECT_TIMEOUT, BLOBSTORAGE_READ_TIMEOUT, BLOBSTORAGE_MAX_ATTEMPTS +import botocore.config import debug # pyflakes:ignore debug.debug = True @@ -105,3 +106,30 @@ def tempdir_with_cleanup(**kwargs): 'level': 'INFO', }, } + +# Configure storages for the blob store - use env settings if present. See the --no-manage-blobstore test option. +_blob_store_endpoint_url = os.environ.get("DATATRACKER_BLOB_STORE_ENDPOINT_URL", "http://blobstore:9000") +_blob_store_access_key = os.environ.get("DATATRACKER_BLOB_STORE_ACCESS_KEY", "minio_root") +_blob_store_secret_key = os.environ.get("DATATRACKER_BLOB_STORE_SECRET_KEY", "minio_pass") +_blob_store_bucket_prefix = os.environ.get("DATATRACKER_BLOB_STORE_BUCKET_PREFIX", "test-") +_blob_store_enable_profiling = ( + os.environ.get("DATATRACKER_BLOB_STORE_ENABLE_PROFILING", "false").lower() == "true" +) +for storagename in MORE_STORAGE_NAMES: + STORAGES[storagename] = { + "BACKEND": "ietf.doc.storage_backends.CustomS3Storage", + "OPTIONS": dict( + endpoint_url=_blob_store_endpoint_url, + access_key=_blob_store_access_key, + secret_key=_blob_store_secret_key, + security_token=None, + client_config=botocore.config.Config( + signature_version="s3v4", + connect_timeout=BLOBSTORAGE_CONNECT_TIMEOUT, + read_timeout=BLOBSTORAGE_READ_TIMEOUT, + retries={"total_max_attempts": BLOBSTORAGE_MAX_ATTEMPTS}, + ), + bucket_name=f"{_blob_store_bucket_prefix}{storagename}", + 
ietf_log_blob_timing=_blob_store_enable_profiling, + ), + } diff --git a/ietf/submit/tests.py b/ietf/submit/tests.py index 6a56839177..9a993480cd 100644 --- a/ietf/submit/tests.py +++ b/ietf/submit/tests.py @@ -31,6 +31,7 @@ ReviewFactory, WgRfcFactory) from ietf.doc.models import ( Document, DocEvent, State, BallotPositionDocEvent, DocumentAuthor, SubmissionDocEvent ) +from ietf.doc.storage_utils import exists_in_storage, retrieve_str, store_str from ietf.doc.utils import create_ballot_if_not_open, can_edit_docextresources, update_action_holders from ietf.group.factories import GroupFactory, RoleFactory from ietf.group.models import Group @@ -53,6 +54,7 @@ from ietf.utils import tool_version from ietf.utils.accesstoken import generate_access_token from ietf.utils.mail import outbox, get_payload_text +from ietf.utils.test_runner import TestBlobstoreManager from ietf.utils.test_utils import login_testing_unauthorized, TestCase from ietf.utils.timezone import date_today from ietf.utils.draft import PlaintextDraft @@ -355,6 +357,7 @@ def verify_bibxml_ids_creation(self, draft): def submit_new_wg(self, formats): # submit new -> supply submitter info -> approve + TestBlobstoreManager().emptyTestBlobstores() GroupFactory(type_id='wg',acronym='ames') mars = GroupFactory(type_id='wg', acronym='mars') RoleFactory(name_id='chair', group=mars, person__user__username='marschairman') @@ -428,6 +431,13 @@ def submit_new_wg(self, formats): self.assertTrue(draft.latest_event(type="added_suggested_replaces")) self.assertTrue(not os.path.exists(os.path.join(self.staging_dir, "%s-%s.txt" % (name, rev)))) self.assertTrue(os.path.exists(os.path.join(self.repository_dir, "%s-%s.txt" % (name, rev)))) + check_ext = ["xml", "txt", "html"] if "xml" in formats else ["txt"] + for ext in check_ext: + basename=f"{name}-{rev}.{ext}" + extname=f"{ext}/{basename}" + self.assertFalse(exists_in_storage("staging", basename)) + self.assertTrue(exists_in_storage("active-draft", extname)) + 
self.assertTrue(exists_in_storage("draft", extname)) self.assertEqual(draft.type_id, "draft") self.assertEqual(draft.stream_id, "ietf") self.assertTrue(draft.expires >= timezone.now() + datetime.timedelta(days=settings.INTERNET_DRAFT_DAYS_TO_EXPIRE - 1)) @@ -535,6 +545,7 @@ def test_submit_new_wg_as_author_bad_submitter(self): def submit_new_concluded_wg_as_author(self, group_state_id='conclude'): """A new concluded WG submission by a logged-in author needs AD approval""" + TestBlobstoreManager().emptyTestBlobstores() mars = GroupFactory(type_id='wg', acronym='mars', state_id=group_state_id) draft = WgDraftFactory(group=mars) setup_default_community_list_for_group(draft.group) @@ -580,6 +591,7 @@ def test_submit_new_wg_with_extresources(self): def submit_existing(self, formats, change_authors=True, group_type='wg', stream_type='ietf'): # submit new revision of existing -> supply submitter info -> prev authors confirm + TestBlobstoreManager().emptyTestBlobstores() def _assert_authors_are_action_holders(draft, expect=True): for author in draft.authors(): @@ -771,6 +783,13 @@ def inspect_docevents(docevents, event_delta, event_type, be_in_desc, by_name): self.assertTrue(os.path.exists(os.path.join(self.archive_dir, "%s-%s.txt" % (name, old_rev)))) self.assertTrue(not os.path.exists(os.path.join(self.staging_dir, "%s-%s.txt" % (name, rev)))) self.assertTrue(os.path.exists(os.path.join(self.repository_dir, "%s-%s.txt" % (name, rev)))) + check_ext = ["xml", "txt", "html"] if "xml" in formats else ["txt"] + for ext in check_ext: + basename=f"{name}-{rev}.{ext}" + extname=f"{ext}/{basename}" + self.assertFalse(exists_in_storage("staging", basename)) + self.assertTrue(exists_in_storage("active-draft", extname)) + self.assertTrue(exists_in_storage("draft", extname)) self.assertEqual(draft.type_id, "draft") if stream_type == 'ietf': self.assertEqual(draft.stream_id, "ietf") @@ -909,6 +928,7 @@ def test_submit_existing_iab_with_extresources(self): def 
submit_new_individual(self, formats): # submit new -> supply submitter info -> confirm + TestBlobstoreManager().emptyTestBlobstores() name = "draft-authorname-testing-tests" rev = "00" @@ -971,7 +991,13 @@ def submit_new_individual(self, formats): self.assertTrue(variant_path.samefile(variant_ftp_path)) variant_all_archive_path = Path(settings.INTERNET_ALL_DRAFTS_ARCHIVE_DIR) / variant_path.name self.assertTrue(variant_path.samefile(variant_all_archive_path)) - + check_ext = ["xml", "txt", "html"] if "xml" in formats else ["txt"] + for ext in check_ext: + basename=f"{name}-{rev}.{ext}" + extname=f"{ext}/{basename}" + self.assertFalse(exists_in_storage("staging", basename)) + self.assertTrue(exists_in_storage("active-draft", extname)) + self.assertTrue(exists_in_storage("draft", extname)) def test_submit_new_individual_txt(self): @@ -988,6 +1014,7 @@ def test_submit_new_individual_txt_xml(self): self.submit_new_individual(["txt", "xml"]) def submit_new_draft_no_org_or_address(self, formats): + TestBlobstoreManager().emptyTestBlobstores() name = 'draft-testing-no-org-or-address' author = PersonFactory() @@ -1078,6 +1105,7 @@ def _assert_extresource_change_event(self, doc, is_present=True): self.assertIsNone(event, 'External resource change event was unexpectedly created') def submit_new_draft_with_extresources(self, group): + TestBlobstoreManager().emptyTestBlobstores() name = 'draft-testing-with-extresources' status_url, author = self.do_submission(name, rev='00', group=group) @@ -1107,6 +1135,7 @@ def test_submit_new_individual_with_extresources(self): def submit_new_individual_logged_in(self, formats): # submit new -> supply submitter info -> done + TestBlobstoreManager().emptyTestBlobstores() name = "draft-authorname-testing-logged-in" rev = "00" @@ -1250,6 +1279,7 @@ def submit_existing_with_extresources(self, group_type, stream_type='ietf'): Unlike some other tests in this module, does not confirm draft if this would be required. 
""" + TestBlobstoreManager().emptyTestBlobstores() orig_draft: Document = DocumentFactory( # type: ignore[annotation-unchecked] type_id='draft', group=GroupFactory(type_id=group_type) if group_type else None, @@ -1290,6 +1320,7 @@ def test_submit_update_individual_with_extresources(self): def submit_new_individual_replacing_wg(self, logged_in=False, group_state_id='active', notify_ad=False): """Chair of an active WG should be notified if individual draft is proposed to replace a WG draft""" + TestBlobstoreManager().emptyTestBlobstores() name = "draft-authorname-testing-tests" rev = "00" group = None @@ -1416,6 +1447,7 @@ def test_cancel_submission(self): # cancel r = self.client.post(status_url, dict(action=action)) self.assertTrue(not os.path.exists(os.path.join(self.staging_dir, "%s-%s.txt" % (name, rev)))) + self.assertFalse(exists_in_storage("staging",f"{name}-{rev}.txt")) def test_edit_submission_and_force_post(self): # submit -> edit @@ -1605,16 +1637,21 @@ def test_submit_all_file_types(self): self.assertEqual(Submission.objects.filter(name=name).count(), 1) self.assertTrue(os.path.exists(os.path.join(self.staging_dir, "%s-%s.txt" % (name, rev)))) + self.assertTrue(exists_in_storage("staging",f"{name}-{rev}.txt")) fd = io.open(os.path.join(self.staging_dir, "%s-%s.txt" % (name, rev))) txt_contents = fd.read() fd.close() self.assertTrue(name in txt_contents) self.assertTrue(os.path.exists(os.path.join(self.staging_dir, "%s-%s.xml" % (name, rev)))) + self.assertTrue(exists_in_storage("staging",f"{name}-{rev}.txt")) fd = io.open(os.path.join(self.staging_dir, "%s-%s.xml" % (name, rev))) xml_contents = fd.read() fd.close() self.assertTrue(name in xml_contents) self.assertTrue('' in xml_contents) + xml_contents = retrieve_str("staging", f"{name}-{rev}.xml") + self.assertTrue(name in xml_contents) + self.assertTrue('' in xml_contents) def test_expire_submissions(self): s = Submission.objects.create(name="draft-ietf-mars-foo", @@ -1901,6 +1938,7 @@ def 
do_wg_approval_auth_test(self, state, chair_can_approve=False): Assumes approval allowed by AD and secretary and, optionally, chair of WG """ + TestBlobstoreManager().emptyTestBlobstores() class _SubmissionFactory: """Helper class to generate fresh submissions""" def __init__(self, author, state): @@ -2750,6 +2788,7 @@ class AsyncSubmissionTests(BaseSubmitTestCase): """Tests of async submission-related tasks""" def test_process_and_accept_uploaded_submission(self): """process_and_accept_uploaded_submission should properly process a submission""" + TestBlobstoreManager().emptyTestBlobstores() _today = date_today() xml, author = submission_file('draft-somebody-test-00', 'draft-somebody-test-00.xml', None, 'test_submission.xml') xml_data = xml.read() @@ -2765,10 +2804,13 @@ def test_process_and_accept_uploaded_submission(self): xml_path = Path(settings.IDSUBMIT_STAGING_PATH) / 'draft-somebody-test-00.xml' with xml_path.open('w') as f: f.write(xml_data) + store_str("staging", "draft-somebody-test-00.xml", xml_data) txt_path = xml_path.with_suffix('.txt') self.assertFalse(txt_path.exists()) html_path = xml_path.with_suffix('.html') self.assertFalse(html_path.exists()) + for ext in ["txt", "html"]: + self.assertFalse(exists_in_storage("staging",f"draft-somebody-test-00.{ext}")) process_and_accept_uploaded_submission(submission) submission = Submission.objects.get(pk=submission.pk) # refresh @@ -2784,6 +2826,8 @@ def test_process_and_accept_uploaded_submission(self): # at least test that these were created self.assertTrue(txt_path.exists()) self.assertTrue(html_path.exists()) + for ext in ["txt", "html"]: + self.assertTrue(exists_in_storage("staging", f"draft-somebody-test-00.{ext}")) self.assertEqual(submission.file_size, os.stat(txt_path).st_size) self.assertIn('Completed submission validation checks', submission.submissionevent_set.last().desc) @@ -2798,6 +2842,7 @@ def test_process_and_accept_uploaded_submission_invalid(self): txt.close() # submitter is not an author 
+ TestBlobstoreManager().emptyTestBlobstores() submitter = PersonFactory() submission = SubmissionFactory( name='draft-somebody-test', @@ -2809,12 +2854,14 @@ def test_process_and_accept_uploaded_submission_invalid(self): xml_path = Path(settings.IDSUBMIT_STAGING_PATH) / 'draft-somebody-test-00.xml' with xml_path.open('w') as f: f.write(xml_data) + store_str("staging", "draft-somebody-test-00.xml", xml_data) process_and_accept_uploaded_submission(submission) submission = Submission.objects.get(pk=submission.pk) # refresh self.assertEqual(submission.state_id, 'cancel') self.assertIn('not one of the document authors', submission.submissionevent_set.last().desc) # author has no email address in XML + TestBlobstoreManager().emptyTestBlobstores() submission = SubmissionFactory( name='draft-somebody-test', rev='00', @@ -2825,12 +2872,14 @@ def test_process_and_accept_uploaded_submission_invalid(self): xml_path = Path(settings.IDSUBMIT_STAGING_PATH) / 'draft-somebody-test-00.xml' with xml_path.open('w') as f: f.write(re.sub(r'.*', '', xml_data)) + store_str("staging", "draft-somebody-test-00.xml", re.sub(r'.*', '', xml_data)) process_and_accept_uploaded_submission(submission) submission = Submission.objects.get(pk=submission.pk) # refresh self.assertEqual(submission.state_id, 'cancel') self.assertIn('Email address not found for all authors', submission.submissionevent_set.last().desc) # no title + TestBlobstoreManager().emptyTestBlobstores() submission = SubmissionFactory( name='draft-somebody-test', rev='00', @@ -2841,12 +2890,14 @@ def test_process_and_accept_uploaded_submission_invalid(self): xml_path = Path(settings.IDSUBMIT_STAGING_PATH) / 'draft-somebody-test-00.xml' with xml_path.open('w') as f: f.write(re.sub(r'.*', '', xml_data)) + store_str("staging", "draft-somebody-test-00.xml", re.sub(r'.*', '', xml_data)) process_and_accept_uploaded_submission(submission) submission = Submission.objects.get(pk=submission.pk) # refresh self.assertEqual(submission.state_id, 
'cancel') self.assertIn('Could not extract a valid title', submission.submissionevent_set.last().desc) # draft name mismatch + TestBlobstoreManager().emptyTestBlobstores() submission = SubmissionFactory( name='draft-different-name', rev='00', @@ -2857,12 +2908,14 @@ def test_process_and_accept_uploaded_submission_invalid(self): xml_path = Path(settings.IDSUBMIT_STAGING_PATH) / 'draft-different-name-00.xml' with xml_path.open('w') as f: f.write(xml_data) + store_str("staging", "draft-different-name-00.xml", xml_data) process_and_accept_uploaded_submission(submission) submission = Submission.objects.get(pk=submission.pk) # refresh self.assertEqual(submission.state_id, 'cancel') self.assertIn('Submission rejected: XML Internet-Draft filename', submission.submissionevent_set.last().desc) # rev mismatch + TestBlobstoreManager().emptyTestBlobstores() submission = SubmissionFactory( name='draft-somebody-test', rev='01', @@ -2873,12 +2926,14 @@ def test_process_and_accept_uploaded_submission_invalid(self): xml_path = Path(settings.IDSUBMIT_STAGING_PATH) / 'draft-somebody-test-01.xml' with xml_path.open('w') as f: f.write(xml_data) + store_str("staging", "draft-somebody-test-01.xml", xml_data) process_and_accept_uploaded_submission(submission) submission = Submission.objects.get(pk=submission.pk) # refresh self.assertEqual(submission.state_id, 'cancel') self.assertIn('Submission rejected: XML Internet-Draft revision', submission.submissionevent_set.last().desc) # not xml + TestBlobstoreManager().emptyTestBlobstores() submission = SubmissionFactory( name='draft-somebody-test', rev='00', @@ -2889,12 +2944,14 @@ def test_process_and_accept_uploaded_submission_invalid(self): txt_path = Path(settings.IDSUBMIT_STAGING_PATH) / 'draft-somebody-test-00.txt' with txt_path.open('w') as f: f.write(txt_data) + store_str("staging", "draft-somebody-test-00.txt", txt_data) process_and_accept_uploaded_submission(submission) submission = Submission.objects.get(pk=submission.pk) # refresh 
self.assertEqual(submission.state_id, 'cancel') self.assertIn('Only XML Internet-Draft submissions', submission.submissionevent_set.last().desc) # wrong state + TestBlobstoreManager().emptyTestBlobstores() submission = SubmissionFactory( name='draft-somebody-test', rev='00', @@ -2903,8 +2960,9 @@ def test_process_and_accept_uploaded_submission_invalid(self): state_id='uploaded', ) xml_path = Path(settings.IDSUBMIT_STAGING_PATH) / 'draft-somebody-test-00.xml' - with xml_path.open('w') as f: + with xml_path.open('w') as f: # Why is this state being written if the thing that uses it is mocked out? f.write(xml_data) + store_str("staging", "draft-somebody-test-00.xml", xml_data) with mock.patch('ietf.submit.utils.process_submission_xml') as mock_proc_xml: process_and_accept_uploaded_submission(submission) submission = Submission.objects.get(pk=submission.pk) # refresh @@ -2912,6 +2970,7 @@ def test_process_and_accept_uploaded_submission_invalid(self): self.assertEqual(submission.state_id, 'uploaded', 'State should not be changed') # failed checker + TestBlobstoreManager().emptyTestBlobstores() submission = SubmissionFactory( name='draft-somebody-test', rev='00', @@ -2922,6 +2981,7 @@ def test_process_and_accept_uploaded_submission_invalid(self): xml_path = Path(settings.IDSUBMIT_STAGING_PATH) / 'draft-somebody-test-00.xml' with xml_path.open('w') as f: f.write(xml_data) + store_str("staging", "draft-somebody-test-00.xml", xml_data) with mock.patch( 'ietf.submit.utils.apply_checkers', side_effect = lambda _, __: submission.checks.create( @@ -2958,6 +3018,7 @@ def test_process_and_accept_uploaded_submission_task_ignores_invalid_id(self, mo self.assertEqual(mock_method.call_count, 0) def test_process_submission_xml(self): + TestBlobstoreManager().emptyTestBlobstores() xml_path = Path(settings.IDSUBMIT_STAGING_PATH) / "draft-somebody-test-00.xml" xml, _ = submission_file( "draft-somebody-test-00", @@ -2968,6 +3029,7 @@ def test_process_submission_xml(self): ) xml_contents = 
xml.read() xml_path.write_text(xml_contents) + store_str("staging", "draft-somebody-test-00.xml", xml_contents) output = process_submission_xml("draft-somebody-test", "00") self.assertEqual(output["filename"], "draft-somebody-test") self.assertEqual(output["rev"], "00") @@ -2983,23 +3045,32 @@ def test_process_submission_xml(self): self.assertEqual(output["xml_version"], "3") # Should behave on missing or partial elements + TestBlobstoreManager().emptyTestBlobstores() xml_path.write_text(re.sub(r"", "", xml_contents)) # strip entirely + store_str("staging", "draft-somebody-test-00.xml", re.sub(r"", "", xml_contents)) output = process_submission_xml("draft-somebody-test", "00") self.assertEqual(output["document_date"], None) + TestBlobstoreManager().emptyTestBlobstores() xml_path.write_text(re.sub(r")", r"\1 day=\2", xml_contents)) # remove month + store_str("staging", "draft-somebody-test-00.xml", re.sub(r"()", r"\1 day=\2", xml_contents)) output = process_submission_xml("draft-somebody-test", "00") self.assertEqual(output["document_date"], date_today()) + TestBlobstoreManager().emptyTestBlobstores() xml_path.write_text(re.sub(r"", r"", xml_contents)) # remove day + store_str("staging", "draft-somebody-test-00.xml", re.sub(r"", r"", xml_contents)) output = process_submission_xml("draft-somebody-test", "00") self.assertEqual(output["document_date"], date_today()) # name mismatch + TestBlobstoreManager().emptyTestBlobstores() xml, _ = submission_file( "draft-somebody-wrong-name-00", # name that appears in the file "draft-somebody-test-00.xml", @@ -3008,10 +3079,13 @@ def test_process_submission_xml(self): title="Correct Draft Title", ) xml_path.write_text(xml.read()) + xml.seek(0) + store_str("staging", "draft-somebody-test-00.xml", xml.read()) with self.assertRaisesMessage(SubmissionError, "disagrees with submission filename"): process_submission_xml("draft-somebody-test", "00") # rev mismatch + TestBlobstoreManager().emptyTestBlobstores() xml, _ = submission_file( 
"draft-somebody-test-01", # name that appears in the file "draft-somebody-test-00.xml", @@ -3020,10 +3094,13 @@ def test_process_submission_xml(self): title="Correct Draft Title", ) xml_path.write_text(xml.read()) + xml.seek(0) + store_str("staging", "draft-somebody-test-00.xml", xml.read()) with self.assertRaisesMessage(SubmissionError, "disagrees with submission revision"): process_submission_xml("draft-somebody-test", "00") # missing title + TestBlobstoreManager().emptyTestBlobstores() xml, _ = submission_file( "draft-somebody-test-00", # name that appears in the file "draft-somebody-test-00.xml", @@ -3032,10 +3109,13 @@ def test_process_submission_xml(self): title="", ) xml_path.write_text(xml.read()) + xml.seek(0) + store_str("staging", "draft-somebody-test-00.xml", xml.read()) with self.assertRaisesMessage(SubmissionError, "Could not extract a valid title"): process_submission_xml("draft-somebody-test", "00") def test_process_submission_text(self): + TestBlobstoreManager().emptyTestBlobstores() txt_path = Path(settings.IDSUBMIT_STAGING_PATH) / "draft-somebody-test-00.txt" txt, _ = submission_file( "draft-somebody-test-00", @@ -3045,6 +3125,8 @@ def test_process_submission_text(self): title="Correct Draft Title", ) txt_path.write_text(txt.read()) + txt.seek(0) + store_str("staging", "draft-somebody-test-00.txt", txt.read()) output = process_submission_text("draft-somebody-test", "00") self.assertEqual(output["filename"], "draft-somebody-test") self.assertEqual(output["rev"], "00") @@ -3060,6 +3142,7 @@ def test_process_submission_text(self): self.assertIsNone(output["xml_version"]) # name mismatch + TestBlobstoreManager().emptyTestBlobstores() txt, _ = submission_file( "draft-somebody-wrong-name-00", # name that appears in the file "draft-somebody-test-00.txt", @@ -3069,11 +3152,14 @@ def test_process_submission_text(self): ) with txt_path.open('w') as fd: fd.write(txt.read()) + txt.seek(0) + store_str("staging", "draft-somebody-test-00.txt", txt.read()) 
txt.close() with self.assertRaisesMessage(SubmissionError, 'disagrees with submission filename'): process_submission_text("draft-somebody-test", "00") # rev mismatch + TestBlobstoreManager().emptyTestBlobstores() txt, _ = submission_file( "draft-somebody-test-01", # name that appears in the file "draft-somebody-test-00.txt", @@ -3083,6 +3169,8 @@ def test_process_submission_text(self): ) with txt_path.open('w') as fd: fd.write(txt.read()) + txt.seek(0) + store_str("staging", "draft-somebody-test-00.txt", txt.read()) txt.close() with self.assertRaisesMessage(SubmissionError, 'disagrees with submission revision'): process_submission_text("draft-somebody-test", "00") @@ -3221,6 +3309,7 @@ def test_find_submission_filenames(self): path = Path(self.staging_dir) for ext in ['txt', 'xml', 'pdf', 'md']: (path / f'{draft.name}-{draft.rev}.{ext}').touch() + store_str("staging", f"{draft.name}-{draft.rev}.{ext}", "") files = find_submission_filenames(draft) self.assertCountEqual( files, @@ -3280,6 +3369,7 @@ def test_validate_submission_rev(self): new_wg_doc = WgDraftFactory(rev='01', relations=[('replaces',old_wg_doc)]) path = Path(self.archive_dir) / f'{new_wg_doc.name}-{new_wg_doc.rev}.txt' path.touch() + store_str("staging", f"{new_wg_doc.name}-{new_wg_doc.rev}.txt", "") bad_revs = (None, '', '2', 'aa', '00', '01', '100', '002', u'öö') for rev in bad_revs: diff --git a/ietf/submit/utils.py b/ietf/submit/utils.py index f19f2384c8..3e06f45c54 100644 --- a/ietf/submit/utils.py +++ b/ietf/submit/utils.py @@ -36,6 +36,7 @@ DocumentAuthor, AddedMessageEvent ) from ietf.doc.models import NewRevisionDocEvent from ietf.doc.models import RelatedDocument, DocRelationshipName, DocExtResource +from ietf.doc.storage_utils import remove_from_storage, retrieve_bytes, store_bytes, store_file, store_str from ietf.doc.utils import (add_state_change_event, rebuild_reference_relations, set_replaces_for_document, prettify_std_name, update_doc_extresources, can_edit_docextresources, 
update_documentauthors, update_action_holders, @@ -455,6 +456,7 @@ def post_submission(request, submission, approved_doc_desc, approved_subm_desc): from ietf.doc.expire import move_draft_files_to_archive move_draft_files_to_archive(draft, prev_rev) + submission.draft = draft move_files_to_repository(submission) submission.state = DraftSubmissionStateName.objects.get(slug="posted") log.log(f"{submission.name}: moved files") @@ -488,7 +490,6 @@ def post_submission(request, submission, approved_doc_desc, approved_subm_desc): if new_possibly_replaces: send_review_possibly_replaces_request(request, draft, submitter_info) - submission.draft = draft submission.save() create_submission_event(request, submission, approved_subm_desc) @@ -498,6 +499,7 @@ def post_submission(request, submission, approved_doc_desc, approved_subm_desc): ref_rev_file_name = os.path.join(os.path.join(settings.BIBXML_BASE_PATH, 'bibxml-ids'), 'reference.I-D.%s-%s.xml' % (draft.name, draft.rev )) with io.open(ref_rev_file_name, "w", encoding='utf-8') as f: f.write(ref_text) + store_str("bibxml-ids", f"reference.I-D.{draft.name}-{draft.rev}.txt", ref_text) # TODO-BLOBSTORE verify with test log.log(f"{submission.name}: done") @@ -666,6 +668,12 @@ def move_files_to_repository(submission): ftp_dest = Path(settings.FTP_DIR) / "internet-drafts" / dest.name os.link(dest, all_archive_dest) os.link(dest, ftp_dest) + # Shadow what's happening to the fs in the blobstores. When the stores become + # authoritative, the source and dest checks will need to apply to the stores instead. 
+ content_bytes = retrieve_bytes("staging", fname) + store_bytes("active-draft", f"{ext}/{fname}", content_bytes) + submission.draft.store_bytes(f"{ext}/{fname}", content_bytes) + remove_from_storage("staging", fname) elif dest.exists(): log.log("Intended to move '%s' to '%s', but found source missing while destination exists.") elif f".{ext}" in submission.file_types.split(','): @@ -678,6 +686,7 @@ def remove_staging_files(name, rev): exts = [f'.{ext}' for ext in settings.IDSUBMIT_FILE_TYPES] for ext in exts: basename.with_suffix(ext).unlink(missing_ok=True) + remove_from_storage("staging", basename.with_suffix(ext).name, warn_if_missing=False) def remove_submission_files(submission): @@ -766,6 +775,8 @@ def save_files(form): for chunk in f.chunks(): destination.write(chunk) log.log("saved file %s" % name) + f.seek(0) + store_file("staging", f"{form.filename}-{form.revision}.{ext}", f) return file_name @@ -988,6 +999,10 @@ def render_missing_formats(submission): xml_version, ) ) + # When the blobstores become autoritative - the guard at the + # containing if statement needs to be based on the store + with Path(txt_path).open("rb") as f: + store_file("staging", f"{submission.name}-{submission.rev}.txt", f) # --- Convert to html --- html_path = staging_path(submission.name, submission.rev, '.html') @@ -1010,6 +1025,8 @@ def render_missing_formats(submission): xml_version, ) ) + with Path(html_path).open("rb") as f: + store_file("staging", f"{submission.name}-{submission.rev}.html", f) def accept_submission(submission: Submission, request: Optional[HttpRequest] = None, autopost=False): @@ -1361,6 +1378,7 @@ def process_and_validate_submission(submission): except SubmissionError: raise # pass SubmissionErrors up the stack except Exception as err: + # (this is a good point to just `raise err` when diagnosing Submission test failures) # convert other exceptions into SubmissionErrors log.log(f'Unexpected exception while processing submission {submission.pk}.') 
log.log(traceback.format_exc()) diff --git a/ietf/utils/storage.py b/ietf/utils/storage.py index 0aa02cab86..9f41f3d50f 100644 --- a/ietf/utils/storage.py +++ b/ietf/utils/storage.py @@ -1,8 +1,56 @@ +# Copyright The IETF Trust 2020-2025, All Rights Reserved +"""Django Storage classes""" +from pathlib import Path + +from django.conf import settings from django.core.files.storage import FileSystemStorage +from ietf.doc.storage_utils import store_file +from .log import log + class NoLocationMigrationFileSystemStorage(FileSystemStorage): - def deconstruct(obj): # pylint: disable=no-self-argument - path, args, kwargs = FileSystemStorage.deconstruct(obj) - kwargs["location"] = None - return (path, args, kwargs) + def deconstruct(self): + path, args, kwargs = super().deconstruct() + kwargs["location"] = None # don't record location in migrations + return path, args, kwargs + + +class BlobShadowFileSystemStorage(NoLocationMigrationFileSystemStorage): + """FileSystemStorage that shadows writes to the blob store as well + + Strips directories from the filename when naming the blob. + """ + + def __init__( + self, + *, # disallow positional arguments + kind: str, + location=None, + base_url=None, + file_permissions_mode=None, + directory_permissions_mode=None, + ): + self.kind = kind + super().__init__( + location, base_url, file_permissions_mode, directory_permissions_mode + ) + + def save(self, name, content, max_length=None): + # Write content to the filesystem - this deals with chunks, etc... + saved_name = super().save(name, content, max_length) + + if settings.ENABLE_BLOBSTORAGE: + # Retrieve the content and write to the blob store + blob_name = Path(saved_name).name # strips path + try: + with self.open(saved_name, "rb") as f: + store_file(self.kind, blob_name, f, allow_overwrite=True) + except Exception as err: + log(f"Failed to shadow {saved_name} at {self.kind}:{blob_name}: {err}") + return saved_name # includes the path! 
+ + def deconstruct(self): + path, args, kwargs = super().deconstruct() + kwargs["kind"] = "" # don't record "kind" in migrations + return path, args, kwargs diff --git a/ietf/utils/test_runner.py b/ietf/utils/test_runner.py index 49d53e1e1d..3c89a2d01c 100644 --- a/ietf/utils/test_runner.py +++ b/ietf/utils/test_runner.py @@ -48,6 +48,8 @@ import subprocess import tempfile import copy +import boto3 +import botocore.config import factory.random import urllib3 import warnings @@ -85,6 +87,8 @@ from ietf.utils.test_smtpserver import SMTPTestServerDriver from ietf.utils.test_utils import TestCase +from mypy_boto3_s3.service_resource import Bucket + loaded_templates = set() visited_urls = set() @@ -722,9 +726,25 @@ def add_arguments(cls, parser): parser.add_argument('--rerun-until-failure', action='store_true', dest='rerun', default=False, help='Run the indicated tests in a loop until a failure occurs. ' ) - - def __init__(self, ignore_lower_coverage=False, skip_coverage=False, save_version_coverage=None, html_report=None, permit_mixed_migrations=None, show_logging=None, validate_html=None, validate_html_harder=None, rerun=None, **kwargs): - # + parser.add_argument('--no-manage-blobstore', action='store_false', dest='manage_blobstore', + help='Disable creating/deleting test buckets in the blob store.' 
+ 'When this argument is used, a set of buckets with "test-" prefixed to their ' + 'names must already exist.') + + def __init__( + self, + ignore_lower_coverage=False, + skip_coverage=False, + save_version_coverage=None, + html_report=None, + permit_mixed_migrations=None, + show_logging=None, + validate_html=None, + validate_html_harder=None, + rerun=None, + manage_blobstore=True, + **kwargs + ): # self.ignore_lower_coverage = ignore_lower_coverage self.check_coverage = not skip_coverage self.save_version_coverage = save_version_coverage @@ -752,6 +772,8 @@ def __init__(self, ignore_lower_coverage=False, skip_coverage=False, save_versio # contains parent classes to later subclasses, the parent classes will determine the ordering, so use the most # specific classes necessary to get the right ordering: self.reorder_by = (PyFlakesTestCase, MyPyTest,) + self.reorder_by + (StaticLiveServerTestCase, TemplateTagTest, CoverageTest,) + #self.buckets = set() + self.blobstoremanager = TestBlobstoreManager() if manage_blobstore else None def setup_test_environment(self, **kwargs): global template_coverage_collection @@ -936,6 +958,9 @@ def setup_test_environment(self, **kwargs): print(" (extra pedantically)") self.vnu = start_vnu_server() + if self.blobstoremanager is not None: + self.blobstoremanager.createTestBlobstores() + super(IetfTestRunner, self).setup_test_environment(**kwargs) def teardown_test_environment(self, **kwargs): @@ -966,6 +991,9 @@ def teardown_test_environment(self, **kwargs): if self.vnu: self.vnu.terminate() + if self.blobstoremanager is not None: + self.blobstoremanager.destroyTestBlobstores() + super(IetfTestRunner, self).teardown_test_environment(**kwargs) def validate(self, testcase): @@ -1220,3 +1248,39 @@ def tearDown(self): for k, v in self.replaced_settings.items(): setattr(settings, k, v) super().tearDown() + +class TestBlobstoreManager(): + # N.B. 
buckets and blobstore are intentional Class-level attributes + buckets: set[Bucket] = set() + + blobstore = boto3.resource("s3", + endpoint_url="http://blobstore:9000", + aws_access_key_id="minio_root", + aws_secret_access_key="minio_pass", + aws_session_token=None, + config = botocore.config.Config(signature_version="s3v4"), + #config=botocore.config.Config(signature_version=botocore.UNSIGNED), + verify=False + ) + + def createTestBlobstores(self): + for storagename in settings.MORE_STORAGE_NAMES: + bucketname = f"test-{storagename}" + try: + bucket = self.blobstore.create_bucket(Bucket=bucketname) + self.buckets.add(bucket) + except self.blobstore.meta.client.exceptions.BucketAlreadyOwnedByYou: + bucket = self.blobstore.Bucket(bucketname) + self.buckets.add(bucket) + + def destroyTestBlobstores(self): + self.emptyTestBlobstores(destroy=True) + + def emptyTestBlobstores(self, destroy=False): + # debug.show('f"Asked to empty test blobstores with destroy={destroy}"') + for bucket in self.buckets: + bucket.objects.delete() + if destroy: + bucket.delete() + if destroy: + self.buckets = set() diff --git a/k8s/settings_local.py b/k8s/settings_local.py index f266ffcd62..912607f466 100644 --- a/k8s/settings_local.py +++ b/k8s/settings_local.py @@ -6,7 +6,9 @@ import json from ietf import __release_hash__ -from ietf.settings import * # pyflakes:ignore +from ietf.settings import * # pyflakes:ignore +from ietf.settings import STORAGES, MORE_STORAGE_NAMES, BLOBSTORAGE_CONNECT_TIMEOUT, BLOBSTORAGE_READ_TIMEOUT, BLOBSTORAGE_MAX_ATTEMPTS +import botocore.config def _multiline_to_list(s): @@ -29,7 +31,7 @@ def _multiline_to_list(s): if _SECRET_KEY is not None: SECRET_KEY = _SECRET_KEY else: - raise RuntimeError("DATATRACKER_DJANGO_SECRET_KEY must be set") + raise RuntimeError("DATATRACKER_DJANGO_SECRET_KEY must be set") _NOMCOM_APP_SECRET_B64 = os.environ.get("DATATRACKER_NOMCOM_APP_SECRET_B64", None) if _NOMCOM_APP_SECRET_B64 is not None: @@ -41,7 +43,7 @@ def 
_multiline_to_list(s): if _IANA_SYNC_PASSWORD is not None: IANA_SYNC_PASSWORD = _IANA_SYNC_PASSWORD else: - raise RuntimeError("DATATRACKER_IANA_SYNC_PASSWORD must be set") + raise RuntimeError("DATATRACKER_IANA_SYNC_PASSWORD must be set") _RFC_EDITOR_SYNC_PASSWORD = os.environ.get("DATATRACKER_RFC_EDITOR_SYNC_PASSWORD", None) if _RFC_EDITOR_SYNC_PASSWORD is not None: @@ -59,25 +61,25 @@ def _multiline_to_list(s): if _GITHUB_BACKUP_API_KEY is not None: GITHUB_BACKUP_API_KEY = _GITHUB_BACKUP_API_KEY else: - raise RuntimeError("DATATRACKER_GITHUB_BACKUP_API_KEY must be set") + raise RuntimeError("DATATRACKER_GITHUB_BACKUP_API_KEY must be set") _API_KEY_TYPE = os.environ.get("DATATRACKER_API_KEY_TYPE", None) if _API_KEY_TYPE is not None: API_KEY_TYPE = _API_KEY_TYPE else: - raise RuntimeError("DATATRACKER_API_KEY_TYPE must be set") + raise RuntimeError("DATATRACKER_API_KEY_TYPE must be set") _API_PUBLIC_KEY_PEM_B64 = os.environ.get("DATATRACKER_API_PUBLIC_KEY_PEM_B64", None) if _API_PUBLIC_KEY_PEM_B64 is not None: API_PUBLIC_KEY_PEM = b64decode(_API_PUBLIC_KEY_PEM_B64) else: - raise RuntimeError("DATATRACKER_API_PUBLIC_KEY_PEM_B64 must be set") + raise RuntimeError("DATATRACKER_API_PUBLIC_KEY_PEM_B64 must be set") _API_PRIVATE_KEY_PEM_B64 = os.environ.get("DATATRACKER_API_PRIVATE_KEY_PEM_B64", None) if _API_PRIVATE_KEY_PEM_B64 is not None: API_PRIVATE_KEY_PEM = b64decode(_API_PRIVATE_KEY_PEM_B64) else: - raise RuntimeError("DATATRACKER_API_PRIVATE_KEY_PEM_B64 must be set") + raise RuntimeError("DATATRACKER_API_PRIVATE_KEY_PEM_B64 must be set") # Set DEBUG if DATATRACKER_DEBUG env var is the word "true" DEBUG = os.environ.get("DATATRACKER_DEBUG", "false").lower() == "true" @@ -102,7 +104,9 @@ def _multiline_to_list(s): # Configure persistent connections. A setting of 0 is Django's default. _conn_max_age = os.environ.get("DATATRACKER_DB_CONN_MAX_AGE", "0") # A string "none" means unlimited age. 
-DATABASES["default"]["CONN_MAX_AGE"] = None if _conn_max_age.lower() == "none" else int(_conn_max_age) +DATABASES["default"]["CONN_MAX_AGE"] = ( + None if _conn_max_age.lower() == "none" else int(_conn_max_age) +) # Enable connection health checks if DATATRACKER_DB_CONN_HEALTH_CHECK is the string "true" _conn_health_checks = bool( os.environ.get("DATATRACKER_DB_CONN_HEALTH_CHECKS", "false").lower() == "true" @@ -114,9 +118,11 @@ def _multiline_to_list(s): if _admins_str is not None: ADMINS = [parseaddr(admin) for admin in _multiline_to_list(_admins_str)] else: - raise RuntimeError("DATATRACKER_ADMINS must be set") + raise RuntimeError("DATATRACKER_ADMINS must be set") -USING_DEBUG_EMAIL_SERVER = os.environ.get("DATATRACKER_EMAIL_DEBUG", "false").lower() == "true" +USING_DEBUG_EMAIL_SERVER = ( + os.environ.get("DATATRACKER_EMAIL_DEBUG", "false").lower() == "true" +) EMAIL_HOST = os.environ.get("DATATRACKER_EMAIL_HOST", "localhost") EMAIL_PORT = int(os.environ.get("DATATRACKER_EMAIL_PORT", "2025")) @@ -126,7 +132,7 @@ def _multiline_to_list(s): CELERY_BROKER_URL = "amqp://datatracker:{password}@{host}/{queue}".format( host=os.environ.get("RABBITMQ_HOSTNAME", "dt-rabbitmq"), password=_celery_password, - queue=os.environ.get("RABBITMQ_QUEUE", "dt") + queue=os.environ.get("RABBITMQ_QUEUE", "dt"), ) IANA_SYNC_USERNAME = "ietfsync" @@ -140,10 +146,10 @@ def _multiline_to_list(s): raise RuntimeError("DATATRACKER_REGISTRATION_API_KEY must be set") STATS_REGISTRATION_ATTENDEES_JSON_URL = f"https://registration.ietf.org/{{number}}/attendees/?apikey={_registration_api_key}" -#FIRST_CUTOFF_DAYS = 12 -#SECOND_CUTOFF_DAYS = 12 -#SUBMISSION_CUTOFF_DAYS = 26 -#SUBMISSION_CORRECTION_DAYS = 57 +# FIRST_CUTOFF_DAYS = 12 +# SECOND_CUTOFF_DAYS = 12 +# SUBMISSION_CUTOFF_DAYS = 26 +# SUBMISSION_CORRECTION_DAYS = 57 MEETING_MATERIALS_SUBMISSION_CUTOFF_DAYS = 26 MEETING_MATERIALS_SUBMISSION_CORRECTION_DAYS = 54 @@ -155,7 +161,7 @@ def _multiline_to_list(s): if _MEETECHO_CLIENT_ID is not 
None and _MEETECHO_CLIENT_SECRET is not None: MEETECHO_API_CONFIG = { "api_base": os.environ.get( - "DATATRACKER_MEETECHO_API_BASE", + "DATATRACKER_MEETECHO_API_BASE", "https://meetings.conf.meetecho.com/api/v1/", ), "client_id": _MEETECHO_CLIENT_ID, @@ -173,7 +179,9 @@ def _multiline_to_list(s): raise RuntimeError( "Only one of DATATRACKER_APP_API_TOKENS_JSON and DATATRACKER_APP_API_TOKENS_JSON_B64 may be set" ) - _APP_API_TOKENS_JSON = b64decode(os.environ.get("DATATRACKER_APP_API_TOKENS_JSON_B64")) + _APP_API_TOKENS_JSON = b64decode( + os.environ.get("DATATRACKER_APP_API_TOKENS_JSON_B64") + ) else: _APP_API_TOKENS_JSON = os.environ.get("DATATRACKER_APP_API_TOKENS_JSON", None) @@ -189,7 +197,9 @@ def _multiline_to_list(s): # Leave DATATRACKER_MATOMO_SITE_ID unset to disable Matomo reporting if "DATATRACKER_MATOMO_SITE_ID" in os.environ: - MATOMO_DOMAIN_PATH = os.environ.get("DATATRACKER_MATOMO_DOMAIN_PATH", "analytics.ietf.org") + MATOMO_DOMAIN_PATH = os.environ.get( + "DATATRACKER_MATOMO_DOMAIN_PATH", "analytics.ietf.org" + ) MATOMO_SITE_ID = os.environ.get("DATATRACKER_MATOMO_SITE_ID") MATOMO_DISABLE_COOKIES = True @@ -197,9 +207,13 @@ def _multiline_to_list(s): _SCOUT_KEY = os.environ.get("DATATRACKER_SCOUT_KEY", None) if _SCOUT_KEY is not None: if SERVER_MODE == "production": - PROD_PRE_APPS = ["scout_apm.django", ] + PROD_PRE_APPS = [ + "scout_apm.django", + ] else: - DEV_PRE_APPS = ["scout_apm.django", ] + DEV_PRE_APPS = [ + "scout_apm.django", + ] SCOUT_MONITOR = True SCOUT_KEY = _SCOUT_KEY SCOUT_NAME = os.environ.get("DATATRACKER_SCOUT_NAME", "Datatracker") @@ -216,16 +230,17 @@ def _multiline_to_list(s): STATIC_URL = os.environ.get("DATATRACKER_STATIC_URL", None) if STATIC_URL is None: from ietf import __version__ + STATIC_URL = f"https://static.ietf.org/dt/{__version__}/" # Set these to the same as "production" in settings.py, whether production mode or not MEDIA_ROOT = "/a/www/www6s/lib/dt/media/" -MEDIA_URL = "https://www.ietf.org/lib/dt/media/" 
+MEDIA_URL = "https://www.ietf.org/lib/dt/media/" PHOTOS_DIRNAME = "photo" PHOTOS_DIR = MEDIA_ROOT + PHOTOS_DIRNAME # Normally only set for debug, but needed until we have a real FS -DJANGO_VITE_MANIFEST_PATH = os.path.join(BASE_DIR, 'static/dist-neue/manifest.json') +DJANGO_VITE_MANIFEST_PATH = os.path.join(BASE_DIR, "static/dist-neue/manifest.json") # Binaries that are different in the docker image DE_GFM_BINARY = "/usr/local/bin/de-gfm" @@ -235,6 +250,7 @@ def _multiline_to_list(s): MEMCACHED_HOST = os.environ.get("DT_MEMCACHED_SERVICE_HOST", "127.0.0.1") MEMCACHED_PORT = os.environ.get("DT_MEMCACHED_SERVICE_PORT", "11211") from ietf import __version__ + CACHES = { "default": { "BACKEND": "ietf.utils.cache.LenientMemcacheCache", @@ -285,3 +301,46 @@ def _multiline_to_list(s): # Console logs as JSON instead of plain when running in k8s LOGGING["handlers"]["console"]["formatter"] = "json" + +# Configure storages for the blob store +_blob_store_endpoint_url = os.environ.get("DATATRACKER_BLOB_STORE_ENDPOINT_URL") +_blob_store_access_key = os.environ.get("DATATRACKER_BLOB_STORE_ACCESS_KEY") +_blob_store_secret_key = os.environ.get("DATATRACKER_BLOB_STORE_SECRET_KEY") +if None in (_blob_store_endpoint_url, _blob_store_access_key, _blob_store_secret_key): + raise RuntimeError( + "All of DATATRACKER_BLOB_STORE_ENDPOINT_URL, DATATRACKER_BLOB_STORE_ACCESS_KEY, " + "and DATATRACKER_BLOB_STORE_SECRET_KEY must be set" + ) +_blob_store_bucket_prefix = os.environ.get( + "DATATRACKER_BLOB_STORE_BUCKET_PREFIX", "" +) +_blob_store_enable_profiling = ( + os.environ.get("DATATRACKER_BLOB_STORE_ENABLE_PROFILING", "false").lower() == "true" +) +_blob_store_max_attempts = ( + os.environ.get("DATATRACKER_BLOB_STORE_MAX_ATTEMPTS", BLOBSTORAGE_MAX_ATTEMPTS) +) +_blob_store_connect_timeout = ( + os.environ.get("DATATRACKER_BLOB_STORE_CONNECT_TIMEOUT", BLOBSTORAGE_CONNECT_TIMEOUT) +) +_blob_store_read_timeout = ( + os.environ.get("DATATRACKER_BLOB_STORE_READ_TIMEOUT", 
BLOBSTORAGE_READ_TIMEOUT) +) +for storage_name in MORE_STORAGE_NAMES: + STORAGES[storage_name] = { + "BACKEND": "ietf.doc.storage_backends.CustomS3Storage", + "OPTIONS": dict( + endpoint_url=_blob_store_endpoint_url, + access_key=_blob_store_access_key, + secret_key=_blob_store_secret_key, + security_token=None, + client_config=botocore.config.Config( + signature_version="s3v4", + connect_timeout=_blob_store_connect_timeout, + read_timeout=_blob_store_read_timeout, + retries={"total_max_attempts": _blob_store_max_attempts}, + ), + bucket_name=f"{_blob_store_bucket_prefix}{storage_name}".strip(), + ietf_log_blob_timing=_blob_store_enable_profiling, + ), + } diff --git a/requirements.txt b/requirements.txt index 073a6bfa0a..66a785e929 100644 --- a/requirements.txt +++ b/requirements.txt @@ -6,6 +6,9 @@ beautifulsoup4>=4.11.1 # Only used in tests bibtexparser>=1.2.0 # Only used in tests bleach>=6 types-bleach>=6 +boto3>=1.35,<1.36 +boto3-stubs[s3]>=1.35,<1.36 +botocore>=1.35,<1.36 celery>=5.2.6 coverage>=4.5.4,<5.0 # Coverage 5.x moves from a json database to SQLite. 
Moving to 5.x will require substantial rewrites in ietf.utils.test_runner and ietf.release.views defusedxml>=0.7.1 # for TastyPie when using xml; not a declared dependency @@ -21,6 +24,7 @@ django-markup>=1.5 # Limited use - need to reconcile against direct use of ma django-oidc-provider==0.8.2 # 0.8.3 changes logout flow and claim return django-referrer-policy>=1.0 django-simple-history>=3.0.0 +django-storages>=1.14.4 django-stubs>=4.2.7,<5 # The django-stubs version used determines the the mypy version indicated below django-tastypie>=0.14.7,<0.15.0 # Version must be locked in sync with version of Django django-vite>=2.0.2,<3 @@ -75,7 +79,7 @@ tblib>=1.7.0 # So that the django test runner provides tracebacks tlds>=2022042700 # Used to teach bleach about which TLDs currently exist tqdm>=4.64.0 Unidecode>=1.3.4 -urllib3>=2 +urllib3>=1.26,<2 weasyprint>=59 xml2rfc[pdf]>=3.23.0 xym>=0.6,<1.0 From be7ad9acabca649cd77374e3aea574dfdcebc7d8 Mon Sep 17 00:00:00 2001 From: Jennifer Richards Date: Wed, 19 Feb 2025 21:42:07 -0400 Subject: [PATCH 009/405] ci: cast env vars to correct types (#8558) --- k8s/settings_local.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/k8s/settings_local.py b/k8s/settings_local.py index 912607f466..a0e197b7f5 100644 --- a/k8s/settings_local.py +++ b/k8s/settings_local.py @@ -317,13 +317,13 @@ def _multiline_to_list(s): _blob_store_enable_profiling = ( os.environ.get("DATATRACKER_BLOB_STORE_ENABLE_PROFILING", "false").lower() == "true" ) -_blob_store_max_attempts = ( +_blob_store_max_attempts = int( os.environ.get("DATATRACKER_BLOB_STORE_MAX_ATTEMPTS", BLOBSTORAGE_MAX_ATTEMPTS) ) -_blob_store_connect_timeout = ( +_blob_store_connect_timeout = float( os.environ.get("DATATRACKER_BLOB_STORE_CONNECT_TIMEOUT", BLOBSTORAGE_CONNECT_TIMEOUT) ) -_blob_store_read_timeout = ( +_blob_store_read_timeout = float( os.environ.get("DATATRACKER_BLOB_STORE_READ_TIMEOUT", BLOBSTORAGE_READ_TIMEOUT) ) for storage_name in MORE_STORAGE_NAMES: 
From 2f8b9c3cfa157ed14d19b372c877eaed3beb6391 Mon Sep 17 00:00:00 2001 From: Jennifer Richards Date: Thu, 20 Feb 2025 13:12:43 -0400 Subject: [PATCH 010/405] fix: ignore exceptions from blobstore ops (#8565) * fix: ignore exceptions from to blobstore ops * fix: log repr(err) instead of just err --- ietf/doc/storage_utils.py | 67 +++++++++++++++++++++++++++------------ ietf/utils/storage.py | 6 ++-- 2 files changed, 49 insertions(+), 24 deletions(-) diff --git a/ietf/doc/storage_utils.py b/ietf/doc/storage_utils.py index 4f0516339a..1bc2aa293c 100644 --- a/ietf/doc/storage_utils.py +++ b/ietf/doc/storage_utils.py @@ -8,6 +8,8 @@ from django.core.files.base import ContentFile, File from django.core.files.storage import storages +from ietf.utils.log import log + # TODO-BLOBSTORE (Future, maybe after leaving 3.9) : add a return type def _get_storage(kind: str): @@ -22,16 +24,22 @@ def _get_storage(kind: str): def exists_in_storage(kind: str, name: str) -> bool: if settings.ENABLE_BLOBSTORAGE: - store = _get_storage(kind) - return store.exists_in_storage(kind, name) + try: + store = _get_storage(kind) + return store.exists_in_storage(kind, name) + except Exception as err: + log(f"Blobstore Error: Failed to test existence of {kind}:{name}: {repr(err)}") else: return False def remove_from_storage(kind: str, name: str, warn_if_missing: bool = True) -> None: if settings.ENABLE_BLOBSTORAGE: - store = _get_storage(kind) - store.remove_from_storage(kind, name, warn_if_missing) + try: + store = _get_storage(kind) + store.remove_from_storage(kind, name, warn_if_missing) + except Exception as err: + log(f"Blobstore Error: Failed to remove {kind}:{name}: {repr(err)}") return None @@ -46,8 +54,11 @@ def store_file( ) -> None: # debug.show('f"asked to store {name} into {kind}"') if settings.ENABLE_BLOBSTORAGE: - store = _get_storage(kind) - store.store_file(kind, name, file, allow_overwrite, doc_name, doc_rev) + try: + store = _get_storage(kind) + store.store_file(kind, name, file, 
allow_overwrite, doc_name, doc_rev) + except Exception as err: + log(f"Blobstore Error: Failed to store file {kind}:{name}: {repr(err)}") return None @@ -60,7 +71,11 @@ def store_bytes( doc_rev: Optional[str] = None, ) -> None: if settings.ENABLE_BLOBSTORAGE: - store_file(kind, name, ContentFile(content), allow_overwrite) + try: + store_file(kind, name, ContentFile(content), allow_overwrite) + except Exception as err: + # n.b., not likely to get an exception here because store_file or store_bytes will catch it + log(f"Blobstore Error: Failed to store bytes to {kind}:{name}: {repr(err)}") return None @@ -73,8 +88,12 @@ def store_str( doc_rev: Optional[str] = None, ) -> None: if settings.ENABLE_BLOBSTORAGE: - content_bytes = content.encode("utf-8") - store_bytes(kind, name, content_bytes, allow_overwrite) + try: + content_bytes = content.encode("utf-8") + store_bytes(kind, name, content_bytes, allow_overwrite) + except Exception as err: + # n.b., not likely to get an exception here because store_file or store_bytes will catch it + log(f"Blobstore Error: Failed to store string to {kind}:{name}: {repr(err)}") return None @@ -82,22 +101,28 @@ def retrieve_bytes(kind: str, name: str) -> bytes: from ietf.doc.storage_backends import maybe_log_timing content = b"" if settings.ENABLE_BLOBSTORAGE: - store = _get_storage(kind) - with store.open(name) as f: - with maybe_log_timing( - hasattr(store, "ietf_log_blob_timing") and store.ietf_log_blob_timing, - "read", - bucket_name=store.bucket_name if hasattr(store, "bucket_name") else "", - name=name, - ): - content = f.read() + try: + store = _get_storage(kind) + with store.open(name) as f: + with maybe_log_timing( + hasattr(store, "ietf_log_blob_timing") and store.ietf_log_blob_timing, + "read", + bucket_name=store.bucket_name if hasattr(store, "bucket_name") else "", + name=name, + ): + content = f.read() + except Exception as err: + log(f"Blobstore Error: Failed to read bytes from {kind}:{name}: {repr(err)}") return content 
def retrieve_str(kind: str, name: str) -> str: content = "" if settings.ENABLE_BLOBSTORAGE: - content_bytes = retrieve_bytes(kind, name) - # TODO-BLOBSTORE: try to decode all the different ways doc.text() does - content = content_bytes.decode("utf-8") + try: + content_bytes = retrieve_bytes(kind, name) + # TODO-BLOBSTORE: try to decode all the different ways doc.text() does + content = content_bytes.decode("utf-8") + except Exception as err: + log(f"Blobstore Error: Failed to read string from {kind}:{name}: {repr(err)}") return content diff --git a/ietf/utils/storage.py b/ietf/utils/storage.py index 9f41f3d50f..42fcf884aa 100644 --- a/ietf/utils/storage.py +++ b/ietf/utils/storage.py @@ -41,13 +41,13 @@ def save(self, name, content, max_length=None): saved_name = super().save(name, content, max_length) if settings.ENABLE_BLOBSTORAGE: - # Retrieve the content and write to the blob store - blob_name = Path(saved_name).name # strips path try: + # Retrieve the content and write to the blob store + blob_name = Path(saved_name).name # strips path with self.open(saved_name, "rb") as f: store_file(self.kind, blob_name, f, allow_overwrite=True) except Exception as err: - log(f"Failed to shadow {saved_name} at {self.kind}:{blob_name}: {err}") + log(f"Blobstore Error: Failed to shadow {saved_name} at {self.kind}:{blob_name}: {repr(err)}") return saved_name # includes the path! 
def deconstruct(self): From aeba63bb41a02bab3f4ef043d5ac8d5e38e24b80 Mon Sep 17 00:00:00 2001 From: Robert Sparks Date: Thu, 20 Feb 2025 13:13:44 -0600 Subject: [PATCH 011/405] chore: ensure proper return type (#8566) * chore: ensure proper return type * test: comment out (intentionally) failing assertion --------- Co-authored-by: Jennifer Richards --- ietf/doc/storage_utils.py | 3 +-- ietf/doc/tests_status_change.py | 5 +++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/ietf/doc/storage_utils.py b/ietf/doc/storage_utils.py index 1bc2aa293c..012efc9071 100644 --- a/ietf/doc/storage_utils.py +++ b/ietf/doc/storage_utils.py @@ -29,8 +29,7 @@ def exists_in_storage(kind: str, name: str) -> bool: return store.exists_in_storage(kind, name) except Exception as err: log(f"Blobstore Error: Failed to test existence of {kind}:{name}: {repr(err)}") - else: - return False + return False def remove_from_storage(kind: str, name: str, warn_if_missing: bool = True) -> None: diff --git a/ietf/doc/tests_status_change.py b/ietf/doc/tests_status_change.py index cbdc1a049a..da1a4f1906 100644 --- a/ietf/doc/tests_status_change.py +++ b/ietf/doc/tests_status_change.py @@ -564,8 +564,9 @@ def test_initial_submission(self): ftp_filepath = Path(settings.FTP_DIR) / "status-changes" / basename self.assertFalse(filepath.exists()) self.assertFalse(ftp_filepath.exists()) - with self.assertRaises(FileNotFoundError): - retrieve_str("statchg",basename) + # TODO-BLOBSTORE: next assert is disabled because we currently suppress all exceptions + # with self.assertRaises(FileNotFoundError): + # retrieve_str("statchg",basename) r = self.client.post(url,dict(content="Some initial review text\n",submit_response="1")) self.assertEqual(r.status_code,302) doc = Document.objects.get(name='status-change-imaginary-mid-review') From 041fa83d21ce7d8320e09a2a08f9c0b7577f6fba Mon Sep 17 00:00:00 2001 From: Jennifer Richards Date: Thu, 20 Feb 2025 16:45:22 -0400 Subject: [PATCH 012/405] chore: handle 
errors in app-configure-blobstore.py (#8567) --- docker/scripts/app-configure-blobstore.py | 18 +++++++++++++----- 1 file changed, 13 insertions(+), 5 deletions(-) diff --git a/docker/scripts/app-configure-blobstore.py b/docker/scripts/app-configure-blobstore.py index 7b5ce962eb..ae87bf1afe 100755 --- a/docker/scripts/app-configure-blobstore.py +++ b/docker/scripts/app-configure-blobstore.py @@ -2,6 +2,8 @@ # Copyright The IETF Trust 2024, All Rights Reserved import boto3 +import botocore.config +import botocore.exceptions import os import sys @@ -16,13 +18,19 @@ def init_blobstore(): aws_secret_access_key=os.environ.get("BLOB_STORE_SECRET_KEY", "minio_pass"), aws_session_token=None, config=botocore.config.Config(signature_version="s3v4"), - verify=False, ) for bucketname in MORE_STORAGE_NAMES: - blobstore.create_bucket( - Bucket=f"{os.environ.get('BLOB_STORE_BUCKET_PREFIX', '')}{bucketname}".strip() - ) - + try: + blobstore.create_bucket( + Bucket=f"{os.environ.get('BLOB_STORE_BUCKET_PREFIX', '')}{bucketname}".strip() + ) + except botocore.exceptions.ClientError as err: + if err.response["Error"]["Code"] == "BucketAlreadyExists": + print(f"Bucket {bucketname} already exists") + else: + print(f"Error creating {bucketname}: {err.response['Error']['Code']}") + else: + print(f"Bucket {bucketname} created") if __name__ == "__main__": sys.exit(init_blobstore()) From fb310e5ce209ca3ce1b5e557e8beb795d9b75a02 Mon Sep 17 00:00:00 2001 From: Jennifer Richards Date: Fri, 21 Feb 2025 11:49:16 -0400 Subject: [PATCH 013/405] feat: useful error when submission has inconsistent date (#8576) * chore: handle errors in app-configure-blobstore.py * feat: sensible error for inconsistent --- ietf/submit/utils.py | 9 +++++++-- ietf/utils/xmldraft.py | 15 ++++++++++++++- 2 files changed, 21 insertions(+), 3 deletions(-) diff --git a/ietf/submit/utils.py b/ietf/submit/utils.py index 3e06f45c54..61274c0116 100644 --- a/ietf/submit/utils.py +++ b/ietf/submit/utils.py @@ -58,7 +58,7 @@ from 
ietf.utils.mail import is_valid_email from ietf.utils.text import parse_unicode, normalize_text from ietf.utils.timezone import date_today -from ietf.utils.xmldraft import XMLDraft +from ietf.utils.xmldraft import InvalidMetadataError, XMLDraft from ietf.person.name import unidecode_name @@ -1201,6 +1201,11 @@ def process_submission_xml(filename, revision): if not title: raise SubmissionError("Could not extract a valid title from the XML") + try: + document_date = xml_draft.get_creation_date() + except InvalidMetadataError as err: + raise SubmissionError(str(err)) from err + return { "filename": xml_draft.filename, "rev": xml_draft.revision, @@ -1210,7 +1215,7 @@ def process_submission_xml(filename, revision): for auth in xml_draft.get_author_list() ], "abstract": None, # not supported from XML - "document_date": xml_draft.get_creation_date(), + "document_date": document_date, "pages": None, # not supported from XML "words": None, # not supported from XML "first_two_pages": None, # not supported from XML diff --git a/ietf/utils/xmldraft.py b/ietf/utils/xmldraft.py index c39c4d0a06..73baf917d8 100644 --- a/ietf/utils/xmldraft.py +++ b/ietf/utils/xmldraft.py @@ -159,7 +159,16 @@ def parse_creation_date(date_elt): day = today.day else: day = 15 - return datetime.date(year, month, day) + try: + creation_date = datetime.date(year, month, day) + except Exception: + raise InvalidMetadataError( + "The element in the section specified an incomplete date " + "that was not consistent with today's date. If you specify only a year, " + "it must be the four-digit current year. To use today's date, omit the " + "date tag or use ." 
+ ) + return creation_date def get_creation_date(self): return self.parse_creation_date(self.xmlroot.find("front/date")) @@ -269,3 +278,7 @@ def parser_msgs(self): class InvalidXMLError(Exception): """File is not valid XML""" pass + + +class InvalidMetadataError(Exception): + """XML is well-formed but has invalid metadata""" From 183cd995aa289632f194bff8b4b9c9b32a042200 Mon Sep 17 00:00:00 2001 From: Jennifer Richards Date: Fri, 21 Feb 2025 14:17:56 -0400 Subject: [PATCH 014/405] fix: maintain original TZID letter case (#8577) --- ietf/meeting/tests_views.py | 15 ++++++++++++++- ietf/templates/meeting/agenda.ics | 2 +- 2 files changed, 15 insertions(+), 2 deletions(-) diff --git a/ietf/meeting/tests_views.py b/ietf/meeting/tests_views.py index 848c9b7723..519f5f7c2d 100644 --- a/ietf/meeting/tests_views.py +++ b/ietf/meeting/tests_views.py @@ -385,7 +385,20 @@ def test_meeting_agenda(self): assert_ical_response_is_valid(self, r) self.assertContains(r, "BEGIN:VTIMEZONE") self.assertContains(r, "END:VTIMEZONE") - + self.assertContains(r, meeting.time_zone, msg_prefix="time_zone should appear in its original case") + self.assertNotEqual( + meeting.time_zone, + meeting.time_zone.lower(), + "meeting needs a mixed-case tz for this test", + ) + self.assertNotContains(r, meeting.time_zone.lower(), msg_prefix="time_zone should not be lower-cased") + self.assertNotEqual( + meeting.time_zone, + meeting.time_zone.upper(), + "meeting needs a mixed-case tz for this test", + ) + self.assertNotContains(r, meeting.time_zone.upper(), msg_prefix="time_zone should not be upper-cased") + # iCal, single group r = self.client.get(ical_url + "?show=" + session.group.parent.acronym.upper()) assert_ical_response_is_valid(self, r) diff --git a/ietf/templates/meeting/agenda.ics b/ietf/templates/meeting/agenda.ics index 8bc8222bbe..eb83dd479a 100644 --- a/ietf/templates/meeting/agenda.ics +++ b/ietf/templates/meeting/agenda.ics @@ -1,4 +1,4 @@ -{% load humanize tz %}{% autoescape off %}{% 
timezone schedule.meeting.tz %}{% with tzname=schedule.meeting.time_zone|lower %}{% load ietf_filters textfilters %}{% load cache %}{% cache 1800 ietf_meeting_agenda_ics schedule.meeting.number request.path request.GET %}BEGIN:VCALENDAR +{% load humanize tz %}{% autoescape off %}{% timezone schedule.meeting.tz %}{% with tzname=schedule.meeting.time_zone %}{% load ietf_filters textfilters %}{% load cache %}{% cache 1800 ietf_meeting_agenda_ics schedule.meeting.number request.path request.GET %}BEGIN:VCALENDAR VERSION:2.0 METHOD:PUBLISH PRODID:-//IETF//datatracker.ietf.org ical agenda//EN From a9a8f9ba0116aea7480e83a2dfc133196cf9280f Mon Sep 17 00:00:00 2001 From: Jennifer Richards Date: Fri, 21 Feb 2025 20:08:12 -0400 Subject: [PATCH 015/405] chore(deps): remove pydyf pin, update weasyprint req (#8580) * chore(deps): remove pydyf pin, update weasyprint req * chore(deps): drop pdf extra from xml2rfc dep This should come back when we use xml2rfc for PDF generation --- requirements.txt | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements.txt b/requirements.txt index 66a785e929..d8b6e0742f 100644 --- a/requirements.txt +++ b/requirements.txt @@ -57,7 +57,7 @@ oic>=1.3 # Used only by tests Pillow>=9.1.0 psycopg2>=2.9.6 pyang>=2.5.3 -pydyf>0.8.0,<0.10.0 # until weasyprint adjusts for 0.10.0 and later +pydyf>0.8.0 pyflakes>=2.4.0 pyopenssl>=22.0.0 # Used by urllib3.contrib, which is used by PyQuery but not marked as a dependency pyquery>=1.4.3 @@ -80,6 +80,6 @@ tlds>=2022042700 # Used to teach bleach about which TLDs currently exist tqdm>=4.64.0 Unidecode>=1.3.4 urllib3>=1.26,<2 -weasyprint>=59 -xml2rfc[pdf]>=3.23.0 +weasyprint>=64.1 +xml2rfc>=3.23.0 xym>=0.6,<1.0 From cb8ef96f364c3e76d6726f12889d534ab1b49a66 Mon Sep 17 00:00:00 2001 From: Jennifer Richards Date: Mon, 3 Mar 2025 11:21:39 -0400 Subject: [PATCH 016/405] fix: more submission date feedback; refactor xml2rfc log capture (#8621) * feat: catch and report any parsing error * 
refactor: error handling in a more testable way * fix: no bare `except` * test: exception cases for test_parse_creation_date * fix: explicitly reject non-numeric day/year * test: suppress xml2rfc output in test * refactor: context manager to capture xml2rfc output * refactor: more capture_xml2rfc_output usage * fix: capture_xml2rfc_output exception handling --- ietf/submit/utils.py | 162 ++++++++++++++++++++--------------------- ietf/utils/tests.py | 107 ++++++++++++++++++++++++++- ietf/utils/xmldraft.py | 66 ++++++++++++----- 3 files changed, 232 insertions(+), 103 deletions(-) diff --git a/ietf/submit/utils.py b/ietf/submit/utils.py index 61274c0116..a0c7dd8511 100644 --- a/ietf/submit/utils.py +++ b/ietf/submit/utils.py @@ -58,7 +58,7 @@ from ietf.utils.mail import is_valid_email from ietf.utils.text import parse_unicode, normalize_text from ietf.utils.timezone import date_today -from ietf.utils.xmldraft import InvalidMetadataError, XMLDraft +from ietf.utils.xmldraft import InvalidMetadataError, XMLDraft, capture_xml2rfc_output from ietf.person.name import unidecode_name @@ -926,105 +926,101 @@ def render_missing_formats(submission): If a txt file already exists, leaves it in place. Overwrites an existing html file if there is one. """ - # Capture stdio/stdout from xml2rfc - xml2rfc_stdout = io.StringIO() - xml2rfc_stderr = io.StringIO() - xml2rfc.log.write_out = xml2rfc_stdout - xml2rfc.log.write_err = xml2rfc_stderr - xml_path = staging_path(submission.name, submission.rev, '.xml') - parser = xml2rfc.XmlRfcParser(str(xml_path), quiet=True) - try: - # --- Parse the xml --- - xmltree = parser.parse(remove_comments=False) - except Exception as err: - raise XmlRfcError( - "Error parsing XML", - xml2rfc_stdout=xml2rfc_stdout.getvalue(), - xml2rfc_stderr=xml2rfc_stderr.getvalue(), - ) from err - # If we have v2, run it through v2v3. Keep track of the submitted version, though. 
- xmlroot = xmltree.getroot() - xml_version = xmlroot.get('version', '2') - if xml_version == '2': - v2v3 = xml2rfc.V2v3XmlWriter(xmltree) + with capture_xml2rfc_output() as xml2rfc_logs: + xml_path = staging_path(submission.name, submission.rev, '.xml') + parser = xml2rfc.XmlRfcParser(str(xml_path), quiet=True) try: - xmltree.tree = v2v3.convert2to3() + # --- Parse the xml --- + xmltree = parser.parse(remove_comments=False) except Exception as err: raise XmlRfcError( - "Error converting v2 XML to v3", - xml2rfc_stdout=xml2rfc_stdout.getvalue(), - xml2rfc_stderr=xml2rfc_stderr.getvalue(), + "Error parsing XML", + xml2rfc_stdout=xml2rfc_logs["stdout"].getvalue(), + xml2rfc_stderr=xml2rfc_logs["stderr"].getvalue(), ) from err - - # --- Prep the xml --- - today = date_today() - prep = xml2rfc.PrepToolWriter(xmltree, quiet=True, liberal=True, keep_pis=[xml2rfc.V3_PI_TARGET]) - prep.options.accept_prepped = True - prep.options.date = today - try: - xmltree.tree = prep.prep() - except RfcWriterError: - raise XmlRfcError( - f"Error during xml2rfc prep: {prep.errors}", - xml2rfc_stdout=xml2rfc_stdout.getvalue(), - xml2rfc_stderr=xml2rfc_stderr.getvalue(), - ) - except Exception as err: - raise XmlRfcError( - "Unexpected error during xml2rfc prep", - xml2rfc_stdout=xml2rfc_stdout.getvalue(), - xml2rfc_stderr=xml2rfc_stderr.getvalue(), - ) from err - - # --- Convert to txt --- - txt_path = staging_path(submission.name, submission.rev, '.txt') - if not txt_path.exists(): - writer = xml2rfc.TextWriter(xmltree, quiet=True) - writer.options.accept_prepped = True + # If we have v2, run it through v2v3. Keep track of the submitted version, though. 
+ xmlroot = xmltree.getroot() + xml_version = xmlroot.get('version', '2') + if xml_version == '2': + v2v3 = xml2rfc.V2v3XmlWriter(xmltree) + try: + xmltree.tree = v2v3.convert2to3() + except Exception as err: + raise XmlRfcError( + "Error converting v2 XML to v3", + xml2rfc_stdout=xml2rfc_logs["stdout"].getvalue(), + xml2rfc_stderr=xml2rfc_logs["stderr"].getvalue(), + ) from err + + # --- Prep the xml --- + today = date_today() + prep = xml2rfc.PrepToolWriter(xmltree, quiet=True, liberal=True, keep_pis=[xml2rfc.V3_PI_TARGET]) + prep.options.accept_prepped = True + prep.options.date = today + try: + xmltree.tree = prep.prep() + except RfcWriterError: + raise XmlRfcError( + f"Error during xml2rfc prep: {prep.errors}", + xml2rfc_stdout=xml2rfc_logs["stdout"].getvalue(), + xml2rfc_stderr=xml2rfc_logs["stderr"].getvalue(), + ) + except Exception as err: + raise XmlRfcError( + "Unexpected error during xml2rfc prep", + xml2rfc_stdout=xml2rfc_logs["stdout"].getvalue(), + xml2rfc_stderr=xml2rfc_logs["stderr"].getvalue(), + ) from err + + # --- Convert to txt --- + txt_path = staging_path(submission.name, submission.rev, '.txt') + if not txt_path.exists(): + writer = xml2rfc.TextWriter(xmltree, quiet=True) + writer.options.accept_prepped = True + writer.options.date = today + try: + writer.write(txt_path) + except Exception as err: + raise XmlRfcError( + "Error generating text format from XML", + xml2rfc_stdout=xml2rfc_logs["stdout"].getvalue(), + xml2rfc_stderr=xml2rfc_logs["stderr"].getvalue(), + ) from err + log.log( + 'In %s: xml2rfc %s generated %s from %s (version %s)' % ( + str(xml_path.parent), + xml2rfc.__version__, + txt_path.name, + xml_path.name, + xml_version, + ) + ) + # When the blobstores become autoritative - the guard at the + # containing if statement needs to be based on the store + with Path(txt_path).open("rb") as f: + store_file("staging", f"{submission.name}-{submission.rev}.txt", f) + + # --- Convert to html --- + html_path = 
staging_path(submission.name, submission.rev, '.html') + writer = xml2rfc.HtmlWriter(xmltree, quiet=True) writer.options.date = today try: - writer.write(txt_path) + writer.write(str(html_path)) except Exception as err: raise XmlRfcError( - "Error generating text format from XML", - xml2rfc_stdout=xml2rfc_stdout.getvalue(), - xml2rfc_stderr=xml2rfc_stderr.getvalue(), + "Error generating HTML format from XML", + xml2rfc_stdout=xml2rfc_logs["stdout"].getvalue(), + xml2rfc_stderr=xml2rfc_logs["stderr"].getvalue(), ) from err log.log( 'In %s: xml2rfc %s generated %s from %s (version %s)' % ( str(xml_path.parent), xml2rfc.__version__, - txt_path.name, + html_path.name, xml_path.name, xml_version, ) ) - # When the blobstores become autoritative - the guard at the - # containing if statement needs to be based on the store - with Path(txt_path).open("rb") as f: - store_file("staging", f"{submission.name}-{submission.rev}.txt", f) - - # --- Convert to html --- - html_path = staging_path(submission.name, submission.rev, '.html') - writer = xml2rfc.HtmlWriter(xmltree, quiet=True) - writer.options.date = today - try: - writer.write(str(html_path)) - except Exception as err: - raise XmlRfcError( - "Error generating HTML format from XML", - xml2rfc_stdout=xml2rfc_stdout.getvalue(), - xml2rfc_stderr=xml2rfc_stderr.getvalue(), - ) from err - log.log( - 'In %s: xml2rfc %s generated %s from %s (version %s)' % ( - str(xml_path.parent), - xml2rfc.__version__, - html_path.name, - xml_path.name, - xml_version, - ) - ) with Path(html_path).open("rb") as f: store_file("staging", f"{submission.name}-{submission.rev}.html", f) diff --git a/ietf/utils/tests.py b/ietf/utils/tests.py index 0a1986a608..2dd861cd11 100644 --- a/ietf/utils/tests.py +++ b/ietf/utils/tests.py @@ -23,6 +23,8 @@ from importlib import import_module from textwrap import dedent from tempfile import mkdtemp +from xml2rfc import log as xml2rfc_log +from xml2rfc.util.date import extract_date as xml2rfc_extract_date from 
django.apps import apps from django.contrib.auth.models import User @@ -57,7 +59,7 @@ from ietf.utils.test_utils import TestCase, unicontent from ietf.utils.text import parse_unicode from ietf.utils.timezone import timezone_not_near_midnight -from ietf.utils.xmldraft import XMLDraft +from ietf.utils.xmldraft import XMLDraft, InvalidMetadataError, capture_xml2rfc_output class SendingMail(TestCase): @@ -544,7 +546,7 @@ def test_get_refs_v2(self): def test_parse_creation_date(self): # override date_today to avoid skew when test runs around midnight today = datetime.date.today() - with patch("ietf.utils.xmldraft.date_today", return_value=today): + with capture_xml2rfc_output(), patch("ietf.utils.xmldraft.date_today", return_value=today): # Note: using a dict as a stand-in for XML elements, which rely on the get() method self.assertEqual( XMLDraft.parse_creation_date({"year": "2022", "month": "11", "day": "24"}), @@ -590,6 +592,74 @@ def test_parse_creation_date(self): ), datetime.date(today.year, 1 if today.month != 1 else 2, 15), ) + # Some exeception-inducing conditions + with self.assertRaises( + InvalidMetadataError, + msg="raise an InvalidMetadataError if a year-only date is not current", + ): + XMLDraft.parse_creation_date( + { + "year": str(today.year - 1), + "month": "", + "day": "", + } + ) + with self.assertRaises( + InvalidMetadataError, + msg="raise an InvalidMetadataError for a non-numeric year" + ): + XMLDraft.parse_creation_date( + { + "year": "two thousand twenty-five", + "month": "2", + "day": "28", + } + ) + with self.assertRaises( + InvalidMetadataError, + msg="raise an InvalidMetadataError for an invalid month" + ): + XMLDraft.parse_creation_date( + { + "year": "2024", + "month": "13", + "day": "28", + } + ) + with self.assertRaises( + InvalidMetadataError, + msg="raise an InvalidMetadataError for a misspelled month" + ): + XMLDraft.parse_creation_date( + { + "year": "2024", + "month": "Oktobur", + "day": "28", + } + ) + with self.assertRaises( + 
InvalidMetadataError, + msg="raise an InvalidMetadataError for an invalid day" + ): + XMLDraft.parse_creation_date( + { + "year": "2024", + "month": "feb", + "day": "31", + } + ) + with self.assertRaises( + InvalidMetadataError, + msg="raise an InvalidMetadataError for a non-numeric day" + ): + XMLDraft.parse_creation_date( + { + "year": "2024", + "month": "feb", + "day": "twenty-four", + } + ) + def test_parse_docname(self): with self.assertRaises(ValueError) as cm: @@ -671,6 +741,39 @@ def test_render_author_name(self): "J. Q.", ) + def test_capture_xml2rfc_output(self): + """capture_xml2rfc_output reroutes and captures xml2rfc logs""" + orig_write_out = xml2rfc_log.write_out + orig_write_err = xml2rfc_log.write_err + with capture_xml2rfc_output() as outer_log_streams: # ensure no output + # such meta! very Inception! + with capture_xml2rfc_output() as inner_log_streams: + # arbitrary xml2rfc method that triggers a log, nothing special otherwise + xml2rfc_extract_date({"year": "fish"}, datetime.date(2025,3,1)) + self.assertNotEqual(inner_log_streams, outer_log_streams) + self.assertEqual(xml2rfc_log.write_out, outer_log_streams["stdout"], "out stream should be restored") + self.assertEqual(xml2rfc_log.write_err, outer_log_streams["stderr"], "err stream should be restored") + self.assertEqual(xml2rfc_log.write_out, orig_write_out, "original out stream should be restored") + self.assertEqual(xml2rfc_log.write_err, orig_write_err, "original err stream should be restored") + + # don't happen to get any output on stdout and not paranoid enough to force some, just test stderr + self.assertGreater(len(inner_log_streams["stderr"].getvalue()), 0, "want output on inner streams") + self.assertEqual(len(outer_log_streams["stdout"].getvalue()), 0, "no output on outer streams") + self.assertEqual(len(outer_log_streams["stderr"].getvalue()), 0, "no output on outer streams") + + def test_capture_xml2rfc_output_exception_handling(self): + """capture_xml2rfc_output restores 
streams after an exception""" + orig_write_out = xml2rfc_log.write_out + orig_write_err = xml2rfc_log.write_err + with capture_xml2rfc_output() as outer_log_streams: # ensure no output + with self.assertRaises(RuntimeError), capture_xml2rfc_output() as inner_log_streams: + raise RuntimeError("nooo") + self.assertNotEqual(inner_log_streams, outer_log_streams) + self.assertEqual(xml2rfc_log.write_out, outer_log_streams["stdout"], "out stream should be restored") + self.assertEqual(xml2rfc_log.write_err, outer_log_streams["stderr"], "err stream should be restored") + self.assertEqual(xml2rfc_log.write_out, orig_write_out, "original out stream should be restored") + self.assertEqual(xml2rfc_log.write_err, orig_write_err, "original err stream should be restored") + class NameTests(TestCase): diff --git a/ietf/utils/xmldraft.py b/ietf/utils/xmldraft.py index 73baf917d8..3ac9a269c7 100644 --- a/ietf/utils/xmldraft.py +++ b/ietf/utils/xmldraft.py @@ -7,7 +7,7 @@ import debug # pyflakes: ignore -from contextlib import ExitStack +from contextlib import contextmanager from lxml.etree import XMLSyntaxError from xml2rfc.util.date import augment_date, extract_date from ietf.utils.timezone import date_today @@ -15,6 +15,21 @@ from .draft import Draft +@contextmanager +def capture_xml2rfc_output(): + orig_write_out = xml2rfc.log.write_out + orig_write_err = xml2rfc.log.write_err + parser_out = io.StringIO() + parser_err = io.StringIO() + xml2rfc.log.write_out = parser_out + xml2rfc.log.write_err = parser_err + try: + yield {"stdout": parser_out, "stderr": parser_err} + finally: + xml2rfc.log.write_out = orig_write_out + xml2rfc.log.write_err = orig_write_err + + class XMLDraft(Draft): """Draft from XML source @@ -38,27 +53,18 @@ def parse_xml(filename): Converts to xml2rfc v3 schema, then returns the root of the v3 tree and the original xml version. 
""" - orig_write_out = xml2rfc.log.write_out - orig_write_err = xml2rfc.log.write_err - parser_out = io.StringIO() - parser_err = io.StringIO() - - with ExitStack() as stack: - @stack.callback - def cleanup(): # called when context exited, even if there's an exception - xml2rfc.log.write_out = orig_write_out - xml2rfc.log.write_err = orig_write_err - - xml2rfc.log.write_out = parser_out - xml2rfc.log.write_err = parser_err + with capture_xml2rfc_output() as parser_logs: parser = xml2rfc.XmlRfcParser(filename, quiet=True) try: tree = parser.parse() except XMLSyntaxError: raise InvalidXMLError() except Exception as e: - raise XMLParseError(parser_out.getvalue(), parser_err.getvalue()) from e + raise XMLParseError( + parser_logs["stdout"].getvalue(), + parser_logs["stderr"].getvalue(), + ) from e xml_version = tree.getroot().get('version', '2') if xml_version == '2': @@ -147,10 +153,31 @@ def get_title(self): def parse_creation_date(date_elt): if date_elt is None: return None + today = date_today() - # ths mimics handling of date elements in the xml2rfc text/html writers - year, month, day = extract_date(date_elt, today) - year, month, day = augment_date(year, month, day, today) + + # Outright reject non-numeric year / day (xml2rfc's extract_date does not do this) + # (n.b., "year" can be non-numeric in a section per RFC 7991) + year = date_elt.get("year") + day = date_elt.get("day") + non_numeric_year = year and not year.isdigit() + non_numeric_day = day and not day.isdigit() + if non_numeric_day or non_numeric_year: + raise InvalidMetadataError( + "Unable to parse the element in the section: " + "year and day must be numeric values if specified." + ) + + try: + # ths mimics handling of date elements in the xml2rfc text/html writers + year, month, day = extract_date(date_elt, today) + year, month, day = augment_date(year, month, day, today) + except Exception as err: + # Give a generic error if anything goes wrong so far... 
+ raise InvalidMetadataError( + "Unable to parse the element in the section." + ) from err + if not day: # Must choose a day for a datetime.date. Per RFC 7991 sect 2.17, we use # today's date if it is consistent with the rest of the date. Otherwise, @@ -159,9 +186,12 @@ def parse_creation_date(date_elt): day = today.day else: day = 15 + try: creation_date = datetime.date(year, month, day) except Exception: + # If everything went well, we should have had a valid datetime, but we didn't. + # The parsing _worked_ but not in a way that we can go forward with. raise InvalidMetadataError( "The element in the section specified an incomplete date " "that was not consistent with today's date. If you specify only a year, " From 232a861f8ae52e1026d59d7088f07211acd166a5 Mon Sep 17 00:00:00 2001 From: Jennifer Richards Date: Mon, 3 Mar 2025 14:51:14 -0400 Subject: [PATCH 017/405] chore: config gunicorn secure_scheme_headers (#8632) * chore: config gunicorn secure_scheme_headers * chore: typo in comment --- dev/build/gunicorn.conf.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/dev/build/gunicorn.conf.py b/dev/build/gunicorn.conf.py index 6666a0d37d..032d95ee0d 100644 --- a/dev/build/gunicorn.conf.py +++ b/dev/build/gunicorn.conf.py @@ -1,5 +1,11 @@ # Copyright The IETF Trust 2024, All Rights Reserved +# Configure security scheme headers for forwarded requests. Cloudflare sets X-Forwarded-Proto +# for us. Don't trust any of the other similar headers. Only trust the header if it's coming +# from localhost, as all legitimate traffic will reach gunicorn via co-located nginx. +secure_scheme_headers = {"X-FORWARDED-PROTO": "https"} +forwarded_allow_ips = "127.0.0.1, ::1" # this is the default + # Log as JSON on stdout (to distinguish from Django's logs on stderr) # # This is applied as an update to gunicorn's glogging.CONFIG_DEFAULTS. 
From 554182ef8ab33947ca8d9ee904a5d5472d3c57f8 Mon Sep 17 00:00:00 2001 From: Robert Sparks Date: Tue, 4 Mar 2025 11:42:04 -0600 Subject: [PATCH 018/405] feat: run the docker container as dev (#8606) * feat: run the docker container as dev * fix: $@ -> $* Old bug, but might as well fix it now --------- Co-authored-by: Jennifer Richards --- dev/celery/docker-init.sh | 13 ++++++--- docker-compose.yml | 4 ++- docker/celery.Dockerfile | 60 +++++++++++++++++++++++++++++++++++++++ 3 files changed, 72 insertions(+), 5 deletions(-) create mode 100644 docker/celery.Dockerfile diff --git a/dev/celery/docker-init.sh b/dev/celery/docker-init.sh index 4fd1f1294f..9940dfd7d0 100755 --- a/dev/celery/docker-init.sh +++ b/dev/celery/docker-init.sh @@ -49,11 +49,16 @@ if [[ -n "${CELERY_GID}" ]]; then fi run_as_celery_uid () { - SU_OPTS=() - if [[ -n "${CELERY_GROUP}" ]]; then - SU_OPTS+=("-g" "${CELERY_GROUP}") + IAM=$(whoami) + if [ "${IAM}" = "${CELERY_USERNAME:-root}" ]; then + SU_OPTS=() + if [[ -n "${CELERY_GROUP}" ]]; then + SU_OPTS+=("-g" "${CELERY_GROUP}") + fi + su "${SU_OPTS[@]}" "${CELERY_USERNAME:-root}" -s /bin/sh -c "$*" + else + /bin/sh -c "$*" fi - su "${SU_OPTS[@]}" "${CELERY_USERNAME:-root}" -s /bin/sh -c "$@" } log_term_timing_msgs () { diff --git a/docker-compose.yml b/docker-compose.yml index 30ce8ba4d2..9910c02a99 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -67,7 +67,9 @@ services: restart: unless-stopped celery: - image: ghcr.io/ietf-tools/datatracker-celery:latest + build: + context: . + dockerfile: docker/celery.Dockerfile init: true environment: CELERY_APP: ietf diff --git a/docker/celery.Dockerfile b/docker/celery.Dockerfile new file mode 100644 index 0000000000..e44200398c --- /dev/null +++ b/docker/celery.Dockerfile @@ -0,0 +1,60 @@ +FROM ghcr.io/ietf-tools/datatracker-celery:latest +LABEL maintainer="IETF Tools Team " + +ENV DEBIAN_FRONTEND=noninteractive + +# Install needed packages and setup non-root user. 
+ARG USERNAME=dev +ARG USER_UID=1000 +ARG USER_GID=$USER_UID +COPY docker/scripts/app-setup-debian.sh /tmp/library-scripts/docker-setup-debian.sh +RUN sed -i 's/\r$//' /tmp/library-scripts/docker-setup-debian.sh && chmod +x /tmp/library-scripts/docker-setup-debian.sh + +# Add Postgresql Apt Repository to get 14 +RUN echo "deb http://apt.postgresql.org/pub/repos/apt $(. /etc/os-release && echo "$VERSION_CODENAME")-pgdg main" | tee /etc/apt/sources.list.d/pgdg.list +RUN wget --quiet -O - https://www.postgresql.org/media/keys/ACCC4CF8.asc | apt-key add - + +RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \ + && apt-get install -y --no-install-recommends postgresql-client-14 pgloader \ + # Remove imagemagick due to https://security-tracker.debian.org/tracker/CVE-2019-10131 + && apt-get purge -y imagemagick imagemagick-6-common \ + # Install common packages, non-root user + # Syntax: ./docker-setup-debian.sh [install zsh flag] [username] [user UID] [user GID] [upgrade packages flag] [install Oh My Zsh! 
flag] [Add non-free packages] + && bash /tmp/library-scripts/docker-setup-debian.sh "true" "${USERNAME}" "${USER_UID}" "${USER_GID}" "false" "true" "true" + +# Setup default python tools in a venv via pipx to avoid conflicts +ENV PIPX_HOME=/usr/local/py-utils \ + PIPX_BIN_DIR=/usr/local/py-utils/bin +ENV PATH=${PATH}:${PIPX_BIN_DIR} +COPY docker/scripts/app-setup-python.sh /tmp/library-scripts/docker-setup-python.sh +RUN sed -i 's/\r$//' /tmp/library-scripts/docker-setup-python.sh && chmod +x /tmp/library-scripts/docker-setup-python.sh +RUN bash /tmp/library-scripts/docker-setup-python.sh "none" "/usr/local" "${PIPX_HOME}" "${USERNAME}" + +# Remove library scripts for final image +RUN rm -rf /tmp/library-scripts + +# Copy the startup file +COPY dev/celery/docker-init.sh /docker-init.sh +RUN sed -i 's/\r$//' /docker-init.sh && \ + chmod +x /docker-init.sh + +ENTRYPOINT [ "/docker-init.sh" ] + +# Fix user UID / GID to match host +RUN groupmod --gid $USER_GID $USERNAME \ + && usermod --uid $USER_UID --gid $USER_GID $USERNAME \ + && chown -R $USER_UID:$USER_GID /home/$USERNAME \ + || exit 0 + +# Switch to local dev user +USER dev:dev + +# Install current datatracker python dependencies +COPY requirements.txt /tmp/pip-tmp/ +RUN pip3 --disable-pip-version-check --no-cache-dir install --user --no-warn-script-location -r /tmp/pip-tmp/requirements.txt +RUN pip3 --disable-pip-version-check --no-cache-dir install --user --no-warn-script-location watchdog[watchmedo] + +RUN sudo rm -rf /tmp/pip-tmp + +VOLUME [ "/assets" ] + From cf6340443f7437ac23ff65c981741335a0911363 Mon Sep 17 00:00:00 2001 From: Nicolas Giard Date: Sat, 8 Mar 2025 00:56:14 -0500 Subject: [PATCH 019/405] docs: Update README.md --- README.md | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/README.md b/README.md index 0ece0eb03b..abebb7ca02 100644 --- a/README.md +++ b/README.md @@ -44,6 +44,7 @@ This project is following the standard **Git Feature Workflow** development 
model. Learn about all the various steps of the development workflow, from creating a fork to submitting a pull request, in the [Contributing](https://github.com/ietf-tools/.github/blob/main/CONTRIBUTING.md) guide. +> [!TIP] > Make sure to read the [Styleguides](https://github.com/ietf-tools/.github/blob/main/CONTRIBUTING.md#styleguides) section to ensure a cohesive code format across the project. You can submit bug reports, enhancement and new feature requests in the [discussions](https://github.com/ietf-tools/datatracker/discussions) area. Accepted tickets will be converted to issues. @@ -52,7 +53,8 @@ You can submit bug reports, enhancement and new feature requests in the [discuss Click the Fork button in the top-right corner of the repository to create a personal copy that you can work on. -> Note that some GitHub Actions might be enabled by default in your fork. You should disable them by going to **Settings** > **Actions** > **General** and selecting **Disable actions** (then Save). +> [!NOTE] +> Some GitHub Actions might be enabled by default in your fork. You should disable them by going to **Settings** > **Actions** > **General** and selecting **Disable actions** (then Save). #### Git Cloning Tips @@ -104,7 +106,8 @@ Read the [Docker Dev Environment](docker/README.md) guide to get started. Nightly database dumps of the datatracker are available as Docker images: `ghcr.io/ietf-tools/datatracker-db:latest` -> Note that to update the database in your dev environment to the latest version, you should run the `docker/cleandb` script. +> [!TIP] +> In order to update the database in your dev environment to the latest version, you should run the `docker/cleandb` script. ### Blob storage for dev/test @@ -248,6 +251,7 @@ From a datatracker container, run the command: ./ietf/manage.py test --settings=settings_test ``` +> [!TIP] > You can limit the run to specific tests using the `--pattern` argument. 
### Frontend Tests @@ -257,11 +261,13 @@ Frontend tests are done via Playwright. There're 2 different type of tests: - Tests that test Vue pages / components and run natively without any external dependency. - Tests that require a running datatracker instance to test against (usually legacy views). +> [!IMPORTANT] > Make sure you have Node.js 16.x or later installed on your machine. #### Run Vue Tests -> :warning: All commands below **MUST** be run from the `./playwright` directory, unless noted otherwise. +> [!WARNING] +> All commands below **MUST** be run from the `./playwright` directory, unless noted otherwise. 1. Run **once** to install dependencies on your system: ```sh @@ -294,7 +300,8 @@ Frontend tests are done via Playwright. There're 2 different type of tests: First, you need to start a datatracker instance (dev or prod), ideally from a docker container, exposing the 8000 port. -> :warning: All commands below **MUST** be run from the `./playwright` directory. +> [!WARNING] +> All commands below **MUST** be run from the `./playwright` directory. 1. Run **once** to install dependencies on your system: ```sh From cf21c4129a3d083980297dcaa82b5fd58bf447f6 Mon Sep 17 00:00:00 2001 From: Nicolas Giard Date: Sat, 8 Mar 2025 00:59:26 -0500 Subject: [PATCH 020/405] docs: Update README.md --- docker/README.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docker/README.md b/docker/README.md index 14fcc38995..f2161a173f 100644 --- a/docker/README.md +++ b/docker/README.md @@ -4,11 +4,12 @@ 1. [Set up Docker](https://docs.docker.com/get-started/) on your preferred platform. On Windows, it is highly recommended to use the [WSL 2 *(Windows Subsystem for Linux)*](https://docs.docker.com/desktop/windows/wsl/) backend. +> [!IMPORTANT] > See the [IETF Tools Windows Dev guide](https://github.com/ietf-tools/.github/blob/main/docs/windows-dev.md) on how to get started when using Windows. 2. 
On Linux, you must [install Docker Compose manually](https://docs.docker.com/compose/install/linux/#install-the-plugin-manually) and not install Docker Desktop. On Mac and Windows install Docker Desktop which already includes Docker Compose. -2. If you have a copy of the datatracker code checked out already, simply `cd` to the top-level directory. +3. If you have a copy of the datatracker code checked out already, simply `cd` to the top-level directory. If not, check out a datatracker branch as usual. We'll check out `main` below, but you can use any branch: @@ -18,7 +19,7 @@ git checkout main ``` -3. Follow the instructions for your preferred editor: +4. Follow the instructions for your preferred editor: - [Visual Studio Code](#using-visual-studio-code) - [Other Editors / Generic](#using-other-editors--generic) @@ -189,7 +190,6 @@ The content of the source files will be copied into the target `.ics` files. Mak Because including all assets in the image would significantly increase the file size, they are not included by default. 
You can however fetch them by running the **Fetch assets via rsync** task in VS Code or run manually the script `docker/scripts/app-rsync-extras.sh` - ### Linux file permissions leaking to the host system If on the host filesystem you have permissions that look like this, From 06158c05c77b89434197a626d7cfcf04b8b1e14a Mon Sep 17 00:00:00 2001 From: Nicolas Giard Date: Mon, 10 Mar 2025 16:28:20 -0400 Subject: [PATCH 021/405] chore: Remove deprecated version from docker-compose.extend.yml --- .devcontainer/docker-compose.extend.yml | 2 -- 1 file changed, 2 deletions(-) diff --git a/.devcontainer/docker-compose.extend.yml b/.devcontainer/docker-compose.extend.yml index 286eefb29c..a92f42bc6d 100644 --- a/.devcontainer/docker-compose.extend.yml +++ b/.devcontainer/docker-compose.extend.yml @@ -1,5 +1,3 @@ -version: '3.8' - services: app: environment: From 887ec11f3916d19da04dd939a0aa2edd697f91fd Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 15 Mar 2025 00:43:17 -0400 Subject: [PATCH 022/405] chore(deps): bump ncipollo/release-action from 1.15.0 to 1.16.0 (#8594) Bumps [ncipollo/release-action](https://github.com/ncipollo/release-action) from 1.15.0 to 1.16.0. - [Release notes](https://github.com/ncipollo/release-action/releases) - [Commits](https://github.com/ncipollo/release-action/compare/v1.15.0...v1.16.0) --- updated-dependencies: - dependency-name: ncipollo/release-action dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/build.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index a14ea73e9b..9f621e16b7 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -97,7 +97,7 @@ jobs: echo "IS_RELEASE=true" >> $GITHUB_ENV - name: Create Draft Release - uses: ncipollo/release-action@v1.15.0 + uses: ncipollo/release-action@v1.16.0 if: ${{ github.ref_name == 'release' }} with: prerelease: true @@ -316,7 +316,7 @@ jobs: histCoveragePath: historical-coverage.json - name: Create Release - uses: ncipollo/release-action@v1.15.0 + uses: ncipollo/release-action@v1.16.0 if: ${{ env.SHOULD_DEPLOY == 'true' }} with: allowUpdates: true @@ -329,7 +329,7 @@ jobs: token: ${{ secrets.GITHUB_TOKEN }} - name: Update Baseline Coverage - uses: ncipollo/release-action@v1.15.0 + uses: ncipollo/release-action@v1.16.0 if: ${{ github.event.inputs.updateCoverage == 'true' || github.ref_name == 'release' }} with: allowUpdates: true From e56c6cae272cb482a6d33f1f7e24c7ba8a48361d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 15 Mar 2025 00:43:47 -0400 Subject: [PATCH 023/405] chore(deps): bump actions/download-artifact from 4.1.8 to 4.1.9 (#8628) Bumps [actions/download-artifact](https://github.com/actions/download-artifact) from 4.1.8 to 4.1.9. - [Release notes](https://github.com/actions/download-artifact/releases) - [Commits](https://github.com/actions/download-artifact/compare/v4.1.8...v4.1.9) --- updated-dependencies: - dependency-name: actions/download-artifact dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/build.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 9f621e16b7..123bd5c65a 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -185,7 +185,7 @@ jobs: - name: Download a Coverage Results if: ${{ github.event.inputs.skiptests == 'false' || github.ref_name == 'release' }} - uses: actions/download-artifact@v4.1.8 + uses: actions/download-artifact@v4.1.9 with: name: coverage @@ -292,7 +292,7 @@ jobs: - name: Download Coverage Results if: ${{ github.event.inputs.skiptests == 'false' || github.ref_name == 'release' }} - uses: actions/download-artifact@v4.1.8 + uses: actions/download-artifact@v4.1.9 with: name: coverage @@ -407,7 +407,7 @@ jobs: - uses: actions/checkout@v4 - name: Download a Release Artifact - uses: actions/download-artifact@v4.1.8 + uses: actions/download-artifact@v4.1.9 with: name: release-${{ env.PKG_VERSION }} From 9db109f692d8592c5407ea82d2c2790eb6006263 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 15 Mar 2025 00:44:18 -0400 Subject: [PATCH 024/405] chore(deps): bump appleboy/ssh-action from 1.2.0 to 1.2.2 (#8650) Bumps [appleboy/ssh-action](https://github.com/appleboy/ssh-action) from 1.2.0 to 1.2.2. - [Release notes](https://github.com/appleboy/ssh-action/releases) - [Changelog](https://github.com/appleboy/ssh-action/blob/master/.goreleaser.yaml) - [Commits](https://github.com/appleboy/ssh-action/compare/7eaf76671a0d7eec5d98ee897acda4f968735a17...2ead5e36573f08b82fbfce1504f1a4b05a647c6f) --- updated-dependencies: - dependency-name: appleboy/ssh-action dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/tests-az.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/tests-az.yml b/.github/workflows/tests-az.yml index 6d53a121aa..8553563a19 100644 --- a/.github/workflows/tests-az.yml +++ b/.github/workflows/tests-az.yml @@ -38,7 +38,7 @@ jobs: ssh-keyscan -t rsa $vminfo >> ~/.ssh/known_hosts - name: Remote SSH into VM - uses: appleboy/ssh-action@7eaf76671a0d7eec5d98ee897acda4f968735a17 + uses: appleboy/ssh-action@2ead5e36573f08b82fbfce1504f1a4b05a647c6f env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} with: From 227b44bfa25036e7d4fea86a72405beb4e53e2ce Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 15 Mar 2025 00:45:42 -0400 Subject: [PATCH 025/405] chore(deps): bump nanoid (#8649) Bumps the npm group with 1 update in the /dev/deploy-to-container directory: [nanoid](https://github.com/ai/nanoid). Updates `nanoid` from 5.0.9 to 5.1.3 - [Release notes](https://github.com/ai/nanoid/releases) - [Changelog](https://github.com/ai/nanoid/blob/main/CHANGELOG.md) - [Commits](https://github.com/ai/nanoid/compare/5.0.9...5.1.3) --- updated-dependencies: - dependency-name: nanoid dependency-type: direct:production update-type: version-update:semver-minor dependency-group: npm ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- dev/deploy-to-container/package-lock.json | 15 ++++++++------- dev/deploy-to-container/package.json | 2 +- 2 files changed, 9 insertions(+), 8 deletions(-) diff --git a/dev/deploy-to-container/package-lock.json b/dev/deploy-to-container/package-lock.json index 03327083b1..c4f675527e 100644 --- a/dev/deploy-to-container/package-lock.json +++ b/dev/deploy-to-container/package-lock.json @@ -8,7 +8,7 @@ "dependencies": { "dockerode": "^4.0.4", "fs-extra": "^11.3.0", - "nanoid": "5.0.9", + "nanoid": "5.1.3", "nanoid-dictionary": "5.0.0-beta.1", "slugify": "1.6.6", "tar": "^7.4.3", @@ -668,15 +668,16 @@ "optional": true }, "node_modules/nanoid": { - "version": "5.0.9", - "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-5.0.9.tgz", - "integrity": "sha512-Aooyr6MXU6HpvvWXKoVoXwKMs/KyVakWwg7xQfv5/S/RIgJMy0Ifa45H9qqYy7pTCszrHzP21Uk4PZq2HpEM8Q==", + "version": "5.1.3", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-5.1.3.tgz", + "integrity": "sha512-zAbEOEr7u2CbxwoMRlz/pNSpRP0FdAU4pRaYunCdEezWohXFs+a0Xw7RfkKaezMsmSM1vttcLthJtwRnVtOfHQ==", "funding": [ { "type": "github", "url": "https://github.com/sponsors/ai" } ], + "license": "MIT", "bin": { "nanoid": "bin/nanoid.js" }, @@ -1612,9 +1613,9 @@ "optional": true }, "nanoid": { - "version": "5.0.9", - "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-5.0.9.tgz", - "integrity": "sha512-Aooyr6MXU6HpvvWXKoVoXwKMs/KyVakWwg7xQfv5/S/RIgJMy0Ifa45H9qqYy7pTCszrHzP21Uk4PZq2HpEM8Q==" + "version": "5.1.3", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-5.1.3.tgz", + "integrity": "sha512-zAbEOEr7u2CbxwoMRlz/pNSpRP0FdAU4pRaYunCdEezWohXFs+a0Xw7RfkKaezMsmSM1vttcLthJtwRnVtOfHQ==" }, "nanoid-dictionary": { "version": "5.0.0-beta.1", diff --git a/dev/deploy-to-container/package.json b/dev/deploy-to-container/package.json index 1f54745ebf..8772c568c8 100644 --- 
a/dev/deploy-to-container/package.json +++ b/dev/deploy-to-container/package.json @@ -4,7 +4,7 @@ "dependencies": { "dockerode": "^4.0.4", "fs-extra": "^11.3.0", - "nanoid": "5.0.9", + "nanoid": "5.1.3", "nanoid-dictionary": "5.0.0-beta.1", "slugify": "1.6.6", "tar": "^7.4.3", From 968820de34bdf6d0c1fd69b921ed490da725b941 Mon Sep 17 00:00:00 2001 From: Jennifer Richards Date: Sat, 15 Mar 2025 14:45:04 +0700 Subject: [PATCH 026/405] feat: celery task + admin to resend Messages (#8661) * feat: Message re-send task * feat: admin action to queue redelivery * feat: MessageAdmin list_filters * feat: show sent status * feat: better date filtering * chore: remove send-by-date task Adds complexity and risk - the improved Messages admin lets us do most of what it did without the opportunity for accidentally resending huge ranges * chore: fill in empty docstring * style: black * fix: unused import * feat: better logging * chore: mypy lint * test: test retry_send_messages_by_pk_task * test: test retry_send_messages --- ietf/message/admin.py | 79 +++++++++++++++++++++++++++++++--- ietf/message/tasks.py | 24 ++++++++++- ietf/message/tests.py | 59 +++++++++++++++++++++++-- ietf/message/utils.py | 36 ++++++++++++++-- ietf/settings.py | 1 + ietf/templates/admin/base.html | 1 + requirements.txt | 1 + 7 files changed, 187 insertions(+), 14 deletions(-) diff --git a/ietf/message/admin.py b/ietf/message/admin.py index c2564c04b9..250e1eb596 100644 --- a/ietf/message/admin.py +++ b/ietf/message/admin.py @@ -1,32 +1,99 @@ -from django.contrib import admin +# Copyright The IETF Trust 2012-2025, All Rights Reserved +from django.contrib import admin, messages +from django.db.models import QuerySet +from rangefilter.filters import DateRangeQuickSelectListFilterBuilder from ietf.message.models import Message, MessageAttachment, SendQueue, AnnouncementFrom +from ietf.message.tasks import retry_send_messages_by_pk_task + + +class MessageSentStatusListFilter(admin.SimpleListFilter): + """Filter 
Messages by whether or not they were sent""" + + title = "status" + parameter_name = "status" + + def lookups(self, request, model_admin): + return [ + ("sent", "Sent"), + ("unsent", "Not sent"), + ] + + def queryset(self, request, queryset): + if self.value() == "unsent": + return queryset.filter(sent__isnull=True) + elif self.value() == "sent": + return queryset.filter(sent__isnull=False) + class MessageAdmin(admin.ModelAdmin): - list_display = ["subject", "by", "time", "groups"] + list_display = ["sent_status", "subject", "by", "time", "groups"] search_fields = ["subject", "body"] raw_id_fields = ["by", "related_groups", "related_docs"] + list_filter = [ + MessageSentStatusListFilter, + ("time", DateRangeQuickSelectListFilterBuilder()), + ] ordering = ["-time"] + actions = ["retry_send"] def groups(self, instance): return ", ".join(g.acronym for g in instance.related_groups.all()) + + @admin.display(description="Sent", boolean=True) + def sent_status(self, instance): + return instance.sent is not None + + @admin.action(description="Send selected messages if unsent") + def retry_send(self, request, queryset: QuerySet[Message]): + try: + retry_send_messages_by_pk_task.delay( + message_pks=list(queryset.values_list("pk", flat=True)), + resend=False, + ) + except Exception as err: + self.message_user( + request, + f"Error: {repr(err)}", + messages.ERROR, + ) + else: + self.message_user(request, "Messages queued for delivery", messages.SUCCESS) + + admin.site.register(Message, MessageAdmin) + class MessageAttachmentAdmin(admin.ModelAdmin): - list_display = ['id', 'message', 'filename', 'removed',] - raw_id_fields = ['message'] + list_display = [ + "id", + "message", + "filename", + "removed", + ] + raw_id_fields = ["message"] + + admin.site.register(MessageAttachment, MessageAttachmentAdmin) + class SendQueueAdmin(admin.ModelAdmin): list_display = ["time", "by", "message", "send_at", "sent_at"] list_filter = ["time", "send_at", "sent_at"] search_fields = 
["message__body"] raw_id_fields = ["by", "message"] ordering = ["-time"] + + admin.site.register(SendQueue, SendQueueAdmin) + class AnnouncementFromAdmin(admin.ModelAdmin): - list_display = ['name', 'group', 'address', ] -admin.site.register(AnnouncementFrom, AnnouncementFromAdmin) + list_display = [ + "name", + "group", + "address", + ] +admin.site.register(AnnouncementFrom, AnnouncementFromAdmin) diff --git a/ietf/message/tasks.py b/ietf/message/tasks.py index efd776b9d8..1fdff7bea4 100644 --- a/ietf/message/tasks.py +++ b/ietf/message/tasks.py @@ -5,8 +5,8 @@ from celery import shared_task from smtplib import SMTPException -from ietf.message.utils import send_scheduled_message_from_send_queue -from ietf.message.models import SendQueue +from ietf.message.utils import send_scheduled_message_from_send_queue, retry_send_messages +from ietf.message.models import SendQueue, Message from ietf.utils import log from ietf.utils.mail import log_smtp_exception, send_error_email @@ -25,3 +25,23 @@ def send_scheduled_mail_task(): except SMTPException as e: log_smtp_exception(e) send_error_email(e) + + +@shared_task +def retry_send_messages_by_pk_task(message_pks: list, resend=False): + """Task to retry sending Messages by PK + + Sends Messages whose PK is included in the list. + Only previously unsent messages are sent unless `resend` is true. 
+ """ + log.log( + "retry_send_messages_by_pk_task: " + "retrying send of Message PKs [{}] (resend={})".format( + ", ".join(str(pk) for pk in message_pks), + resend, + ) + ) + retry_send_messages( + messages=Message.objects.filter(pk__in=message_pks), + resend=resend, + ) diff --git a/ietf/message/tests.py b/ietf/message/tests.py index 7fbd29167c..a677d5477e 100644 --- a/ietf/message/tests.py +++ b/ietf/message/tests.py @@ -11,10 +11,10 @@ import debug # pyflakes:ignore from ietf.group.factories import GroupFactory -from ietf.message.factories import SendQueueFactory +from ietf.message.factories import MessageFactory, SendQueueFactory from ietf.message.models import Message, SendQueue -from ietf.message.tasks import send_scheduled_mail_task -from ietf.message.utils import send_scheduled_message_from_send_queue +from ietf.message.tasks import send_scheduled_mail_task, retry_send_messages_by_pk_task +from ietf.message.utils import send_scheduled_message_from_send_queue, retry_send_messages from ietf.person.models import Person from ietf.utils.mail import outbox, send_mail_text, send_mail_message, get_payload_text from ietf.utils.test_utils import TestCase @@ -133,6 +133,44 @@ def test_send_mime_announcement(self): self.assertTrue(SendQueue.objects.get(id=q.id).sent_at) +class UtilsTests(TestCase): + @mock.patch("ietf.message.utils.send_mail_message") + def test_retry_send_messages(self, mock_send_mail_message): + sent_message = MessageFactory(sent=timezone.now()) + unsent_messages = MessageFactory.create_batch(2, sent=None) + + # Send the sent message and one of the unsent messages + retry_send_messages( + Message.objects.filter(pk__in=[ + sent_message.pk, + unsent_messages[0].pk, + ]), + resend=False, + ) + self.assertEqual(mock_send_mail_message.call_count, 1) + self.assertEqual( + mock_send_mail_message.call_args.args[1], + unsent_messages[0], + ) + + mock_send_mail_message.reset_mock() + # Once again, send the sent message and one of the unsent messages + # (we 
can use the same one because our mock prevented it from having + # its status updated to sent) + retry_send_messages( + Message.objects.filter(pk__in=[ + sent_message.pk, + unsent_messages[0].pk, + ]), + resend=True, + ) + self.assertEqual(mock_send_mail_message.call_count, 2) + self.assertCountEqual( + [call_args.args[1] for call_args in mock_send_mail_message.call_args_list], + [sent_message, unsent_messages[0]], + ) + + class TaskTests(TestCase): @mock.patch("ietf.message.tasks.log_smtp_exception") @mock.patch("ietf.message.tasks.send_scheduled_message_from_send_queue") @@ -150,3 +188,18 @@ def test_send_scheduled_mail_task(self, mock_send_message, mock_log_smtp_excepti self.assertEqual(mock_send_message.call_count, 1) self.assertEqual(mock_send_message.call_args[0], (not_yet_sent,)) self.assertTrue(mock_log_smtp_exception.called) + + @mock.patch("ietf.message.tasks.retry_send_messages") + def test_retry_send_messages_by_pk_task(self, mock_retry_send): + msgs = MessageFactory.create_batch(3) + MessageFactory() # an extra message that won't be resent + + retry_send_messages_by_pk_task([msg.pk for msg in msgs], resend=False) + called_with_messages = mock_retry_send.call_args.kwargs["messages"] + self.assertCountEqual(msgs, called_with_messages) + self.assertFalse(mock_retry_send.call_args.kwargs["resend"]) + + retry_send_messages_by_pk_task([msg.pk for msg in msgs], resend=True) + called_with_messages = mock_retry_send.call_args.kwargs["messages"] + self.assertCountEqual(msgs, called_with_messages) + self.assertTrue(mock_retry_send.call_args.kwargs["resend"]) diff --git a/ietf/message/utils.py b/ietf/message/utils.py index 2601eccab8..74448ca7c9 100644 --- a/ietf/message/utils.py +++ b/ietf/message/utils.py @@ -1,13 +1,17 @@ # Copyright The IETF Trust 2012-2020, All Rights Reserved # -*- coding: utf-8 -*- +import email +import email.utils +import re +import smtplib -import re, email - +from django.db.models import QuerySet from django.utils import timezone from 
django.utils.encoding import force_str -from ietf.utils.mail import send_mail_text, send_mail_mime +from ietf.utils import log +from ietf.utils.mail import send_mail_text, send_mail_mime, send_mail_message from ietf.message.models import Message first_dot_on_line_re = re.compile(r'^\.', re.MULTILINE) @@ -58,3 +62,29 @@ def send_scheduled_message_from_send_queue(queue_item): queue_item.message.sent = queue_item.sent_at queue_item.message.save() + + +def retry_send_messages(messages: QuerySet[Message], resend=False): + """Attempt delivery of Messages""" + if not resend: + # only include sent messages on explicit request + for already_sent in messages.filter(sent__isnull=False): + assert already_sent.sent is not None # appease mypy type checking + log.log( + f"retry_send_messages: skipping {already_sent.pk} " + f"(already sent {already_sent.sent.isoformat(timespec='milliseconds')})" + ) + messages = messages.filter(sent__isnull=True) + for msg in messages: + to = ",".join(a[1] for a in email.utils.getaddresses([msg.to])) + try: + send_mail_message(None, msg) + log.log( + f'retry_send_messages: ' + f'sent {msg.pk} {msg.frm} -> {to} "{msg.subject.strip()}"' + ) + except smtplib.SMTPException as e: + log.log( + f'retry_send_messages: ' + f'Failure {e}: {msg.pk} {msg.frm} -> {to} "{msg.subject.strip()}"' + ) diff --git a/ietf/settings.py b/ietf/settings.py index faee42237c..1fe5f48229 100644 --- a/ietf/settings.py +++ b/ietf/settings.py @@ -465,6 +465,7 @@ def skip_unreadable_post(record): 'drf_spectacular', 'drf_standardized_errors', 'rest_framework', + 'rangefilter', 'simple_history', 'tastypie', 'widget_tweaks', diff --git a/ietf/templates/admin/base.html b/ietf/templates/admin/base.html index 9ca7377a54..d48891dfc4 100644 --- a/ietf/templates/admin/base.html +++ b/ietf/templates/admin/base.html @@ -20,6 +20,7 @@ --header-color: var(--bs-secondary); --breadcrumbs-fg: var(--bs-secondary); --breadcrumbs-link-fg: var(--link-fg); + .calendar caption { background-color: 
var(--secondary);} } span.text-danger { color: var(--bs-danger); } diff --git a/requirements.txt b/requirements.txt index d8b6e0742f..8bd906c220 100644 --- a/requirements.txt +++ b/requirements.txt @@ -13,6 +13,7 @@ celery>=5.2.6 coverage>=4.5.4,<5.0 # Coverage 5.x moves from a json database to SQLite. Moving to 5.x will require substantial rewrites in ietf.utils.test_runner and ietf.release.views defusedxml>=0.7.1 # for TastyPie when using xml; not a declared dependency Django>4.2,<5 +django-admin-rangefilter>=0.13.2 django-analytical>=3.1.0 django-bootstrap5>=21.3 django-celery-beat>=2.3.0 From 48211414dfc992e4a51f0d2de9183eab5c44dad0 Mon Sep 17 00:00:00 2001 From: rjsparks <10996692+rjsparks@users.noreply.github.com> Date: Sat, 15 Mar 2025 07:58:06 +0000 Subject: [PATCH 027/405] ci: update base image target version to 20250315T0745 --- dev/build/Dockerfile | 2 +- dev/build/TARGET_BASE | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/dev/build/Dockerfile b/dev/build/Dockerfile index 852ba43c80..2b02a091c5 100644 --- a/dev/build/Dockerfile +++ b/dev/build/Dockerfile @@ -1,4 +1,4 @@ -FROM ghcr.io/ietf-tools/datatracker-app-base:20250128T1728 +FROM ghcr.io/ietf-tools/datatracker-app-base:20250315T0745 LABEL maintainer="IETF Tools Team " ENV DEBIAN_FRONTEND=noninteractive diff --git a/dev/build/TARGET_BASE b/dev/build/TARGET_BASE index fbc9426744..e6f490b168 100644 --- a/dev/build/TARGET_BASE +++ b/dev/build/TARGET_BASE @@ -1 +1 @@ -20250128T1728 +20250315T0745 From 200d2bd164224694c0524e734ea6e46b62f3998a Mon Sep 17 00:00:00 2001 From: Russ Housley Date: Sat, 15 Mar 2025 04:23:35 -0400 Subject: [PATCH 028/405] fix(review_info template): display correct date for I-D to be reviewed --- ietf/templates/doc/review/request_info.html | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/ietf/templates/doc/review/request_info.html b/ietf/templates/doc/review/request_info.html index ee46916b43..9ad126d59e 100644 --- 
a/ietf/templates/doc/review/request_info.html +++ b/ietf/templates/doc/review/request_info.html @@ -96,7 +96,7 @@ {% endif %} - {% if doc.time %} + {% if review_req.doc.time %} @@ -104,7 +104,10 @@ I-D last updated - {{ doc.time|date:"Y-m-d" }} + {{ review_req.doc.time|date:"Y-m-d" }} + {% if review_req.doc.pub_date %} + (Latest revision {{ review_req.doc.pub_date|date:"Y-m-d" }}) + {% endif %} {% endif %} From 603938a9b6438ce78caea13272e61c0e15546478 Mon Sep 17 00:00:00 2001 From: Rich Salz Date: Sat, 15 Mar 2025 05:19:12 -0400 Subject: [PATCH 029/405] chore: Add IETF to "Last Call Expired" email Subject (#8675) No need to change the test. Fixes: #8526 --- ietf/doc/mails.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ietf/doc/mails.py b/ietf/doc/mails.py index c1e2074bc0..ddecbb6b54 100644 --- a/ietf/doc/mails.py +++ b/ietf/doc/mails.py @@ -568,7 +568,7 @@ def email_last_call_expired(doc): send_mail(None, addrs.to, "DraftTracker Mail System ", - "Last Call Expired: %s" % doc.file_tag(), + "IETF Last Call Expired: %s" % doc.file_tag(), "doc/mail/change_notice.txt", dict(text=text, doc=doc, From 9eb5b2fa8e25c68ec927b82008d61c8e38be354c Mon Sep 17 00:00:00 2001 From: Nicolas Giard Date: Sat, 15 Mar 2025 05:23:16 -0400 Subject: [PATCH 030/405] fix(agenda): always render session row if rendering a new date row when filtering (#8672) --- client/agenda/AgendaScheduleList.vue | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/client/agenda/AgendaScheduleList.vue b/client/agenda/AgendaScheduleList.vue index 905677b4da..ab0f6e0184 100644 --- a/client/agenda/AgendaScheduleList.vue +++ b/client/agenda/AgendaScheduleList.vue @@ -253,6 +253,7 @@ const meetingEvents = computed(() => { // -> Add date row const itemDate = DateTime.fromISO(item.adjustedStartDate) + let willRenderDateRow = false if (itemDate.toISODate() !== acc.lastDate) { acc.result.push({ id: item.id, @@ -262,12 +263,13 @@ const meetingEvents = computed(() => { date: 
itemDate.toLocaleString(DateTime.DATE_HUGE), cssClasses: 'agenda-table-display-day' }) + willRenderDateRow = true } acc.lastDate = itemDate.toISODate() // -> Add session header row const typeName = `${item.type}-${item.slotName}` - if (item.type === 'regular' && acc.lastTypeName !== typeName) { + if (item.type === 'regular' && (acc.lastTypeName !== typeName || willRenderDateRow)) { acc.result.push({ key: `sesshd-${item.id}`, displayType: 'session-head', From 4bf1b938724abb775a5e9532740934dfa9a4dc40 Mon Sep 17 00:00:00 2001 From: Russ Housley Date: Sat, 15 Mar 2025 05:38:23 -0400 Subject: [PATCH 031/405] fix: Set Review Type Name for IETF Last Call (#8678) --- ietf/name/fixtures/names.json | 4 ++-- ietf/name/migrations/0015_last_call_name.py | 22 +++++++++++++++++++++ 2 files changed, 24 insertions(+), 2 deletions(-) create mode 100644 ietf/name/migrations/0015_last_call_name.py diff --git a/ietf/name/fixtures/names.json b/ietf/name/fixtures/names.json index 8f1262b4cf..96273fbc35 100644 --- a/ietf/name/fixtures/names.json +++ b/ietf/name/fixtures/names.json @@ -2628,7 +2628,7 @@ "used": true }, "model": "doc.state", - "pk": 182 + "pk": 181 }, { "fields": { @@ -13210,7 +13210,7 @@ { "fields": { "desc": "", - "name": "Last Call", + "name": "IETF Last Call", "order": 2, "used": true }, diff --git a/ietf/name/migrations/0015_last_call_name.py b/ietf/name/migrations/0015_last_call_name.py new file mode 100644 index 0000000000..ac210a274f --- /dev/null +++ b/ietf/name/migrations/0015_last_call_name.py @@ -0,0 +1,22 @@ +# Copyright 2025, IETF Trust + +from django.db import migrations + + +def forward(apps, schema_editor): + ReviewTypeName = apps.get_model("name", "ReviewTypeName") + ReviewTypeName.objects.filter(slug="lc").update(name="IETF Last Call") + +def reverse(apps, schema_editor): + ReviewTypeName = apps.get_model("name", "ReviewTypeName") + ReviewTypeName.objects.filter(slug="lc").update(name="Last Call") + +class Migration(migrations.Migration): + + dependencies 
= [ + ("name", "0014_change_legacy_stream_desc"), + ] + + operations = [ + migrations.RunPython(forward, reverse) + ] From 44bb285836e032acb15d38febc82f4495a6dd9e8 Mon Sep 17 00:00:00 2001 From: Jim Fenton Date: Sun, 16 Mar 2025 14:37:56 +0700 Subject: [PATCH 032/405] fix: use correct variable to decide to show meeting notes (#8674) * fix: use correct variable to decide to show session notes * Correct number of lines for selftests * fix: adjust test to match --------- Co-authored-by: Robert Sparks --- ietf/meeting/tests_views.py | 28 +++++++++++-------- .../meeting/session_details_panel.html | 2 +- 2 files changed, 17 insertions(+), 13 deletions(-) diff --git a/ietf/meeting/tests_views.py b/ietf/meeting/tests_views.py index 519f5f7c2d..59d7e49f7f 100644 --- a/ietf/meeting/tests_views.py +++ b/ietf/meeting/tests_views.py @@ -424,37 +424,41 @@ def test_meeting_agenda(self): self.assertEqual(r.status_code, 200) def test_session_recordings_via_factories(self): - session = SessionFactory(meeting__type_id="ietf", meeting__date=date_today()-datetime.timedelta(days=180)) + session = SessionFactory(meeting__type_id="ietf", meeting__date=date_today()-datetime.timedelta(days=180), meeting__number=str(random.randint(108,150))) self.assertEqual(session.meetecho_recording_name, "") self.assertEqual(len(session.recordings()), 0) url = urlreverse("ietf.meeting.views.session_details", kwargs=dict(num=session.meeting.number, acronym=session.group.acronym)) r = self.client.get(url) q = PyQuery(r.content) # debug.show("q(f'#notes_and_recordings_{session.pk}')") - self.assertEqual(len(q(f"#notes_and_recordings_{session.pk} tr")), 1) - link = q(f"#notes_and_recordings_{session.pk} tr a") - self.assertEqual(len(link), 1) - self.assertEqual(link[0].attrib['href'], str(session.session_recording_url())) + self.assertEqual(len(q(f"#notes_and_recordings_{session.pk} tr")), 2) + links = q(f"#notes_and_recordings_{session.pk} tr a") + self.assertEqual(len(links), 2) + 
self.assertEqual(links[0].attrib['href'], str(session.notes_url())) + self.assertEqual(links[1].attrib['href'], str(session.session_recording_url())) session.meetecho_recording_name = 'my_test_session_name' session.save() r = self.client.get(url) q = PyQuery(r.content) - self.assertEqual(len(q(f"#notes_and_recordings_{session.pk} tr")), 1) + self.assertEqual(len(q(f"#notes_and_recordings_{session.pk} tr")), 2) links = q(f"#notes_and_recordings_{session.pk} tr a") - self.assertEqual(len(links), 1) - self.assertEqual(links[0].attrib['href'], session.session_recording_url()) + self.assertEqual(len(links), 2) + self.assertEqual(links[0].attrib['href'], str(session.notes_url())) + self.assertEqual(links[1].attrib['href'], str(session.session_recording_url())) new_recording_url = "https://www.youtube.com/watch?v=jNQXAC9IVRw" new_recording_title = "Me at the zoo" create_recording(session, new_recording_url, new_recording_title) r = self.client.get(url) q = PyQuery(r.content) - self.assertEqual(len(q(f"#notes_and_recordings_{session.pk} tr")), 2) + self.assertEqual(len(q(f"#notes_and_recordings_{session.pk} tr")), 3) links = q(f"#notes_and_recordings_{session.pk} tr a") - self.assertEqual(len(links), 2) - self.assertEqual(links[0].attrib['href'], new_recording_url) - self.assertIn(new_recording_title, links[0].text_content()) + self.assertEqual(len(links), 3) + self.assertEqual(links[0].attrib['href'], str(session.notes_url())) + self.assertEqual(links[1].attrib['href'], new_recording_url) + self.assertIn(new_recording_title, links[1].text_content()) + self.assertEqual(links[2].attrib['href'], str(session.session_recording_url())) #debug.show("q(f'#notes_and_recordings_{session_pk}')") def test_delete_recordings(self): diff --git a/ietf/templates/meeting/session_details_panel.html b/ietf/templates/meeting/session_details_panel.html index 9b7a192f05..87d9e3d672 100644 --- a/ietf/templates/meeting/session_details_panel.html +++ 
b/ietf/templates/meeting/session_details_panel.html @@ -310,7 +310,7 @@

Notes and recordings

- {% if session.uses_notes %} + {% if meeting.uses_notes %} {% for role in person.role_set.all|active_roles %} - + {% for day in time_slices %} {% endfor %} {% endif %} From 24101bb8ca85cfb3a5c47d7f9ed283cc6fb5bc0e Mon Sep 17 00:00:00 2001 From: Robert Sparks Date: Thu, 9 Oct 2025 13:49:40 -0500 Subject: [PATCH 216/405] feat: json snapshots of ipr statements (#9684) --- ietf/ipr/tests.py | 57 +++++++++++++++++++++++++++++++++++++++++++++++ ietf/ipr/urls.py | 1 + ietf/ipr/utils.py | 24 ++++++++++++++++++-- ietf/ipr/views.py | 7 +++++- 4 files changed, 86 insertions(+), 3 deletions(-) diff --git a/ietf/ipr/tests.py b/ietf/ipr/tests.py index 74fa540126..4146fbd4c1 100644 --- a/ietf/ipr/tests.py +++ b/ietf/ipr/tests.py @@ -3,6 +3,7 @@ import datetime +import json from unittest import mock import re @@ -15,6 +16,8 @@ from django.urls import reverse as urlreverse from django.utils import timezone +from django.db.models import Max + import debug # pyflakes:ignore from ietf.api.views import EmailIngestionError @@ -45,6 +48,7 @@ from ietf.mailtrigger.utils import gather_address_lists from ietf.message.factories import MessageFactory from ietf.message.models import Message +from ietf.person.factories import PersonFactory from ietf.utils.mail import outbox, empty_outbox, get_payload_text from ietf.utils.test_utils import TestCase, login_testing_unauthorized from ietf.utils.text import text_to_dict @@ -1113,3 +1117,56 @@ def test_patent_details_required_unless_blanket(self): val = self.data.pop(pf) self.assertTrue(HolderIprDisclosureForm(data=self.data).is_valid()) self.data[pf] = val + +class JsonSnapshotTests(TestCase): + def test_json_snapshot(self): + h = HolderIprDisclosureFactory() + url = urlreverse("ietf.ipr.views.json_snapshot", kwargs=dict(id=h.id)) + login_testing_unauthorized(self, "secretary", url) + r = self.client.get(url) + self.assertEqual(r.status_code, 200) + dump = json.loads(r.content) + self.assertCountEqual( + [o["model"] for o in dump], + 
["ipr.holderiprdisclosure", "ipr.iprdisclosurebase", "person.person"], + ) + h.docs.add(WgRfcFactory()) + r = self.client.get(url) + self.assertEqual(r.status_code, 200) + dump = json.loads(r.content) + self.assertCountEqual( + [o["model"] for o in dump], + [ + "ipr.holderiprdisclosure", + "ipr.iprdisclosurebase", + "ipr.iprdocrel", + "person.person", + ], + ) + IprEventFactory( + disclosure=h, + message=MessageFactory(by=PersonFactory()), + in_reply_to=MessageFactory(), + ) + r = self.client.get(url) + self.assertEqual(r.status_code, 200) + dump = json.loads(r.content) + self.assertCountEqual( + [o["model"] for o in dump], + [ + "ipr.holderiprdisclosure", + "ipr.iprdisclosurebase", + "ipr.iprdocrel", + "ipr.iprevent", + "message.message", + "message.message", + "person.person", + "person.person", + "person.person", + "person.person", + ], + ) + no_such_ipr_id = IprDisclosureBase.objects.aggregate(Max("id"))["id__max"] + 1 + url = urlreverse("ietf.ipr.views.json_snapshot", kwargs=dict(id=no_such_ipr_id)) + r = self.client.get(url) + self.assertEqual(r.status_code, 404) diff --git a/ietf/ipr/urls.py b/ietf/ipr/urls.py index 84ed04a66b..2c8a26c624 100644 --- a/ietf/ipr/urls.py +++ b/ietf/ipr/urls.py @@ -21,6 +21,7 @@ url(r'^(?P\d+)/notify/(?Pupdate|posted)/$', views.notify), url(r'^(?P\d+)/post/$', views.post), url(r'^(?P\d+)/state/$', views.state), + url(r'^(?P\d+)/json-snapshot/$', views.json_snapshot), url(r'^update/$', RedirectView.as_view(url=reverse_lazy('ietf.ipr.views.showlist'), permanent=True)), url(r'^update/(?P\d+)/$', views.update), url(r'^new-(?P<_type>(specific|generic|general|third-party))/$', views.new), diff --git a/ietf/ipr/utils.py b/ietf/ipr/utils.py index 7e569a1d1d..bcbb052260 100644 --- a/ietf/ipr/utils.py +++ b/ietf/ipr/utils.py @@ -1,11 +1,16 @@ -# Copyright The IETF Trust 2014-2020, All Rights Reserved +# Copyright The IETF Trust 2014-2025, All Rights Reserved # -*- coding: utf-8 -*- +import json +import debug # pyflakes:ignore + from 
textwrap import dedent +from django.core import serializers + from ietf.ipr.mail import process_response_email, UndeliverableIprResponseError -import debug # pyflakes:ignore +from ietf.ipr.models import IprDocRel def get_genitive(name): """Return the genitive form of name""" @@ -85,3 +90,18 @@ def ingest_response_email(message: bytes): email_original_message=message, email_attach_traceback=True, ) from err + +def json_dump_disclosure(disclosure): + objs = set() + objs.add(disclosure) + objs.add(disclosure.iprdisclosurebase_ptr) + objs.add(disclosure.by) + objs.update(IprDocRel.objects.filter(disclosure=disclosure)) + objs.update(disclosure.iprevent_set.all()) + objs.update([i.by for i in disclosure.iprevent_set.all()]) + objs.update([i.message for i in disclosure.iprevent_set.all() if i.message ]) + objs.update([i.message.by for i in disclosure.iprevent_set.all() if i.message ]) + objs.update([i.in_reply_to for i in disclosure.iprevent_set.all() if i.in_reply_to ]) + objs.update([i.in_reply_to.by for i in disclosure.iprevent_set.all() if i.in_reply_to ]) + objs = sorted(list(objs),key=lambda o:o.__class__.__name__) + return json.dumps(json.loads(serializers.serialize("json",objs)),indent=4) diff --git a/ietf/ipr/views.py b/ietf/ipr/views.py index 08979a3972..8eb3affbc0 100644 --- a/ietf/ipr/views.py +++ b/ietf/ipr/views.py @@ -32,7 +32,7 @@ NonDocSpecificIprDisclosure, IprDocRel, RelatedIpr,IprEvent) from ietf.ipr.utils import (get_genitive, get_ipr_summary, - iprs_from_docs, related_docs) + iprs_from_docs, json_dump_disclosure, related_docs) from ietf.mailtrigger.utils import gather_address_lists from ietf.message.models import Message from ietf.message.utils import infer_message @@ -901,3 +901,8 @@ def update(request, id): child = ipr.get_child() type = class_to_type[child.__class__.__name__] return new(request, type, updates=id) + +@role_required("Secretariat") +def json_snapshot(request, id): + obj = get_object_or_404(IprDisclosureBase,id=id).get_child() + 
return HttpResponse(json_dump_disclosure(obj),content_type="application/json") From 9d2fa7a32c6dae35de28c3a5f62ca5d762baef3c Mon Sep 17 00:00:00 2001 From: Robert Sparks Date: Thu, 9 Oct 2025 17:04:13 -0500 Subject: [PATCH 217/405] feat: track deleted ipr disclosures (#9691) * feat: track deleted ipr disclosures * fix: unique constraint on removed_id --- ietf/ipr/admin.py | 23 ++++++++++++--- .../migrations/0005_removediprdisclosure.py | 28 +++++++++++++++++++ .../migrations/0006_already_removed_ipr.py | 24 ++++++++++++++++ ietf/ipr/models.py | 6 +++- ietf/ipr/resources.py | 19 +++++++++++-- ietf/ipr/tests.py | 22 ++++++++++++++- ietf/ipr/views.py | 11 ++++++-- ietf/templates/ipr/deleted.html | 16 +++++++++++ 8 files changed, 139 insertions(+), 10 deletions(-) create mode 100644 ietf/ipr/migrations/0005_removediprdisclosure.py create mode 100644 ietf/ipr/migrations/0006_already_removed_ipr.py create mode 100644 ietf/templates/ipr/deleted.html diff --git a/ietf/ipr/admin.py b/ietf/ipr/admin.py index afc1952d72..1a8a908dcd 100644 --- a/ietf/ipr/admin.py +++ b/ietf/ipr/admin.py @@ -1,13 +1,22 @@ -# Copyright The IETF Trust 2010-2020, All Rights Reserved +# Copyright The IETF Trust 2010-2025, All Rights Reserved # -*- coding: utf-8 -*- from django import forms from django.contrib import admin from ietf.name.models import DocRelationshipName -from ietf.ipr.models import (IprDisclosureBase, IprDocRel, IprEvent, - RelatedIpr, HolderIprDisclosure, ThirdPartyIprDisclosure, GenericIprDisclosure, - NonDocSpecificIprDisclosure, LegacyMigrationIprEvent) +from ietf.ipr.models import ( + IprDisclosureBase, + IprDocRel, + IprEvent, + RelatedIpr, + HolderIprDisclosure, + RemovedIprDisclosure, + ThirdPartyIprDisclosure, + GenericIprDisclosure, + NonDocSpecificIprDisclosure, + LegacyMigrationIprEvent, +) # ------------------------------------------------------ # ModelAdmins @@ -110,3 +119,9 @@ class LegacyMigrationIprEventAdmin(admin.ModelAdmin): list_filter = ['time', 'type', 
'response_due'] raw_id_fields = ['by', 'disclosure', 'message', 'in_reply_to'] admin.site.register(LegacyMigrationIprEvent, LegacyMigrationIprEventAdmin) + +class RemovedIprDisclosureAdmin(admin.ModelAdmin): + pass + + +admin.site.register(RemovedIprDisclosure, RemovedIprDisclosureAdmin) diff --git a/ietf/ipr/migrations/0005_removediprdisclosure.py b/ietf/ipr/migrations/0005_removediprdisclosure.py new file mode 100644 index 0000000000..400a264579 --- /dev/null +++ b/ietf/ipr/migrations/0005_removediprdisclosure.py @@ -0,0 +1,28 @@ +# Copyright The IETF Trust 2025, All Rights Reserved + +from django.db import migrations, models + + +class Migration(migrations.Migration): + dependencies = [ + ("ipr", "0004_holderiprdisclosure_is_blanket_disclosure"), + ] + + operations = [ + migrations.CreateModel( + name="RemovedIprDisclosure", + fields=[ + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ("removed_id", models.PositiveBigIntegerField(unique=True)), + ("reason", models.TextField()), + ], + ), + ] diff --git a/ietf/ipr/migrations/0006_already_removed_ipr.py b/ietf/ipr/migrations/0006_already_removed_ipr.py new file mode 100644 index 0000000000..0e2dbc63eb --- /dev/null +++ b/ietf/ipr/migrations/0006_already_removed_ipr.py @@ -0,0 +1,24 @@ +# Copyright The IETF Trust 2025, All Rights Reserved +from django.db import migrations + + +def forward(apps, schema_editor): + RemovedIprDisclosure = apps.get_model("ipr", "RemovedIprDisclosure") + for id in (6544, 6068): + RemovedIprDisclosure.objects.create( + removed_id=id, + reason="This IPR disclosure was removed as objectively false.", + ) + + +def reverse(apps, schema_editor): + RemovedIprDisclosure = apps.get_model("ipr", "RemovedIprDisclosure") + RemovedIprDisclosure.objects.all().delete() + + +class Migration(migrations.Migration): + dependencies = [ + ("ipr", "0005_removediprdisclosure"), + ] + + operations = [migrations.RunPython(forward, 
reverse)] diff --git a/ietf/ipr/models.py b/ietf/ipr/models.py index 2d81eb4b42..ea148c2704 100644 --- a/ietf/ipr/models.py +++ b/ietf/ipr/models.py @@ -1,4 +1,4 @@ -# Copyright The IETF Trust 2007-2023, All Rights Reserved +# Copyright The IETF Trust 2007-2025, All Rights Reserved # -*- coding: utf-8 -*- @@ -270,3 +270,7 @@ class LegacyMigrationIprEvent(IprEvent): """A subclass of IprEvent specifically for capturing contents of legacy_url_0, the text of a disclosure submitted by email""" pass + +class RemovedIprDisclosure(models.Model): + removed_id = models.PositiveBigIntegerField(unique=True) + reason = models.TextField() diff --git a/ietf/ipr/resources.py b/ietf/ipr/resources.py index 0d8421cdec..c4d2c436e6 100644 --- a/ietf/ipr/resources.py +++ b/ietf/ipr/resources.py @@ -1,4 +1,4 @@ -# Copyright The IETF Trust 2015-2019, All Rights Reserved +# Copyright The IETF Trust 2015-2025, All Rights Reserved # -*- coding: utf-8 -*- # Autogenerated by the mkresources management command 2015-03-21 14:05 PDT @@ -11,7 +11,7 @@ from ietf import api -from ietf.ipr.models import ( IprDisclosureBase, IprDocRel, HolderIprDisclosure, ThirdPartyIprDisclosure, +from ietf.ipr.models import ( IprDisclosureBase, IprDocRel, HolderIprDisclosure, RemovedIprDisclosure, ThirdPartyIprDisclosure, RelatedIpr, NonDocSpecificIprDisclosure, GenericIprDisclosure, IprEvent, LegacyMigrationIprEvent ) from ietf.person.resources import PersonResource @@ -295,3 +295,18 @@ class Meta: } api.ipr.register(LegacyMigrationIprEventResource()) + + +class RemovedIprDisclosureResource(ModelResource): + class Meta: + queryset = RemovedIprDisclosure.objects.all() + serializer = api.Serializer() + cache = SimpleCache() + #resource_name = 'removediprdisclosure' + ordering = ['id', ] + filtering = { + "id": ALL, + "removed_id": ALL, + "reason": ALL, + } +api.ipr.register(RemovedIprDisclosureResource()) diff --git a/ietf/ipr/tests.py b/ietf/ipr/tests.py index 4146fbd4c1..53a599e2de 100644 --- a/ietf/ipr/tests.py 
+++ b/ietf/ipr/tests.py @@ -41,7 +41,7 @@ from ietf.ipr.forms import DraftForm, HolderIprDisclosureForm from ietf.ipr.mail import (process_response_email, get_reply_to, get_update_submitter_emails, get_pseudo_submitter, get_holders, get_update_cc_addrs, UndeliverableIprResponseError) -from ietf.ipr.models import (IprDisclosureBase, GenericIprDisclosure, HolderIprDisclosure, +from ietf.ipr.models import (IprDisclosureBase, GenericIprDisclosure, HolderIprDisclosure, RemovedIprDisclosure, ThirdPartyIprDisclosure, IprEvent) from ietf.ipr.templatetags.ipr_filters import no_revisions_message from ietf.ipr.utils import get_genitive, get_ipr_summary, ingest_response_email @@ -129,6 +129,26 @@ def test_showlist(self): self.assertContains(r, "removed as objectively false") ipr.delete() + def test_show_delete(self): + ipr = HolderIprDisclosureFactory() + removed = RemovedIprDisclosure.objects.create( + removed_id=ipr.pk, reason="Removed for reasons" + ) + url = urlreverse("ietf.ipr.views.show", kwargs=dict(id=removed.removed_id)) + r = self.client.get(url) + self.assertContains(r, "Removed for reasons") + q = PyQuery(r.content) + self.assertEqual(len(q("#deletion_warning")), 0) + self.client.login(username="secretary", password="secretary+password") + r = self.client.get(url) + self.assertContains(r, "Removed for reasons") + q = PyQuery(r.content) + self.assertEqual(len(q("#deletion_warning")), 1) + ipr.delete() + r = self.client.get(url) + self.assertContains(r, "Removed for reasons") + q = PyQuery(r.content) + self.assertEqual(len(q("#deletion_warning")), 0) def test_show_posted(self): ipr = HolderIprDisclosureFactory() diff --git a/ietf/ipr/views.py b/ietf/ipr/views.py index 8eb3affbc0..665c99dc43 100644 --- a/ietf/ipr/views.py +++ b/ietf/ipr/views.py @@ -28,7 +28,7 @@ AddCommentForm, AddEmailForm, NotifyForm, StateForm, NonDocSpecificIprDisclosureForm, GenericIprDisclosureForm) from ietf.ipr.models import (IprDisclosureStateName, IprDisclosureBase, - HolderIprDisclosure, 
GenericIprDisclosure, ThirdPartyIprDisclosure, + HolderIprDisclosure, GenericIprDisclosure, RemovedIprDisclosure, ThirdPartyIprDisclosure, NonDocSpecificIprDisclosure, IprDocRel, RelatedIpr,IprEvent) from ietf.ipr.utils import (get_genitive, get_ipr_summary, @@ -817,7 +817,14 @@ def get_details_tabs(ipr, selected): def show(request, id): """View of individual declaration""" - ipr = get_object_or_404(IprDisclosureBase, id=id).get_child() + ipr = IprDisclosureBase.objects.filter(id=id) + removed = RemovedIprDisclosure.objects.filter(removed_id=id) + if removed.exists(): + return render(request, "ipr/deleted.html", {"removed": removed.get(), "ipr": ipr}) + if not ipr.exists(): + raise Http404 + else: + ipr = ipr.get().get_child() if not has_role(request.user, 'Secretariat'): if ipr.state.slug in ['removed', 'removed_objfalse']: return render(request, "ipr/removed.html", { diff --git a/ietf/templates/ipr/deleted.html b/ietf/templates/ipr/deleted.html new file mode 100644 index 0000000000..24f696ebca --- /dev/null +++ b/ietf/templates/ipr/deleted.html @@ -0,0 +1,16 @@ +{% extends "base.html" %} +{# Copyright The IETF Trust 2015-2023, All Rights Reserved #} +{% load ietf_filters origin %} +{% block title %}Removed IPR Disclosure{% endblock %} +{% block content %} + {% origin %} +

Removed IPR disclosure

+

+ {{ removed.reason }} +

+ {% if user|has_role:"Secretariat" and ipr.exists %} +

+ This disclosure has not yet been deleted and parts of its content is available through, e.g, the history view and the /api/v1 views. +

+ {% endif %} + {% endblock %} \ No newline at end of file From ed6b061cfe4279328dbb7b914f5f7f76644521f9 Mon Sep 17 00:00:00 2001 From: Ryan Cross Date: Fri, 10 Oct 2025 05:43:24 -0700 Subject: [PATCH 218/405] chore: merge feat/sreq to main (#9697) * refactor: move session request tool to ietf.meeting and restyle (#9617) * refactor: move session request tool to ietf.meeting and restyle to match standard Datatracker * fix: add redirect for old session request url * fix: move stripe javascript to js file * fix: update copyright lines for modified files * fix: rename javascripts and expand redirects * fix: don't show inactive constraints label when there are none (#9680) * chore: remove unused utility * fix: add test for secr main menu page (#9693) * fix: don't show inactive constraints label when there are none * fix: add test for secr main menu page --------- Co-authored-by: Jennifer Richards Co-authored-by: Robert Sparks --- ietf/meeting/forms.py | 333 +++++++- .../templatetags/ams_filters.py | 2 + .../tests_session_requests.py} | 314 ++++--- ietf/meeting/tests_views.py | 4 +- ietf/meeting/urls.py | 14 +- .../views_session_request.py} | 803 ++++++++++-------- ietf/secr/meetings/views.py | 4 +- ietf/secr/sreq/__init__.py | 0 ietf/secr/sreq/forms.py | 333 -------- ietf/secr/sreq/templatetags/__init__.py | 0 ietf/secr/sreq/urls.py | 20 - ietf/secr/telechat/tests.py | 21 + ietf/secr/templates/includes/activities.html | 23 - .../includes/buttons_next_cancel.html | 6 - .../includes/buttons_submit_cancel.html | 6 - .../templates/includes/sessions_footer.html | 5 - .../includes/sessions_request_form.html | 130 --- .../includes/sessions_request_view.html | 73 -- .../sessions_request_view_formset.html | 32 - .../sessions_request_view_session_set.html | 32 - ietf/secr/templates/index.html | 6 +- ietf/secr/templates/sreq/confirm.html | 57 -- ietf/secr/templates/sreq/edit.html | 39 - ietf/secr/templates/sreq/locked.html | 30 - ietf/secr/templates/sreq/main.html | 65 -- 
ietf/secr/templates/sreq/new.html | 43 - ietf/secr/templates/sreq/tool_status.html | 42 - ietf/secr/templates/sreq/view.html | 55 -- ietf/secr/urls.py | 13 +- ietf/secr/utils/group.py | 50 -- ietf/settings.py | 1 - ietf/static/js/custom_striped.js | 16 + ietf/{secr => }/static/js/session_form.js | 2 +- .../js/session_request.js} | 12 +- ietf/templates/base/menu.html | 4 +- ietf/templates/group/meetings-row.html | 3 +- ietf/templates/group/meetings.html | 3 +- .../meeting/important_dates_for_meeting.ics | 5 +- ietf/templates/meeting/requests.html | 4 +- .../session_approval_notification.txt | 5 +- .../meeting}/session_cancel_notification.txt | 1 + .../meeting/session_details_form.html | 64 +- .../session_not_meeting_notification.txt} | 1 + .../meeting/session_request_confirm.html | 38 + .../meeting/session_request_form.html | 206 +++++ .../meeting/session_request_info.txt | 26 + .../meeting/session_request_list.html | 65 ++ .../meeting/session_request_locked.html | 21 + .../meeting}/session_request_notification.txt | 3 +- .../meeting/session_request_status.html | 28 + .../meeting/session_request_view.html | 59 ++ .../meeting/session_request_view_formset.html | 49 ++ .../session_request_view_session_set.html | 47 + .../meeting/session_request_view_table.html | 146 ++++ package.json | 5 +- 55 files changed, 1728 insertions(+), 1641 deletions(-) rename ietf/{secr/sreq => meeting}/templatetags/ams_filters.py (96%) rename ietf/{secr/sreq/tests.py => meeting/tests_session_requests.py} (84%) rename ietf/{secr/sreq/views.py => meeting/views_session_request.py} (80%) delete mode 100644 ietf/secr/sreq/__init__.py delete mode 100644 ietf/secr/sreq/forms.py delete mode 100644 ietf/secr/sreq/templatetags/__init__.py delete mode 100644 ietf/secr/sreq/urls.py delete mode 100644 ietf/secr/templates/includes/activities.html delete mode 100644 ietf/secr/templates/includes/buttons_next_cancel.html delete mode 100644 ietf/secr/templates/includes/buttons_submit_cancel.html delete mode 
100755 ietf/secr/templates/includes/sessions_footer.html delete mode 100755 ietf/secr/templates/includes/sessions_request_form.html delete mode 100644 ietf/secr/templates/includes/sessions_request_view.html delete mode 100644 ietf/secr/templates/includes/sessions_request_view_formset.html delete mode 100644 ietf/secr/templates/includes/sessions_request_view_session_set.html delete mode 100755 ietf/secr/templates/sreq/confirm.html delete mode 100755 ietf/secr/templates/sreq/edit.html delete mode 100755 ietf/secr/templates/sreq/locked.html delete mode 100755 ietf/secr/templates/sreq/main.html delete mode 100755 ietf/secr/templates/sreq/new.html delete mode 100755 ietf/secr/templates/sreq/tool_status.html delete mode 100644 ietf/secr/templates/sreq/view.html delete mode 100644 ietf/secr/utils/group.py create mode 100644 ietf/static/js/custom_striped.js rename ietf/{secr => }/static/js/session_form.js (92%) rename ietf/{secr/static/js/sessions.js => static/js/session_request.js} (90%) rename ietf/{secr/templates/sreq => templates/meeting}/session_approval_notification.txt (56%) rename ietf/{secr/templates/sreq => templates/meeting}/session_cancel_notification.txt (71%) rename ietf/{secr/templates/sreq/not_meeting_notification.txt => templates/meeting/session_not_meeting_notification.txt} (83%) create mode 100644 ietf/templates/meeting/session_request_confirm.html create mode 100644 ietf/templates/meeting/session_request_form.html create mode 100644 ietf/templates/meeting/session_request_info.txt create mode 100644 ietf/templates/meeting/session_request_list.html create mode 100644 ietf/templates/meeting/session_request_locked.html rename ietf/{secr/templates/sreq => templates/meeting}/session_request_notification.txt (56%) create mode 100644 ietf/templates/meeting/session_request_status.html create mode 100644 ietf/templates/meeting/session_request_view.html create mode 100644 ietf/templates/meeting/session_request_view_formset.html create mode 100644 
ietf/templates/meeting/session_request_view_session_set.html create mode 100644 ietf/templates/meeting/session_request_view_table.html diff --git a/ietf/meeting/forms.py b/ietf/meeting/forms.py index b6b1a1591f..e5b1697f86 100644 --- a/ietf/meeting/forms.py +++ b/ietf/meeting/forms.py @@ -1,4 +1,4 @@ -# Copyright The IETF Trust 2016-2023, All Rights Reserved +# Copyright The IETF Trust 2016-2025, All Rights Reserved # -*- coding: utf-8 -*- @@ -15,19 +15,24 @@ from django.core import validators from django.core.exceptions import ValidationError from django.forms import BaseInlineFormSet +from django.template.defaultfilters import pluralize from django.utils.functional import cached_property +from django.utils.safestring import mark_safe import debug # pyflakes:ignore from ietf.doc.models import Document, State, NewRevisionDocEvent from ietf.group.models import Group from ietf.group.utils import groups_managed_by -from ietf.meeting.models import Session, Meeting, Schedule, COUNTRIES, TIMEZONES, TimeSlot, Room +from ietf.meeting.models import (Session, Meeting, Schedule, COUNTRIES, TIMEZONES, TimeSlot, Room, + Constraint, ResourceAssociation) from ietf.meeting.helpers import get_next_interim_number, make_materials_directories from ietf.meeting.helpers import is_interim_meeting_approved, get_next_agenda_name from ietf.message.models import Message -from ietf.name.models import TimeSlotTypeName, SessionPurposeName +from ietf.name.models import TimeSlotTypeName, SessionPurposeName, TimerangeName, ConstraintName +from ietf.person.fields import SearchablePersonsField from ietf.person.models import Person +from ietf.utils import log from ietf.utils.fields import ( DatepickerDateField, DatepickerSplitDateTimeWidget, @@ -35,9 +40,14 @@ ModelMultipleChoiceField, MultiEmailField, ) +from ietf.utils.html import clean_text_field from ietf.utils.validators import ( validate_file_size, validate_mime_type, validate_file_extension, validate_no_html_frame) +NUM_SESSION_CHOICES = (('', 
'--Please select'), ('1', '1'), ('2', '2')) +SESSION_TIME_RELATION_CHOICES = (('', 'No preference'),) + Constraint.TIME_RELATION_CHOICES +JOINT_FOR_SESSION_CHOICES = (('1', 'First session'), ('2', 'Second session'), ('3', 'Third session'), ) + # ------------------------------------------------- # Helpers # ------------------------------------------------- @@ -74,6 +84,27 @@ def duration_string(duration): return string +def allowed_conflicting_groups(): + return Group.objects.filter( + type__in=['wg', 'ag', 'rg', 'rag', 'program', 'edwg'], + state__in=['bof', 'proposed', 'active']) + + +def check_conflict(groups, source_group): + ''' + Takes a string which is a list of group acronyms. Checks that they are all active groups + ''' + # convert to python list (allow space or comma separated lists) + items = groups.replace(',', ' ').split() + active_groups = allowed_conflicting_groups() + for group in items: + if group == source_group.acronym: + raise forms.ValidationError("Cannot declare a conflict with the same group: %s" % group) + + if not active_groups.filter(acronym=group): + raise forms.ValidationError("Invalid or inactive group acronym: %s" % group) + + # ------------------------------------------------- # Forms # ------------------------------------------------- @@ -753,6 +784,9 @@ def __init__(self, group, *args, **kwargs): self.fields['purpose'].queryset = SessionPurposeName.objects.filter(pk__in=session_purposes) if not group.features.acts_like_wg: self.fields['requested_duration'].durations = [datetime.timedelta(minutes=m) for m in range(30, 241, 30)] + # add bootstrap classes + self.fields['purpose'].widget.attrs.update({'class': 'form-select'}) + self.fields['type'].widget.attrs.update({'class': 'form-select', 'aria-label': 'session type'}) class Meta: model = Session @@ -837,3 +871,296 @@ def sessiondetailsformset_factory(min_num=1, max_num=3): max_num=max_num, extra=max_num, # only creates up to max_num total ) + + +class 
SessionRequestStatusForm(forms.Form): + message = forms.CharField(widget=forms.Textarea(attrs={'rows': '3', 'cols': '80'}), strip=False) + + +class NameModelMultipleChoiceField(ModelMultipleChoiceField): + def label_from_instance(self, name): + return name.desc + + +class SessionRequestForm(forms.Form): + num_session = forms.ChoiceField( + choices=NUM_SESSION_CHOICES, + label="Number of sessions") + # session fields are added in __init__() + session_time_relation = forms.ChoiceField( + choices=SESSION_TIME_RELATION_CHOICES, + required=False, + label="Time between two sessions") + attendees = forms.IntegerField(label="Number of Attendees") + # FIXME: it would cleaner to have these be + # ModelMultipleChoiceField, and just customize the widgetry, that + # way validation comes for free (applies to this CharField and the + # constraints dynamically instantiated in __init__()) + joint_with_groups = forms.CharField(max_length=255, required=False) + joint_with_groups_selector = forms.ChoiceField(choices=[], required=False) # group select widget for prev field + joint_for_session = forms.ChoiceField(choices=JOINT_FOR_SESSION_CHOICES, required=False) + comments = forms.CharField( + max_length=200, + label='Special Requests', + help_text='i.e. restrictions on meeting times / days, etc. (limit 200 characters)', + required=False) + third_session = forms.BooleanField( + required=False, + help_text="Help") + resources = forms.MultipleChoiceField( + widget=forms.CheckboxSelectMultiple, + required=False, + label='Resources Requested') + bethere = SearchablePersonsField( + label="Participants who must be present", + required=False, + help_text=mark_safe('Do not include Area Directors and WG Chairs; the system already tracks their availability.')) + timeranges = NameModelMultipleChoiceField( + widget=forms.CheckboxSelectMultiple, + required=False, + label=mark_safe('Times during which this WG can not meet:
Please explain any selections in Special Requests below.'), + queryset=TimerangeName.objects.all()) + adjacent_with_wg = forms.ChoiceField( + required=False, + label=mark_safe('Plan session adjacent with another WG:
(Immediately before or after another WG, no break in between, in the same room.)')) + send_notifications = forms.BooleanField(label="Send notification emails?", required=False, initial=False) + + def __init__(self, group, meeting, data=None, *args, **kwargs): + self.hidden = kwargs.pop('hidden', False) + self.notifications_optional = kwargs.pop('notifications_optional', False) + + self.group = group + formset_class = sessiondetailsformset_factory(max_num=3 if group.features.acts_like_wg else 50) + self.session_forms = formset_class(group=self.group, meeting=meeting, data=data) + super().__init__(data=data, *args, **kwargs) + if not self.notifications_optional: + self.fields['send_notifications'].widget = forms.HiddenInput() + + # Allow additional sessions for non-wg-like groups + if not self.group.features.acts_like_wg: + self.fields['num_session'].choices = ((n, str(n)) for n in range(1, 51)) + + self._add_widget_class(self.fields['third_session'].widget, 'form-check-input') + self.fields['comments'].widget = forms.Textarea(attrs={'rows': '3', 'cols': '65'}) + + other_groups = list(allowed_conflicting_groups().exclude(pk=group.pk).values_list('acronym', 'acronym').order_by('acronym')) + self.fields['adjacent_with_wg'].choices = [('', '--No preference')] + other_groups + group_acronym_choices = [('', '--Select WG(s)')] + other_groups + self.fields['joint_with_groups_selector'].choices = group_acronym_choices + + # Set up constraints for the meeting + self._wg_field_data = [] + for constraintname in meeting.group_conflict_types.all(): + # two fields for each constraint: a CharField for the group list and a selector to add entries + constraint_field = forms.CharField(max_length=255, required=False) + constraint_field.widget.attrs['data-slug'] = constraintname.slug + constraint_field.widget.attrs['data-constraint-name'] = str(constraintname).title() + constraint_field.widget.attrs['aria-label'] = f'{constraintname.slug}_input' + 
self._add_widget_class(constraint_field.widget, 'wg_constraint') + self._add_widget_class(constraint_field.widget, 'form-control') + + selector_field = forms.ChoiceField(choices=group_acronym_choices, required=False) + selector_field.widget.attrs['data-slug'] = constraintname.slug # used by onchange handler + self._add_widget_class(selector_field.widget, 'wg_constraint_selector') + self._add_widget_class(selector_field.widget, 'form-control') + + cfield_id = 'constraint_{}'.format(constraintname.slug) + cselector_id = 'wg_selector_{}'.format(constraintname.slug) + # keep an eye out for field name conflicts + log.assertion('cfield_id not in self.fields') + log.assertion('cselector_id not in self.fields') + self.fields[cfield_id] = constraint_field + self.fields[cselector_id] = selector_field + self._wg_field_data.append((constraintname, cfield_id, cselector_id)) + + # Show constraints that are not actually used by the meeting so these don't get lost + self._inactive_wg_field_data = [] + inactive_cnames = ConstraintName.objects.filter( + is_group_conflict=True # Only collect group conflicts... + ).exclude( + meeting=meeting # ...that are not enabled for this meeting... + ).filter( + constraint__source=group, # ...but exist for this group... + constraint__meeting=meeting, # ... at this meeting. 
+ ).distinct() + + for inactive_constraint_name in inactive_cnames: + field_id = 'delete_{}'.format(inactive_constraint_name.slug) + self.fields[field_id] = forms.BooleanField(required=False, label='Delete this conflict', help_text='Delete this inactive conflict?') + self._add_widget_class(self.fields[field_id].widget, 'form-control') + constraints = group.constraint_source_set.filter(meeting=meeting, name=inactive_constraint_name) + self._inactive_wg_field_data.append( + (inactive_constraint_name, + ' '.join([c.target.acronym for c in constraints]), + field_id) + ) + + self.fields['joint_with_groups_selector'].widget.attrs['onchange'] = "document.form_post.joint_with_groups.value=document.form_post.joint_with_groups.value + ' ' + this.options[this.selectedIndex].value; return 1;" + self.fields["resources"].choices = [(x.pk, x.desc) for x in ResourceAssociation.objects.filter(name__used=True).order_by('name__order')] + + if self.hidden: + # replace all the widgets to start... + for key in list(self.fields.keys()): + self.fields[key].widget = forms.HiddenInput() + # re-replace a couple special cases + self.fields['resources'].widget = forms.MultipleHiddenInput() + self.fields['timeranges'].widget = forms.MultipleHiddenInput() + # and entirely replace bethere - no need to support searching if input is hidden + self.fields['bethere'] = ModelMultipleChoiceField( + widget=forms.MultipleHiddenInput, required=False, + queryset=Person.objects.all(), + ) + + def wg_constraint_fields(self): + """Iterates over wg constraint fields + + Intended for use in the template. 
+ """ + for cname, cfield_id, cselector_id in self._wg_field_data: + yield cname, self[cfield_id], self[cselector_id] + + def wg_constraint_count(self): + """How many wg constraints are there?""" + return len(self._wg_field_data) + + def wg_constraint_field_ids(self): + """Iterates over wg constraint field IDs""" + for cname, cfield_id, _ in self._wg_field_data: + yield cname, cfield_id + + def inactive_wg_constraints(self): + for cname, value, field_id in self._inactive_wg_field_data: + yield cname, value, self[field_id] + + def inactive_wg_constraint_count(self): + return len(self._inactive_wg_field_data) + + def inactive_wg_constraint_field_ids(self): + """Iterates over wg constraint field IDs""" + for cname, _, field_id in self._inactive_wg_field_data: + yield cname, field_id + + @staticmethod + def _add_widget_class(widget, new_class): + """Add a new class, taking care in case some already exist""" + existing_classes = widget.attrs.get('class', '').split() + widget.attrs['class'] = ' '.join(existing_classes + [new_class]) + + def _join_conflicts(self, cleaned_data, slugs): + """Concatenate constraint fields from cleaned data into a single list""" + conflicts = [] + for cname, cfield_id, _ in self._wg_field_data: + if cname.slug in slugs and cfield_id in cleaned_data: + groups = cleaned_data[cfield_id] + # convert to python list (allow space or comma separated lists) + items = groups.replace(',', ' ').split() + conflicts.extend(items) + return conflicts + + def _validate_duplicate_conflicts(self, cleaned_data): + """Validate that no WGs appear in more than one constraint that does not allow duplicates + + Raises ValidationError + """ + # Only the older constraints (conflict, conflic2, conflic3) need to be mutually exclusive. 
+ all_conflicts = self._join_conflicts(cleaned_data, ['conflict', 'conflic2', 'conflic3']) + seen = [] + duplicated = [] + errors = [] + for c in all_conflicts: + if c not in seen: + seen.append(c) + elif c not in duplicated: # only report once + duplicated.append(c) + errors.append(forms.ValidationError('%s appears in conflicts more than once' % c)) + return errors + + def clean_joint_with_groups(self): + groups = self.cleaned_data['joint_with_groups'] + check_conflict(groups, self.group) + return groups + + def clean_comments(self): + return clean_text_field(self.cleaned_data['comments']) + + def clean_bethere(self): + bethere = self.cleaned_data["bethere"] + if bethere: + extra = set( + Person.objects.filter( + role__group=self.group, role__name__in=["chair", "ad"] + ) + & bethere + ) + if extra: + extras = ", ".join(e.name for e in extra) + raise forms.ValidationError( + ( + f"Please remove the following person{pluralize(len(extra))}, the system " + f"tracks their availability due to their role{pluralize(len(extra))}: {extras}." 
+ ) + ) + return bethere + + def clean_send_notifications(self): + return True if not self.notifications_optional else self.cleaned_data['send_notifications'] + + def is_valid(self): + return super().is_valid() and self.session_forms.is_valid() + + def clean(self): + super(SessionRequestForm, self).clean() + self.session_forms.clean() + + data = self.cleaned_data + + # Validate the individual conflict fields + for _, cfield_id, _ in self._wg_field_data: + try: + check_conflict(data[cfield_id], self.group) + except forms.ValidationError as e: + self.add_error(cfield_id, e) + + # Skip remaining tests if individual field tests had errors, + if self.errors: + return data + + # error if conflicts contain disallowed dupes + for error in self._validate_duplicate_conflicts(data): + self.add_error(None, error) + + # Verify expected number of session entries are present + num_sessions_with_data = len(self.session_forms.forms_to_keep) + num_sessions_expected = -1 + try: + num_sessions_expected = int(data.get('num_session', '')) + except ValueError: + self.add_error('num_session', 'Invalid value for number of sessions') + if num_sessions_with_data < num_sessions_expected: + self.add_error('num_session', 'Must provide data for all sessions') + + # if default (empty) option is selected, cleaned_data won't include num_session key + if num_sessions_expected != 2 and num_sessions_expected is not None: + if data.get('session_time_relation'): + self.add_error( + 'session_time_relation', + forms.ValidationError('Time between sessions can only be used when two sessions are requested.') + ) + + joint_session = data.get('joint_for_session', '') + if joint_session != '': + joint_session = int(joint_session) + if joint_session > num_sessions_with_data: + self.add_error( + 'joint_for_session', + forms.ValidationError( + f'Session {joint_session} can not be the joint session, the session has not been requested.' 
+ ) + ) + + return data + + @property + def media(self): + # get media for our formset + return super().media + self.session_forms.media + forms.Media(js=('ietf/js/session_form.js',)) diff --git a/ietf/secr/sreq/templatetags/ams_filters.py b/ietf/meeting/templatetags/ams_filters.py similarity index 96% rename from ietf/secr/sreq/templatetags/ams_filters.py rename to ietf/meeting/templatetags/ams_filters.py index 3ef872232a..a8175a81d6 100644 --- a/ietf/secr/sreq/templatetags/ams_filters.py +++ b/ietf/meeting/templatetags/ams_filters.py @@ -1,3 +1,5 @@ +# Copyright The IETF Trust 2025, All Rights Reserved + from django import template from ietf.person.models import Person diff --git a/ietf/secr/sreq/tests.py b/ietf/meeting/tests_session_requests.py similarity index 84% rename from ietf/secr/sreq/tests.py rename to ietf/meeting/tests_session_requests.py index 847b993e1c..0cb092d2f8 100644 --- a/ietf/secr/sreq/tests.py +++ b/ietf/meeting/tests_session_requests.py @@ -1,4 +1,4 @@ -# Copyright The IETF Trust 2013-2022, All Rights Reserved +# Copyright The IETF Trust 2013-2025, All Rights Reserved # -*- coding: utf-8 -*- @@ -15,30 +15,15 @@ from ietf.name.models import ConstraintName, TimerangeName from ietf.person.factories import PersonFactory from ietf.person.models import Person -from ietf.secr.sreq.forms import SessionForm +from ietf.meeting.forms import SessionRequestForm from ietf.utils.mail import outbox, empty_outbox, get_payload_text, send_mail from ietf.utils.timezone import date_today from pyquery import PyQuery -SECR_USER='secretary' +SECR_USER = 'secretary' -class SreqUrlTests(TestCase): - def test_urls(self): - MeetingFactory(type_id='ietf',date=date_today()) - - self.client.login(username="secretary", password="secretary+password") - - r = self.client.get("/secr/") - self.assertEqual(r.status_code, 200) - - r = self.client.get("/secr/sreq/") - self.assertEqual(r.status_code, 200) - - testgroup=GroupFactory() - r = self.client.get("/secr/sreq/%s/new/" % 
testgroup.acronym) - self.assertEqual(r.status_code, 200) class SessionRequestTestCase(TestCase): def test_main(self): @@ -46,7 +31,7 @@ def test_main(self): SessionFactory.create_batch(2, meeting=meeting, status_id='sched') SessionFactory.create_batch(2, meeting=meeting, status_id='disappr') # Several unscheduled groups come from make_immutable_base_data - url = reverse('ietf.secr.sreq.views.main') + url = reverse('ietf.meeting.views_session_request.list_view') self.client.login(username="secretary", password="secretary+password") r = self.client.get(url) self.assertEqual(r.status_code, 200) @@ -62,27 +47,27 @@ def test_approve(self): mars = GroupFactory(parent=area, acronym='mars') # create session waiting for approval session = SessionFactory(meeting=meeting, group=mars, status_id='apprw') - url = reverse('ietf.secr.sreq.views.approve', kwargs={'acronym':'mars'}) + url = reverse('ietf.meeting.views_session_request.approve_request', kwargs={'acronym': 'mars'}) self.client.login(username="ad", password="ad+password") r = self.client.get(url) - self.assertRedirects(r,reverse('ietf.secr.sreq.views.view', kwargs={'acronym':'mars'})) + self.assertRedirects(r, reverse('ietf.meeting.views_session_request.view_request', kwargs={'acronym': 'mars'})) self.assertEqual(SchedulingEvent.objects.filter(session=session).order_by('-id')[0].status_id, 'appr') - + def test_cancel(self): meeting = MeetingFactory(type_id='ietf', date=date_today()) ad = Person.objects.get(user__username='ad') area = RoleFactory(name_id='ad', person=ad, group__type_id='area').group session = SessionFactory(meeting=meeting, group__parent=area, group__acronym='mars', status_id='sched') - url = reverse('ietf.secr.sreq.views.cancel', kwargs={'acronym':'mars'}) + url = reverse('ietf.meeting.views_session_request.cancel_request', kwargs={'acronym': 'mars'}) self.client.login(username="ad", password="ad+password") r = self.client.get(url) - self.assertRedirects(r,reverse('ietf.secr.sreq.views.main')) + 
self.assertRedirects(r, reverse('ietf.meeting.views_session_request.list_view')) self.assertEqual(SchedulingEvent.objects.filter(session=session).order_by('-id')[0].status_id, 'deleted') def test_cancel_notification_msg(self): to = "" subject = "Dummy subject" - template = "sreq/session_cancel_notification.txt" + template = "meeting/session_cancel_notification.txt" meeting = MeetingFactory(type_id="ietf", date=date_today()) requester = PersonFactory(name="James O'Rourke", user__username="jimorourke") context = {"meeting": meeting, "requester": requester} @@ -113,9 +98,9 @@ def test_edit(self): group4 = GroupFactory() iabprog = GroupFactory(type_id='program') - SessionFactory(meeting=meeting,group=mars,status_id='sched') + SessionFactory(meeting=meeting, group=mars, status_id='sched') - url = reverse('ietf.secr.sreq.views.edit', kwargs={'acronym':'mars'}) + url = reverse('ietf.meeting.views_session_request.edit_request', kwargs={'acronym': 'mars'}) self.client.login(username="marschairman", password="marschairman+password") r = self.client.get(url) self.assertEqual(r.status_code, 200) @@ -123,9 +108,9 @@ def test_edit(self): comments = 'need lights' mars_sessions = meeting.session_set.filter(group__acronym='mars') empty_outbox() - post_data = {'num_session':'2', + post_data = {'num_session': '2', 'attendees': attendees, - 'constraint_chair_conflict':iabprog.acronym, + 'constraint_chair_conflict': iabprog.acronym, 'session_time_relation': 'subsequent-days', 'adjacent_with_wg': group2.acronym, 'joint_with_groups': group3.acronym + ' ' + group4.acronym, @@ -135,7 +120,7 @@ def test_edit(self): 'session_set-INITIAL_FORMS': '1', 'session_set-MIN_NUM_FORMS': '1', 'session_set-MAX_NUM_FORMS': '3', - 'session_set-0-id':mars_sessions[0].pk, + 'session_set-0-id': mars_sessions[0].pk, 'session_set-0-name': mars_sessions[0].name, 'session_set-0-short': mars_sessions[0].short, 'session_set-0-purpose': mars_sessions[0].purpose_id, @@ -169,7 +154,7 @@ def test_edit(self): 
'session_set-2-DELETE': 'on', 'submit': 'Continue'} r = self.client.post(url, post_data, HTTP_HOST='example.com') - redirect_url = reverse('ietf.secr.sreq.views.view', kwargs={'acronym': 'mars'}) + redirect_url = reverse('ietf.meeting.views_session_request.view_request', kwargs={'acronym': 'mars'}) self.assertRedirects(r, redirect_url) # Check whether updates were stored in the database @@ -204,17 +189,17 @@ def test_edit(self): # Edit again, changing the joint sessions and clearing some fields. The behaviour of # edit is different depending on whether previous joint sessions were recorded. empty_outbox() - post_data = {'num_session':'2', - 'attendees':attendees, - 'constraint_chair_conflict':'', - 'comments':'need lights', + post_data = {'num_session': '2', + 'attendees': attendees, + 'constraint_chair_conflict': '', + 'comments': 'need lights', 'joint_with_groups': group2.acronym, 'joint_for_session': '1', 'session_set-TOTAL_FORMS': '3', # matches what view actually sends, even with only 2 filled in 'session_set-INITIAL_FORMS': '2', 'session_set-MIN_NUM_FORMS': '1', 'session_set-MAX_NUM_FORMS': '3', - 'session_set-0-id':sessions[0].pk, + 'session_set-0-id': sessions[0].pk, 'session_set-0-name': sessions[0].name, 'session_set-0-short': sessions[0].short, 'session_set-0-purpose': sessions[0].purpose_id, @@ -270,7 +255,6 @@ def test_edit(self): r = self.client.get(redirect_url) self.assertContains(r, 'First session with: {}'.format(group2.acronym)) - def test_edit_constraint_bethere(self): meeting = MeetingFactory(type_id='ietf', date=date_today()) mars = RoleFactory(name_id='chair', person__user__username='marschairman', group__acronym='mars').group @@ -282,7 +266,7 @@ def test_edit_constraint_bethere(self): name_id='bethere', ) self.assertEqual(session.people_constraints.count(), 1) - url = reverse('ietf.secr.sreq.views.edit', kwargs=dict(acronym='mars')) + url = reverse('ietf.meeting.views_session_request.edit_request', kwargs=dict(acronym='mars')) 
self.client.login(username='marschairman', password='marschairman+password') attendees = '10' ad = Person.objects.get(user__username='ad') @@ -290,8 +274,8 @@ def test_edit_constraint_bethere(self): 'num_session': '1', 'attendees': attendees, 'bethere': str(ad.pk), - 'constraint_chair_conflict':'', - 'comments':'', + 'constraint_chair_conflict': '', + 'comments': '', 'joint_with_groups': '', 'joint_for_session': '', 'delete_conflict': 'on', @@ -299,7 +283,7 @@ def test_edit_constraint_bethere(self): 'session_set-INITIAL_FORMS': '1', 'session_set-MIN_NUM_FORMS': '1', 'session_set-MAX_NUM_FORMS': '3', - 'session_set-0-id':session.pk, + 'session_set-0-id': session.pk, 'session_set-0-name': session.name, 'session_set-0-short': session.short, 'session_set-0-purpose': session.purpose_id, @@ -313,8 +297,8 @@ def test_edit_constraint_bethere(self): 'session_set-1-id': '', 'session_set-1-name': '', 'session_set-1-short': '', - 'session_set-1-purpose':'regular', - 'session_set-1-type':'regular', + 'session_set-1-purpose': 'regular', + 'session_set-1-type': 'regular', 'session_set-1-requested_duration': '', 'session_set-1-on_agenda': 'True', 'session_set-1-attendees': attendees, @@ -333,7 +317,7 @@ def test_edit_constraint_bethere(self): 'submit': 'Save', } r = self.client.post(url, post_data, HTTP_HOST='example.com') - redirect_url = reverse('ietf.secr.sreq.views.view', kwargs={'acronym': 'mars'}) + redirect_url = reverse('ietf.meeting.views_session_request.view_request', kwargs={'acronym': 'mars'}) self.assertRedirects(r, redirect_url) self.assertEqual([pc.person for pc in session.people_constraints.all()], [ad]) @@ -350,7 +334,7 @@ def test_edit_inactive_conflicts(self): target=other_group, ) - url = reverse('ietf.secr.sreq.views.edit', kwargs=dict(acronym='mars')) + url = reverse('ietf.meeting.views_session_request.edit_request', kwargs=dict(acronym='mars')) self.client.login(username='marschairman', password='marschairman+password') r = self.client.get(url) 
self.assertEqual(r.status_code, 200) @@ -360,17 +344,17 @@ def test_edit_inactive_conflicts(self): found = q('input#id_delete_conflict[type="checkbox"]') self.assertEqual(len(found), 1) delete_checkbox = found[0] - # check that the label on the checkbox is correct - self.assertIn('Delete this conflict', delete_checkbox.tail) + self.assertIn('Delete this conflict', delete_checkbox.label.text) # check that the target is displayed correctly in the UI - self.assertIn(other_group.acronym, delete_checkbox.find('../input[@type="text"]').value) + row = found.parent().parent() + self.assertIn(other_group.acronym, row.find('input[@type="text"]').val()) attendees = '10' post_data = { 'num_session': '1', 'attendees': attendees, - 'constraint_chair_conflict':'', - 'comments':'', + 'constraint_chair_conflict': '', + 'comments': '', 'joint_with_groups': '', 'joint_for_session': '', 'delete_conflict': 'on', @@ -378,7 +362,7 @@ def test_edit_inactive_conflicts(self): 'session_set-INITIAL_FORMS': '1', 'session_set-MIN_NUM_FORMS': '1', 'session_set-MAX_NUM_FORMS': '3', - 'session_set-0-id':session.pk, + 'session_set-0-id': session.pk, 'session_set-0-name': session.name, 'session_set-0-short': session.short, 'session_set-0-purpose': session.purpose_id, @@ -392,28 +376,28 @@ def test_edit_inactive_conflicts(self): 'submit': 'Save', } r = self.client.post(url, post_data, HTTP_HOST='example.com') - redirect_url = reverse('ietf.secr.sreq.views.view', kwargs={'acronym': 'mars'}) + redirect_url = reverse('ietf.meeting.views_session_request.view_request', kwargs={'acronym': 'mars'}) self.assertRedirects(r, redirect_url) self.assertEqual(len(mars.constraint_source_set.filter(name_id='conflict')), 0) def test_tool_status(self): MeetingFactory(type_id='ietf', date=date_today()) - url = reverse('ietf.secr.sreq.views.tool_status') + url = reverse('ietf.meeting.views_session_request.status') self.client.login(username="secretary", password="secretary+password") r = self.client.get(url) 
self.assertEqual(r.status_code, 200) - r = self.client.post(url, {'message':'locked', 'submit':'Lock'}) - self.assertRedirects(r,reverse('ietf.secr.sreq.views.main')) + r = self.client.post(url, {'message': 'locked', 'submit': 'Lock'}) + self.assertRedirects(r, reverse('ietf.meeting.views_session_request.list_view')) def test_new_req_constraint_types(self): """Configurable constraint types should be handled correctly in a new request - Relies on SessionForm representing constraint values with element IDs + Relies on SessionRequestForm representing constraint values with element IDs like id_constraint_ """ meeting = MeetingFactory(type_id='ietf', date=date_today()) RoleFactory(name_id='chair', person__user__username='marschairman', group__acronym='mars') - url = reverse('ietf.secr.sreq.views.new', kwargs=dict(acronym='mars')) + url = reverse('ietf.meeting.views_session_request.new_request', kwargs=dict(acronym='mars')) self.client.login(username="marschairman", password="marschairman+password") for expected in [ @@ -441,7 +425,7 @@ def test_edit_req_constraint_types(self): add_to_schedule=False) RoleFactory(name_id='chair', person__user__username='marschairman', group__acronym='mars') - url = reverse('ietf.secr.sreq.views.edit', kwargs=dict(acronym='mars')) + url = reverse('ietf.meeting.views_session_request.edit_request', kwargs=dict(acronym='mars')) self.client.login(username='marschairman', password='marschairman+password') for expected in [ @@ -460,6 +444,7 @@ def test_edit_req_constraint_types(self): ['id_constraint_{}'.format(conf_name) for conf_name in expected], ) + class SubmitRequestCase(TestCase): def setUp(self): super(SubmitRequestCase, self).setUp() @@ -476,15 +461,15 @@ def test_submit_request(self): group3 = GroupFactory(parent=area) group4 = GroupFactory(parent=area) session_count_before = Session.objects.filter(meeting=meeting, group=group).count() - url = reverse('ietf.secr.sreq.views.new',kwargs={'acronym':group.acronym}) - confirm_url = 
reverse('ietf.secr.sreq.views.confirm',kwargs={'acronym':group.acronym}) - main_url = reverse('ietf.secr.sreq.views.main') + url = reverse('ietf.meeting.views_session_request.new_request', kwargs={'acronym': group.acronym}) + confirm_url = reverse('ietf.meeting.views_session_request.confirm', kwargs={'acronym': group.acronym}) + main_url = reverse('ietf.meeting.views_session_request.list_view') attendees = '10' comments = 'need projector' - post_data = {'num_session':'1', - 'attendees':attendees, - 'constraint_chair_conflict':'', - 'comments':comments, + post_data = {'num_session': '1', + 'attendees': attendees, + 'constraint_chair_conflict': '', + 'comments': comments, 'adjacent_with_wg': group2.acronym, 'timeranges': ['thursday-afternoon-early', 'thursday-afternoon-late'], 'joint_with_groups': group3.acronym + ' ' + group4.acronym, @@ -506,7 +491,7 @@ def test_submit_request(self): 'session_set-0-DELETE': '', 'submit': 'Continue'} self.client.login(username="secretary", password="secretary+password") - r = self.client.post(url,post_data) + r = self.client.post(url, post_data) self.assertEqual(r.status_code, 200) # Verify the contents of the confirm view @@ -515,13 +500,13 @@ def test_submit_request(self): self.assertContains(r, 'First session with: {} {}'.format(group3.acronym, group4.acronym)) post_data['submit'] = 'Submit' - r = self.client.post(confirm_url,post_data) + r = self.client.post(confirm_url, post_data) self.assertRedirects(r, main_url) session_count_after = Session.objects.filter(meeting=meeting, group=group, type='regular').count() self.assertEqual(session_count_after, session_count_before + 1) # test that second confirm does not add sessions - r = self.client.post(confirm_url,post_data) + r = self.client.post(confirm_url, post_data) self.assertRedirects(r, main_url) session_count_after = Session.objects.filter(meeting=meeting, group=group, type='regular').count() self.assertEqual(session_count_after, session_count_before + 1) @@ -535,42 +520,6 @@ 
def test_submit_request(self): ) self.assertEqual(set(list(session.joint_with_groups.all())), set([group3, group4])) - def test_submit_request_invalid(self): - MeetingFactory(type_id='ietf', date=date_today()) - ad = Person.objects.get(user__username='ad') - area = RoleFactory(name_id='ad', person=ad, group__type_id='area').group - group = GroupFactory(parent=area) - url = reverse('ietf.secr.sreq.views.new',kwargs={'acronym':group.acronym}) - attendees = '10' - comments = 'need projector' - post_data = { - 'num_session':'2', - 'attendees':attendees, - 'constraint_chair_conflict':'', - 'comments':comments, - 'session_set-TOTAL_FORMS': '1', - 'session_set-INITIAL_FORMS': '1', - 'session_set-MIN_NUM_FORMS': '1', - 'session_set-MAX_NUM_FORMS': '3', - # no 'session_set-0-id' to create a new session - 'session_set-0-name': '', - 'session_set-0-short': '', - 'session_set-0-purpose': 'regular', - 'session_set-0-type': 'regular', - 'session_set-0-requested_duration': '3600', - 'session_set-0-on_agenda': True, - 'session_set-0-remote_instructions': '', - 'session_set-0-attendees': attendees, - 'session_set-0-comments': comments, - 'session_set-0-DELETE': '', - } - self.client.login(username="secretary", password="secretary+password") - r = self.client.post(url,post_data) - self.assertEqual(r.status_code, 200) - q = PyQuery(r.content) - self.assertEqual(len(q('#session-request-form')),1) - self.assertContains(r, 'Must provide data for all sessions') - def test_submit_request_check_constraints(self): m1 = MeetingFactory(type_id='ietf', date=date_today() - datetime.timedelta(days=100)) MeetingFactory(type_id='ietf', date=date_today(), @@ -597,7 +546,7 @@ def test_submit_request_check_constraints(self): self.client.login(username="secretary", password="secretary+password") - url = reverse('ietf.secr.sreq.views.new',kwargs={'acronym':group.acronym}) + url = reverse('ietf.meeting.views_session_request.new_request', kwargs={'acronym': group.acronym}) r = self.client.get(url + 
'?previous') self.assertEqual(r.status_code, 200) q = PyQuery(r.content) @@ -607,11 +556,11 @@ def test_submit_request_check_constraints(self): attendees = '10' comments = 'need projector' - post_data = {'num_session':'1', - 'attendees':attendees, + post_data = {'num_session': '1', + 'attendees': attendees, 'constraint_chair_conflict': group.acronym, - 'comments':comments, - 'session_set-TOTAL_FORMS': '1', + 'comments': comments, + 'session_set-TOTAL_FORMS': '3', 'session_set-INITIAL_FORMS': '1', 'session_set-MIN_NUM_FORMS': '1', 'session_set-MAX_NUM_FORMS': '3', @@ -626,11 +575,31 @@ def test_submit_request_check_constraints(self): 'session_set-0-attendees': attendees, 'session_set-0-comments': comments, 'session_set-0-DELETE': '', + 'session_set-1-name': '', + 'session_set-1-short': '', + 'session_set-1-purpose': session.purpose_id, + 'session_set-1-type': session.type_id, + 'session_set-1-requested_duration': '', + 'session_set-1-on_agenda': session.on_agenda, + 'session_set-1-remote_instructions': '', + 'session_set-1-attendees': attendees, + 'session_set-1-comments': '', + 'session_set-1-DELETE': 'on', + 'session_set-2-name': '', + 'session_set-2-short': '', + 'session_set-2-purpose': session.purpose_id, + 'session_set-2-type': session.type_id, + 'session_set-2-requested_duration': '', + 'session_set-2-on_agenda': session.on_agenda, + 'session_set-2-remote_instructions': '', + 'session_set-2-attendees': attendees, + 'session_set-2-comments': '', + 'session_set-2-DELETE': 'on', 'submit': 'Continue'} - r = self.client.post(url,post_data) + r = self.client.post(url, post_data) self.assertEqual(r.status_code, 200) q = PyQuery(r.content) - self.assertEqual(len(q('#session-request-form')),1) + self.assertEqual(len(q('#session-request-form')), 1) self.assertContains(r, "Cannot declare a conflict with the same group") def test_request_notification(self): @@ -645,18 +614,18 @@ def test_request_notification(self): RoleFactory(name_id='chair', group=group, 
person__user__username='ameschairman') resource = ResourceAssociation.objects.create(name_id='project') # Bit of a test data hack - the fixture now has no used resources to pick from - resource.name.used=True + resource.name.used = True resource.name.save() - url = reverse('ietf.secr.sreq.views.new',kwargs={'acronym':group.acronym}) - confirm_url = reverse('ietf.secr.sreq.views.confirm',kwargs={'acronym':group.acronym}) + url = reverse('ietf.meeting.views_session_request.new_request', kwargs={'acronym': group.acronym}) + confirm_url = reverse('ietf.meeting.views_session_request.confirm', kwargs={'acronym': group.acronym}) len_before = len(outbox) attendees = '10' - post_data = {'num_session':'2', - 'attendees':attendees, - 'bethere':str(ad.pk), - 'constraint_chair_conflict':group4.acronym, - 'comments':'', + post_data = {'num_session': '2', + 'attendees': attendees, + 'bethere': str(ad.pk), + 'constraint_chair_conflict': group4.acronym, + 'comments': '', 'resources': resource.pk, 'session_time_relation': 'subsequent-days', 'adjacent_with_wg': group2.acronym, @@ -692,23 +661,23 @@ def test_request_notification(self): 'submit': 'Continue'} self.client.login(username="ameschairman", password="ameschairman+password") # submit - r = self.client.post(url,post_data) + r = self.client.post(url, post_data) self.assertEqual(r.status_code, 200) q = PyQuery(r.content) self.assertTrue('Confirm' in str(q("title")), r.context['form'].errors) # confirm post_data['submit'] = 'Submit' - r = self.client.post(confirm_url,post_data) - self.assertRedirects(r, reverse('ietf.secr.sreq.views.main')) - self.assertEqual(len(outbox),len_before+1) + r = self.client.post(confirm_url, post_data) + self.assertRedirects(r, reverse('ietf.meeting.views_session_request.list_view')) + self.assertEqual(len(outbox), len_before + 1) notification = outbox[-1] notification_payload = get_payload_text(notification) - sessions = Session.objects.filter(meeting=meeting,group=group) + sessions = 
Session.objects.filter(meeting=meeting, group=group) self.assertEqual(len(sessions), 2) session = sessions[0] - self.assertEqual(session.resources.count(),1) - self.assertEqual(session.people_constraints.count(),1) + self.assertEqual(session.resources.count(), 1) + self.assertEqual(session.people_constraints.count(), 1) self.assertEqual(session.constraints().get(name='time_relation').time_relation, 'subsequent-days') self.assertEqual(session.constraints().get(name='wg_adjacent').target.acronym, group2.acronym) self.assertEqual( @@ -731,7 +700,7 @@ def test_request_notification(self): def test_request_notification_msg(self): to = "" subject = "Dummy subject" - template = "sreq/session_request_notification.txt" + template = "meeting/session_request_notification.txt" header = "A new" meeting = MeetingFactory(type_id="ietf", date=date_today()) requester = PersonFactory(name="James O'Rourke", user__username="jimorourke") @@ -767,19 +736,19 @@ def test_request_notification_third_session(self): RoleFactory(name_id='chair', group=group, person__user__username='ameschairman') resource = ResourceAssociation.objects.create(name_id='project') # Bit of a test data hack - the fixture now has no used resources to pick from - resource.name.used=True + resource.name.used = True resource.name.save() - url = reverse('ietf.secr.sreq.views.new',kwargs={'acronym':group.acronym}) - confirm_url = reverse('ietf.secr.sreq.views.confirm',kwargs={'acronym':group.acronym}) + url = reverse('ietf.meeting.views_session_request.new_request', kwargs={'acronym': group.acronym}) + confirm_url = reverse('ietf.meeting.views_session_request.confirm', kwargs={'acronym': group.acronym}) len_before = len(outbox) attendees = '10' - post_data = {'num_session':'2', + post_data = {'num_session': '2', 'third_session': 'true', - 'attendees':attendees, - 'bethere':str(ad.pk), - 'constraint_chair_conflict':group4.acronym, - 'comments':'', + 'attendees': attendees, + 'bethere': str(ad.pk), + 
'constraint_chair_conflict': group4.acronym, + 'comments': '', 'resources': resource.pk, 'session_time_relation': 'subsequent-days', 'adjacent_with_wg': group2.acronym, @@ -826,23 +795,23 @@ def test_request_notification_third_session(self): 'submit': 'Continue'} self.client.login(username="ameschairman", password="ameschairman+password") # submit - r = self.client.post(url,post_data) + r = self.client.post(url, post_data) self.assertEqual(r.status_code, 200) q = PyQuery(r.content) self.assertTrue('Confirm' in str(q("title")), r.context['form'].errors) # confirm post_data['submit'] = 'Submit' - r = self.client.post(confirm_url,post_data) - self.assertRedirects(r, reverse('ietf.secr.sreq.views.main')) - self.assertEqual(len(outbox),len_before+1) + r = self.client.post(confirm_url, post_data) + self.assertRedirects(r, reverse('ietf.meeting.views_session_request.list_view')) + self.assertEqual(len(outbox), len_before + 1) notification = outbox[-1] notification_payload = get_payload_text(notification) - sessions = Session.objects.filter(meeting=meeting,group=group) + sessions = Session.objects.filter(meeting=meeting, group=group) self.assertEqual(len(sessions), 3) session = sessions[0] - self.assertEqual(session.resources.count(),1) - self.assertEqual(session.people_constraints.count(),1) + self.assertEqual(session.resources.count(), 1) + self.assertEqual(session.people_constraints.count(), 1) self.assertEqual(session.constraints().get(name='time_relation').time_relation, 'subsequent-days') self.assertEqual(session.constraints().get(name='wg_adjacent').target.acronym, group2.acronym) self.assertEqual( @@ -861,16 +830,17 @@ def test_request_notification_third_session(self): self.assertIn('1 Hour, 1 Hour, 1 Hour', notification_payload) self.assertIn('The third session requires your approval', notification_payload) + class LockAppTestCase(TestCase): def setUp(self): super().setUp() - self.meeting = MeetingFactory(type_id='ietf', 
date=date_today(),session_request_lock_message='locked') + self.meeting = MeetingFactory(type_id='ietf', date=date_today(), session_request_lock_message='locked') self.group = GroupFactory(acronym='mars') RoleFactory(name_id='chair', group=self.group, person__user__username='marschairman') - SessionFactory(group=self.group,meeting=self.meeting) + SessionFactory(group=self.group, meeting=self.meeting) def test_edit_request(self): - url = reverse('ietf.secr.sreq.views.edit',kwargs={'acronym':self.group.acronym}) + url = reverse('ietf.meeting.views_session_request.edit_request', kwargs={'acronym': self.group.acronym}) self.client.login(username="secretary", password="secretary+password") r = self.client.get(url) self.assertEqual(r.status_code, 200) @@ -882,48 +852,49 @@ def test_edit_request(self): self.assertEqual(r.status_code, 200) q = PyQuery(r.content) self.assertEqual(len(q(':disabled[name="submit"]')), 1) - + def test_view_request(self): - url = reverse('ietf.secr.sreq.views.view',kwargs={'acronym':self.group.acronym}) + url = reverse('ietf.meeting.views_session_request.view_request', kwargs={'acronym': self.group.acronym}) self.client.login(username="secretary", password="secretary+password") - r = self.client.get(url,follow=True) + r = self.client.get(url, follow=True) self.assertEqual(r.status_code, 200) q = PyQuery(r.content) self.assertEqual(len(q(':enabled[name="edit"]')), 1) # secretary can edit chair = self.group.role_set.filter(name_id='chair').first().person.user.username self.client.login(username=chair, password=f'{chair}+password') - r = self.client.get(url,follow=True) + r = self.client.get(url, follow=True) self.assertEqual(r.status_code, 200) q = PyQuery(r.content) self.assertEqual(len(q(':disabled[name="edit"]')), 1) # chair cannot edit def test_new_request(self): - url = reverse('ietf.secr.sreq.views.new',kwargs={'acronym':self.group.acronym}) - + url = reverse('ietf.meeting.views_session_request.new_request', kwargs={'acronym': 
self.group.acronym}) + # try as WG Chair self.client.login(username="marschairman", password="marschairman+password") r = self.client.get(url, follow=True) self.assertEqual(r.status_code, 200) q = PyQuery(r.content) - self.assertEqual(len(q('#session-request-form')),0) - + self.assertEqual(len(q('#session-request-form')), 0) + # try as Secretariat self.client.login(username="secretary", password="secretary+password") - r = self.client.get(url,follow=True) + r = self.client.get(url, follow=True) self.assertEqual(r.status_code, 200) q = PyQuery(r.content) - self.assertEqual(len(q('#session-request-form')),1) - + self.assertEqual(len(q('#session-request-form')), 1) + + class NotMeetingCase(TestCase): def test_not_meeting(self): - MeetingFactory(type_id='ietf',date=date_today()) + MeetingFactory(type_id='ietf', date=date_today()) group = GroupFactory(acronym='mars') - url = reverse('ietf.secr.sreq.views.no_session',kwargs={'acronym':group.acronym}) + url = reverse('ietf.meeting.views_session_request.no_session', kwargs={'acronym': group.acronym}) self.client.login(username="secretary", password="secretary+password") empty_outbox() - r = self.client.get(url,follow=True) + r = self.client.get(url, follow=True) # If the view invoked by that get throws an exception (such as an integrity error), # the traceback from this test will talk about a TransactionManagementError and # yell about executing queries before the end of an 'atomic' block @@ -932,14 +903,15 @@ def test_not_meeting(self): self.assertEqual(r.status_code, 200) self.assertContains(r, 'A message was sent to notify not having a session') - r = self.client.get(url,follow=True) + r = self.client.get(url, follow=True) self.assertEqual(r.status_code, 200) self.assertContains(r, 'is already marked as not meeting') - self.assertEqual(len(outbox),1) + self.assertEqual(len(outbox), 1) self.assertTrue('Not having a session' in outbox[0]['Subject']) self.assertTrue('session-request@' in outbox[0]['To']) + class 
RetrievePreviousCase(TestCase): pass @@ -949,7 +921,7 @@ class RetrievePreviousCase(TestCase): # test access by unauthorized -class SessionFormTest(TestCase): +class SessionRequestFormTest(TestCase): def setUp(self): super().setUp() self.meeting = MeetingFactory(type_id='ietf') @@ -1014,19 +986,19 @@ def setUp(self): 'session_set-2-comments': '', 'session_set-2-DELETE': '', } - + def test_valid(self): # Test with three sessions - form = SessionForm(data=self.valid_form_data, group=self.group1, meeting=self.meeting) + form = SessionRequestForm(data=self.valid_form_data, group=self.group1, meeting=self.meeting) self.assertTrue(form.is_valid()) - + # Test with two sessions self.valid_form_data.update({ 'third_session': '', 'session_set-TOTAL_FORMS': '2', 'joint_for_session': '2' }) - form = SessionForm(data=self.valid_form_data, group=self.group1, meeting=self.meeting) + form = SessionRequestForm(data=self.valid_form_data, group=self.group1, meeting=self.meeting) self.assertTrue(form.is_valid()) # Test with one session @@ -1036,9 +1008,9 @@ def test_valid(self): 'joint_for_session': '1', 'session_time_relation': '', }) - form = SessionForm(data=self.valid_form_data, group=self.group1, meeting=self.meeting) + form = SessionRequestForm(data=self.valid_form_data, group=self.group1, meeting=self.meeting) self.assertTrue(form.is_valid()) - + def test_invalid_groups(self): new_form_data = { 'constraint_chair_conflict': 'doesnotexist', @@ -1057,7 +1029,7 @@ def test_valid_group_appears_in_multiple_conflicts(self): 'constraint_tech_overlap': self.group2.acronym, } self.valid_form_data.update(new_form_data) - form = SessionForm(data=self.valid_form_data, group=self.group1, meeting=self.meeting) + form = SessionRequestForm(data=self.valid_form_data, group=self.group1, meeting=self.meeting) self.assertTrue(form.is_valid()) def test_invalid_group_appears_in_multiple_conflicts(self): @@ -1116,7 +1088,7 @@ def test_invalid_joint_for_session(self): 'joint_for_session': [ 'Session 2 
can not be the joint session, the session has not been requested.'] }) - + def test_invalid_missing_session_length(self): form = self._invalid_test_helper({ 'session_set-TOTAL_FORMS': '2', @@ -1156,6 +1128,6 @@ def test_invalid_missing_session_length(self): def _invalid_test_helper(self, new_form_data): form_data = dict(self.valid_form_data, **new_form_data) - form = SessionForm(data=form_data, group=self.group1, meeting=self.meeting) + form = SessionRequestForm(data=form_data, group=self.group1, meeting=self.meeting) self.assertFalse(form.is_valid()) return form diff --git a/ietf/meeting/tests_views.py b/ietf/meeting/tests_views.py index bd3ab772fc..b1bbc62907 100644 --- a/ietf/meeting/tests_views.py +++ b/ietf/meeting/tests_views.py @@ -1,4 +1,4 @@ -# Copyright The IETF Trust 2009-2024, All Rights Reserved +# Copyright The IETF Trust 2009-2025, All Rights Reserved # -*- coding: utf-8 -*- import datetime import io @@ -7554,7 +7554,7 @@ def test_meeting_requests(self): ) def _sreq_edit_link(sess): return urlreverse( - 'ietf.secr.sreq.views.edit', + 'ietf.meeting.views_session_request.edit_request', kwargs={ 'num': meeting.number, 'acronym': sess.group.acronym, diff --git a/ietf/meeting/urls.py b/ietf/meeting/urls.py index 18b123b4d8..af36a6656c 100644 --- a/ietf/meeting/urls.py +++ b/ietf/meeting/urls.py @@ -1,10 +1,10 @@ -# Copyright The IETF Trust 2007-2024, All Rights Reserved +# Copyright The IETF Trust 2007-2025, All Rights Reserved from django.conf import settings from django.urls import include from django.views.generic import RedirectView -from ietf.meeting import views, views_proceedings +from ietf.meeting import views, views_proceedings, views_session_request from ietf.utils.urls import url class AgendaRedirectView(RedirectView): @@ -108,6 +108,8 @@ def get_redirect_url(self, *args, **kwargs): url(r'^important-dates.(?Pics)$', views.important_dates), url(r'^proceedings/meetinghosts/edit/', views_proceedings.edit_meetinghosts), 
url(r'^proceedings/meetinghosts/(?P\d+)/logo/$', views_proceedings.meetinghost_logo), + url(r'^session/request/%(acronym)s/edit/$' % settings.URL_REGEXPS, views_session_request.edit_request), + url(r'^session/request/%(acronym)s/view/$' % settings.URL_REGEXPS, views_session_request.view_request), ] urlpatterns = [ @@ -127,6 +129,13 @@ def get_redirect_url(self, *args, **kwargs): url(r'^upcoming/?$', views.upcoming), url(r'^upcoming\.ics/?$', views.upcoming_ical), url(r'^upcoming\.json/?$', views.upcoming_json), + url(r'^session/request/$', views_session_request.list_view), + url(r'^session/request/%(acronym)s/new/$' % settings.URL_REGEXPS, views_session_request.new_request), + url(r'^session/request/%(acronym)s/approve/$' % settings.URL_REGEXPS, views_session_request.approve_request), + url(r'^session/request/%(acronym)s/no_session/$' % settings.URL_REGEXPS, views_session_request.no_session), + url(r'^session/request/%(acronym)s/cancel/$' % settings.URL_REGEXPS, views_session_request.cancel_request), + url(r'^session/request/%(acronym)s/confirm/$' % settings.URL_REGEXPS, views_session_request.confirm), + url(r'^session/request/status/$', views_session_request.status), url(r'^session/(?P\d+)/agenda_materials$', views.session_materials), url(r'^session/(?P\d+)/cancel/?', views.cancel_session), url(r'^session/(?P\d+)/edit/?', views.edit_session), @@ -140,4 +149,3 @@ def get_redirect_url(self, *args, **kwargs): url(r'^(?P\d+)/', include(safe_for_all_meeting_types)), url(r'^(?Pinterim-[a-z0-9-]+)/', include(safe_for_all_meeting_types)), ] - diff --git a/ietf/secr/sreq/views.py b/ietf/meeting/views_session_request.py similarity index 80% rename from ietf/secr/sreq/views.py rename to ietf/meeting/views_session_request.py index eb93168e1c..a1ef74f1b8 100644 --- a/ietf/secr/sreq/views.py +++ b/ietf/meeting/views_session_request.py @@ -1,29 +1,26 @@ -# Copyright The IETF Trust 2013-2022, All Rights Reserved +# Copyright The IETF Trust 2007-2025, All Rights Reserved # -*- 
coding: utf-8 -*- - import datetime import inflect from collections import defaultdict, OrderedDict from django.conf import settings from django.contrib import messages +from django.core.exceptions import ObjectDoesNotExist from django.db.models import Q from django.shortcuts import render, get_object_or_404, redirect from django.http import Http404 -import debug # pyflakes:ignore - from ietf.group.models import Group, GroupFeatures from ietf.ietfauth.utils import has_role, role_required -from ietf.meeting.models import Meeting, Session, Constraint, ResourceAssociation, SchedulingEvent from ietf.meeting.helpers import get_meeting +from ietf.meeting.models import Session, Meeting, Constraint, ResourceAssociation, SchedulingEvent from ietf.meeting.utils import add_event_info_to_session_qs -from ietf.name.models import SessionStatusName, ConstraintName -from ietf.secr.sreq.forms import (SessionForm, ToolStatusForm, allowed_conflicting_groups, +from ietf.meeting.forms import (SessionRequestStatusForm, SessionRequestForm, allowed_conflicting_groups, JOINT_FOR_SESSION_CHOICES) +from ietf.name.models import SessionStatusName, ConstraintName from ietf.secr.utils.decorators import check_permissions -from ietf.secr.utils.group import get_my_groups from ietf.utils.mail import send_mail from ietf.mailtrigger.utils import gather_address_lists @@ -31,12 +28,25 @@ # Globals # ------------------------------------------------- # TODO: This needs to be replaced with something that pays attention to groupfeatures -AUTHORIZED_ROLES=('WG Chair','WG Secretary','RG Chair','IAB Group Chair','Area Director','Secretariat','Team Chair','IRTF Chair','Program Chair','Program Lead','Program Secretary', 'EDWG Chair') +AUTHORIZED_ROLES = ( + 'WG Chair', + 'WG Secretary', + 'RG Chair', + 'IAB Group Chair', + 'Area Director', + 'Secretariat', + 'Team Chair', + 'IRTF Chair', + 'Program Chair', + 'Program Lead', + 'Program Secretary', + 'EDWG Chair') # 
------------------------------------------------- # Helper Functions # ------------------------------------------------- + def check_app_locked(meeting=None): ''' This function returns True if the application is locked to non-secretariat users. @@ -45,6 +55,54 @@ def check_app_locked(meeting=None): meeting = get_meeting(days=14) return bool(meeting.session_request_lock_message) + +def get_lock_message(meeting=None): + ''' + Returns the message to display to non-secretariat users when the tool is locked. + ''' + if not meeting: + meeting = get_meeting(days=14) + return meeting.session_request_lock_message + + +def get_my_groups(user, conclude=False): + ''' + Takes a Django user object (from request) + Returns a list of groups the user has access to. Rules are as follows + secretariat - has access to all groups + area director - has access to all groups in their area + wg chair or secretary - has access to their own group + chair of irtf has access to all irtf groups + + If user=None than all groups are returned. + concluded=True means include concluded groups. Need this to upload materials for groups + after they've been concluded. it happens. + ''' + my_groups = set() + states = ['bof', 'proposed', 'active'] + if conclude: + states.extend(['conclude', 'bof-conc']) + + all_groups = Group.objects.filter(type__features__has_meetings=True, state__in=states).order_by('acronym') + if user is None or has_role(user, 'Secretariat'): + return all_groups + + try: + person = user.person + except ObjectDoesNotExist: + return list() + + for group in all_groups: + if group.role_set.filter(person=person, name__in=('chair', 'secr', 'ad')): + my_groups.add(group) + continue + if group.parent and group.parent.role_set.filter(person=person, name__in=('ad', 'chair')): + my_groups.add(group) + continue + + return list(my_groups) + + def get_initial_session(sessions, prune_conflicts=False): ''' This function takes a queryset of sessions ordered by 'id' for consistency. 
It returns @@ -97,13 +155,43 @@ def valid_conflict(conflict): initial['joint_for_session_display'] = dict(JOINT_FOR_SESSION_CHOICES)[initial['joint_for_session']] return initial -def get_lock_message(meeting=None): + +def inbound_session_conflicts_as_string(group, meeting): ''' - Returns the message to display to non-secretariat users when the tool is locked. + Takes a Group object and Meeting object and returns a string of other groups which have + a conflict with this one ''' - if not meeting: - meeting = get_meeting(days=14) - return meeting.session_request_lock_message + constraints = group.constraint_target_set.filter(meeting=meeting, name__is_group_conflict=True) + group_set = set(constraints.values_list('source__acronym', flat=True)) # set to de-dupe + group_list = sorted(group_set) # give a consistent order + return ', '.join(group_list) + + +def get_outbound_conflicts(form: SessionRequestForm): + """extract wg conflict constraint data from a SessionForm""" + outbound_conflicts = [] + for conflictname, cfield_id in form.wg_constraint_field_ids(): + conflict_groups = form.cleaned_data[cfield_id] + if len(conflict_groups) > 0: + outbound_conflicts.append(dict(name=conflictname, groups=conflict_groups)) + return outbound_conflicts + + +def save_conflicts(group, meeting, conflicts, name): + ''' + This function takes a Group, Meeting a string which is a list of Groups acronyms (conflicts), + and the constraint name (conflict|conflic2|conflic3) and creates Constraint records + ''' + constraint_name = ConstraintName.objects.get(slug=name) + acronyms = conflicts.replace(',',' ').split() + for acronym in acronyms: + target = Group.objects.get(acronym=acronym) + + constraint = Constraint(source=group, + target=target, + meeting=meeting, + name=constraint_name) + constraint.save() def get_requester_text(person, group): @@ -129,22 +217,6 @@ def get_requester_text(person, group): ) -def save_conflicts(group, meeting, conflicts, name): - ''' - This function takes a 
Group, Meeting a string which is a list of Groups acronyms (conflicts), - and the constraint name (conflict|conflic2|conflic3) and creates Constraint records - ''' - constraint_name = ConstraintName.objects.get(slug=name) - acronyms = conflicts.replace(',',' ').split() - for acronym in acronyms: - target = Group.objects.get(acronym=acronym) - - constraint = Constraint(source=group, - target=target, - meeting=meeting, - name=constraint_name) - constraint.save() - def send_notification(group, meeting, login, sreq_data, session_data, action): ''' This function generates email notifications for various session request activities. @@ -152,10 +224,10 @@ def send_notification(group, meeting, login, sreq_data, session_data, action): session_data is an array of data from individual session subforms action argument is a string [new|update]. ''' - (to_email, cc_list) = gather_address_lists('session_requested',group=group,person=login) + (to_email, cc_list) = gather_address_lists('session_requested', group=group, person=login) from_email = (settings.SESSION_REQUEST_FROM_EMAIL) subject = '%s - New Meeting Session Request for IETF %s' % (group.acronym, meeting.number) - template = 'sreq/session_request_notification.txt' + template = 'meeting/session_request_notification.txt' # send email context = {} @@ -164,7 +236,7 @@ def send_notification(group, meeting, login, sreq_data, session_data, action): context['meeting'] = meeting context['login'] = login context['header'] = 'A new' - context['requester'] = get_requester_text(login,group) + context['requester'] = get_requester_text(login, group) # update overrides if action == 'update': @@ -174,10 +246,10 @@ def send_notification(group, meeting, login, sreq_data, session_data, action): # if third session requested approval is required # change headers TO=ADs, CC=session-request, submitter and cochairs if len(session_data) > 2: - (to_email, cc_list) = gather_address_lists('session_requested_long',group=group,person=login) + (to_email, 
cc_list) = gather_address_lists('session_requested_long', group=group, person=login) subject = '%s - Request for meeting session approval for IETF %s' % (group.acronym, meeting.number) - template = 'sreq/session_approval_notification.txt' - #status_text = 'the %s Directors for approval' % group.parent + template = 'meeting/session_approval_notification.txt' + # status_text = 'the %s Directors for approval' % group.parent context['session_lengths'] = [sd['requested_duration'] for sd in session_data] @@ -189,103 +261,188 @@ def send_notification(group, meeting, login, sreq_data, session_data, action): context, cc=cc_list) -def inbound_session_conflicts_as_string(group, meeting): - ''' - Takes a Group object and Meeting object and returns a string of other groups which have - a conflict with this one - ''' - constraints = group.constraint_target_set.filter(meeting=meeting, name__is_group_conflict=True) - group_set = set(constraints.values_list('source__acronym', flat=True)) # set to de-dupe - group_list = sorted(group_set) # give a consistent order - return ', '.join(group_list) + +def session_changed(session): + latest_event = SchedulingEvent.objects.filter(session=session).order_by('-time', '-id').first() + + if latest_event and latest_event.status_id == "schedw" and session.meeting.schedule is not None: + # send an email to iesg-secretariat to alert to change + pass + + +def status_slug_for_new_session(session, session_number): + if session.group.features.acts_like_wg and session_number == 2: + return 'apprw' + return 'schedw' # ------------------------------------------------- # View Functions # ------------------------------------------------- -@check_permissions -def approve(request, acronym): + + +@role_required(*AUTHORIZED_ROLES) +def list_view(request): ''' - This view approves the third session. For use by ADs or Secretariat. + Display list of groups the user has access to. 
''' meeting = get_meeting(days=14) - group = get_object_or_404(Group, acronym=acronym) - session = add_event_info_to_session_qs(Session.objects.filter(meeting=meeting, group=group)).filter(current_status='apprw').first() - if session is None: - raise Http404 + # check for locked flag + is_locked = check_app_locked() + if is_locked and not has_role(request.user, 'Secretariat'): + message = get_lock_message() + return render(request, 'meeting/session_request_locked.html', { + 'message': message, + 'meeting': meeting}) - if has_role(request.user,'Secretariat') or group.parent.role_set.filter(name='ad',person=request.user.person): - SchedulingEvent.objects.create( - session=session, - status=SessionStatusName.objects.get(slug='appr'), - by=request.user.person, - ) - session_changed(session) + scheduled_groups = [] + unscheduled_groups = [] - messages.success(request, 'Third session approved') - return redirect('ietf.secr.sreq.views.view', acronym=acronym) - else: - # if an unauthorized user gets here return error - messages.error(request, 'Not authorized to approve the third session') - return redirect('ietf.secr.sreq.views.view', acronym=acronym) + group_types = GroupFeatures.objects.filter(has_meetings=True).values_list('type', flat=True) -@check_permissions -def cancel(request, acronym): - ''' - This view cancels a session request and sends a notification. - To cancel, or withdraw the request set status = deleted. - "canceled" status is used by the secretariat. + my_groups = [g for g in get_my_groups(request.user, conclude=True) if g.type_id in group_types] - NOTE: this function can also be called after a session has been - scheduled during the period when the session request tool is - reopened. In this case be sure to clear the timeslot assignment as well. 
+ sessions_by_group = defaultdict(list) + for s in add_event_info_to_session_qs(Session.objects.filter(meeting=meeting, group__in=my_groups)).filter(current_status__in=['schedw', 'apprw', 'appr', 'sched']): + sessions_by_group[s.group_id].append(s) + + for group in my_groups: + group.meeting_sessions = sessions_by_group.get(group.pk, []) + + if group.pk in sessions_by_group: + # include even if concluded as we need to to see that the + # sessions are there + scheduled_groups.append(group) + else: + if group.state_id not in ['conclude', 'bof-conc']: + # too late for unscheduled if concluded + unscheduled_groups.append(group) + + # warn if there are no associated groups + if not scheduled_groups and not unscheduled_groups: + messages.warning(request, 'The account %s is not associated with any groups. If you have multiple Datatracker accounts you may try another or report a problem to %s' % (request.user, settings.SECRETARIAT_ACTION_EMAIL)) + + # add session status messages for use in template + for group in scheduled_groups: + if not group.features.acts_like_wg or (len(group.meeting_sessions) < 3): + group.status_message = group.meeting_sessions[0].current_status + else: + group.status_message = 'First two sessions: %s, Third session: %s' % (group.meeting_sessions[0].current_status, group.meeting_sessions[2].current_status) + + # add not meeting indicators for use in template + for group in unscheduled_groups: + if any(s.current_status == 'notmeet' for s in group.meeting_sessions): + group.not_meeting = True + + return render(request, 'meeting/session_request_list.html', { + 'is_locked': is_locked, + 'meeting': meeting, + 'scheduled_groups': scheduled_groups, + 'unscheduled_groups': unscheduled_groups}, + ) + + +@role_required('Secretariat') +def status(request): + ''' + This view handles locking and unlocking of the session request tool to the public. 
''' meeting = get_meeting(days=14) - group = get_object_or_404(Group, acronym=acronym) - sessions = Session.objects.filter(meeting=meeting,group=group).order_by('id') - login = request.user.person + is_locked = check_app_locked(meeting=meeting) - # delete conflicts - Constraint.objects.filter(meeting=meeting,source=group).delete() + if request.method == 'POST': + button_text = request.POST.get('submit', '') + if button_text == 'Back': + return redirect('ietf.meeting.views_session_request.list_view') - # mark sessions as deleted - for session in sessions: - SchedulingEvent.objects.create( - session=session, - status=SessionStatusName.objects.get(slug='deleted'), - by=request.user.person, - ) - session_changed(session) + form = SessionRequestStatusForm(request.POST) - # clear schedule assignments if already scheduled - session.timeslotassignments.all().delete() + if button_text == 'Lock': + if form.is_valid(): + meeting.session_request_lock_message = form.cleaned_data['message'] + meeting.save() + messages.success(request, 'Session Request Tool is now Locked') + return redirect('ietf.meeting.views_session_request.list_view') - # send notifitcation - (to_email, cc_list) = gather_address_lists('session_request_cancelled',group=group,person=login) - from_email = (settings.SESSION_REQUEST_FROM_EMAIL) - subject = '%s - Cancelling a meeting request for IETF %s' % (group.acronym, meeting.number) - send_mail(request, to_email, from_email, subject, 'sreq/session_cancel_notification.txt', - {'requester':get_requester_text(login,group), - 'meeting':meeting}, cc=cc_list) + elif button_text == 'Unlock': + meeting.session_request_lock_message = '' + meeting.save() + messages.success(request, 'Session Request Tool is now Unlocked') + return redirect('ietf.meeting.views_session_request.list_view') - messages.success(request, 'The %s Session Request has been cancelled' % group.acronym) - return redirect('ietf.secr.sreq.views.main') + else: + if is_locked: + message = 
get_lock_message() + initial = {'message': message} + form = SessionRequestStatusForm(initial=initial) + else: + form = SessionRequestStatusForm() + return render(request, 'meeting/session_request_status.html', { + 'is_locked': is_locked, + 'form': form}, + ) -def status_slug_for_new_session(session, session_number): - if session.group.features.acts_like_wg and session_number == 2: - return 'apprw' - return 'schedw' +@check_permissions +def new_request(request, acronym): + ''' + This view gathers details for a new session request. The user proceeds to confirm() + to create the request. + ''' + group = get_object_or_404(Group, acronym=acronym) + if len(group.features.session_purposes) == 0: + raise Http404(f'Cannot request sessions for group "{acronym}"') + meeting = get_meeting(days=14) + session_conflicts = dict(inbound=inbound_session_conflicts_as_string(group, meeting)) -def get_outbound_conflicts(form: SessionForm): - """extract wg conflict constraint data from a SessionForm""" - outbound_conflicts = [] - for conflictname, cfield_id in form.wg_constraint_field_ids(): - conflict_groups = form.cleaned_data[cfield_id] - if len(conflict_groups) > 0: - outbound_conflicts.append(dict(name=conflictname, groups=conflict_groups)) - return outbound_conflicts + # check if app is locked + is_locked = check_app_locked() + if is_locked and not has_role(request.user, 'Secretariat'): + messages.warning(request, "The Session Request Tool is closed") + return redirect('ietf.meeting.views_session_request.list_view') + + if request.method == 'POST': + button_text = request.POST.get('submit', '') + if button_text == 'Cancel': + return redirect('ietf.meeting.views_session_request.list_view') + + form = SessionRequestForm(group, meeting, request.POST, notifications_optional=has_role(request.user, "Secretariat")) + if form.is_valid(): + return confirm(request, acronym) + + # the "previous" querystring causes the form to be returned + # pre-populated with data from last meeeting's 
session request + elif request.method == 'GET' and 'previous' in request.GET: + latest_session = add_event_info_to_session_qs(Session.objects.filter(meeting__type_id='ietf', group=group)).exclude(current_status__in=['notmeet', 'deleted', 'canceled',]).order_by('-meeting__date').first() + if latest_session: + previous_meeting = Meeting.objects.get(number=latest_session.meeting.number) + previous_sessions = add_event_info_to_session_qs(Session.objects.filter(meeting=previous_meeting, group=group)).exclude(current_status__in=['notmeet', 'deleted']).order_by('id') + if not previous_sessions: + messages.warning(request, 'This group did not meet at %s' % previous_meeting) + return redirect('ietf.meeting.views_session_request.new_request', acronym=acronym) + else: + messages.info(request, 'Fetched session info from %s' % previous_meeting) + else: + messages.warning(request, 'Did not find any previous meeting') + return redirect('ietf.meeting.views_session_request.new_request', acronym=acronym) + + initial = get_initial_session(previous_sessions, prune_conflicts=True) + if 'resources' in initial: + initial['resources'] = [x.pk for x in initial['resources']] + form = SessionRequestForm(group, meeting, initial=initial, notifications_optional=has_role(request.user, "Secretariat")) + + else: + initial = {} + form = SessionRequestForm(group, meeting, initial=initial, notifications_optional=has_role(request.user, "Secretariat")) + + return render(request, 'meeting/session_request_form.html', { + 'meeting': meeting, + 'form': form, + 'group': group, + 'is_create': True, + 'session_conflicts': session_conflicts}, + ) @role_required(*AUTHORIZED_ROLES) @@ -295,11 +452,11 @@ def confirm(request, acronym): to confirm for submission. 
''' # FIXME: this should be using form.is_valid/form.cleaned_data - invalid input will make it crash - group = get_object_or_404(Group,acronym=acronym) + group = get_object_or_404(Group, acronym=acronym) if len(group.features.session_purposes) == 0: raise Http404(f'Cannot request sessions for group "{acronym}"') meeting = get_meeting(days=14) - form = SessionForm(group, meeting, request.POST, hidden=True, notifications_optional=has_role(request.user, "Secretariat")) + form = SessionRequestForm(group, meeting, request.POST, hidden=True, notifications_optional=has_role(request.user, "Secretariat")) form.is_valid() login = request.user.person @@ -307,8 +464,8 @@ def confirm(request, acronym): # check if request already exists for this group if add_event_info_to_session_qs(Session.objects.filter(group=group, meeting=meeting)).filter(Q(current_status__isnull=True) | ~Q(current_status__in=['deleted', 'notmeet'])): messages.warning(request, 'Sessions for working group %s have already been requested once.' 
% group.acronym) - return redirect('ietf.secr.sreq.views.main') - + return redirect('ietf.meeting.views_session_request.list_view') + session_data = form.data.copy() # use cleaned_data for the 'bethere' field so we get the Person instances session_data['bethere'] = form.cleaned_data['bethere'] if 'bethere' in form.cleaned_data else [] @@ -318,7 +475,7 @@ def confirm(request, acronym): session_data['joint_for_session_display'] = dict(JOINT_FOR_SESSION_CHOICES)[session_data['joint_for_session']] if form.cleaned_data.get('timeranges'): session_data['timeranges_display'] = [t.desc for t in form.cleaned_data['timeranges']] - session_data['resources'] = [ ResourceAssociation.objects.get(pk=pk) for pk in request.POST.getlist('resources') ] + session_data['resources'] = [ResourceAssociation.objects.get(pk=pk) for pk in request.POST.getlist('resources')] # extract wg conflict constraint data for the view / notifications outbound_conflicts = get_outbound_conflicts(form) @@ -326,7 +483,7 @@ def confirm(request, acronym): button_text = request.POST.get('submit', '') if button_text == 'Cancel': messages.success(request, 'Session Request has been cancelled') - return redirect('ietf.secr.sreq.views.main') + return redirect('ietf.meeting.views_session_request.list_view') if request.method == 'POST' and button_text == 'Submit': # delete any existing session records with status = canceled or notmeet @@ -344,10 +501,10 @@ def confirm(request, acronym): if 'resources' in form.data: new_session.resources.set(session_data['resources']) jfs = form.data.get('joint_for_session', '-1') - if not jfs: # jfs might be '' + if not jfs: # jfs might be '' jfs = '-1' if int(jfs) == count + 1: # count is zero-indexed - groups_split = form.cleaned_data.get('joint_with_groups').replace(',',' ').split() + groups_split = form.cleaned_data.get('joint_with_groups').replace(',', ' ').split() joint = Group.objects.filter(acronym__in=groups_split) new_session.joint_with_groups.set(joint) new_session.save() 
@@ -388,36 +545,105 @@ def confirm(request, acronym): 'new', ) - status_text = 'IETF Agenda to be scheduled' - messages.success(request, 'Your request has been sent to %s' % status_text) - return redirect('ietf.secr.sreq.views.main') + status_text = 'IETF Agenda to be scheduled' + messages.success(request, 'Your request has been sent to %s' % status_text) + return redirect('ietf.meeting.views_session_request.list_view') + + # POST from request submission + session_conflicts = dict( + outbound=outbound_conflicts, # each is a dict with name and groups as keys + inbound=inbound_session_conflicts_as_string(group, meeting), + ) + if form.cleaned_data.get('third_session'): + messages.warning(request, 'Note: Your request for a third session must be approved by an area director before being submitted to agenda@ietf.org. Click "Submit" below to email an approval request to the area directors') + + return render(request, 'meeting/session_request_confirm.html', { + 'form': form, + 'session': session_data, + 'group': group, + 'meeting': meeting, + 'session_conflicts': session_conflicts}, + ) + + +@role_required(*AUTHORIZED_ROLES) +def view_request(request, acronym, num=None): + ''' + This view displays the session request info + ''' + meeting = get_meeting(num, days=14) + group = get_object_or_404(Group, acronym=acronym) + query = Session.objects.filter(meeting=meeting, group=group) + status_is_null = Q(current_status__isnull=True) + status_allowed = ~Q(current_status__in=("canceled", "notmeet", "deleted")) + sessions = ( + add_event_info_to_session_qs(query) + .filter(status_is_null | status_allowed) + .order_by("id") + ) + + # check if app is locked + is_locked = check_app_locked() + if is_locked: + messages.warning(request, "The Session Request Tool is closed") + + # if there are no session requests yet, redirect to new session request page + if not sessions: + if is_locked: + return redirect('ietf.meeting.views_session_request.list_view') + else: + return 
redirect('ietf.meeting.views_session_request.new_request', acronym=acronym) + + activities = [{ + 'act_date': e.time.strftime('%b %d, %Y'), + 'act_time': e.time.strftime('%H:%M:%S'), + 'activity': e.status.name, + 'act_by': e.by, + } for e in sessions[0].schedulingevent_set.select_related('status', 'by')] + + # gather outbound conflicts + outbound_dict = OrderedDict() + for obc in group.constraint_source_set.filter(meeting=meeting, name__is_group_conflict=True): + if obc.name.slug not in outbound_dict: + outbound_dict[obc.name.slug] = [] + outbound_dict[obc.name.slug].append(obc.target.acronym) - # POST from request submission session_conflicts = dict( - outbound=outbound_conflicts, # each is a dict with name and groups as keys inbound=inbound_session_conflicts_as_string(group, meeting), + outbound=[dict(name=ConstraintName.objects.get(slug=slug), groups=' '.join(groups)) + for slug, groups in outbound_dict.items()], ) - return render(request, 'sreq/confirm.html', { - 'form': form, - 'session': session_data, + + show_approve_button = False + + # if sessions include a 3rd session waiting approval and the user is a secretariat or AD of the group + # display approve button + if any(s.current_status == 'apprw' for s in sessions): + if has_role(request.user, 'Secretariat') or group.parent.role_set.filter(name='ad', person=request.user.person): + show_approve_button = True + + # build session dictionary (like querydict from new session request form) for use in template + session = get_initial_session(sessions) + + return render(request, 'meeting/session_request_view.html', { + 'can_edit': (not is_locked) or has_role(request.user, 'Secretariat'), + 'can_cancel': (not is_locked) or has_role(request.user, 'Secretariat'), + 'session': session, # legacy processed data + 'sessions': sessions, # actual session instances + 'activities': activities, + 'meeting': meeting, 'group': group, - 'session_conflicts': session_conflicts}, + 'session_conflicts': session_conflicts, + 
'show_approve_button': show_approve_button}, ) - -def session_changed(session): - latest_event = SchedulingEvent.objects.filter(session=session).order_by('-time', '-id').first() - - if latest_event and latest_event.status_id == "schedw" and session.meeting.schedule != None: - # send an email to iesg-secretariat to alert to change - pass @check_permissions -def edit(request, acronym, num=None): +def edit_request(request, acronym, num=None): ''' This view allows the user to edit details of the session request ''' - meeting = get_meeting(num,days=14) + meeting = get_meeting(num, days=14) group = get_object_or_404(Group, acronym=acronym) if len(group.features.session_purposes) == 0: raise Http404(f'Cannot request sessions for group "{acronym}"') @@ -443,15 +669,15 @@ def edit(request, acronym, num=None): login = request.user.person first_session = Session() - if(len(sessions) > 0): + if (len(sessions) > 0): first_session = sessions[0] if request.method == 'POST': button_text = request.POST.get('submit', '') if button_text == 'Cancel': - return redirect('ietf.secr.sreq.views.view', acronym=acronym) + return redirect('ietf.meeting.views_session_request.view_request', acronym=acronym) - form = SessionForm(group, meeting, request.POST, initial=initial, notifications_optional=has_role(request.user, "Secretariat")) + form = SessionRequestForm(group, meeting, request.POST, initial=initial, notifications_optional=has_role(request.user, "Secretariat")) if form.is_valid(): if form.has_changed(): changed_session_forms = [sf for sf in form.session_forms.forms_to_keep if sf.has_changed()] @@ -513,11 +739,11 @@ def edit(request, acronym, num=None): if 'resources' in form.changed_data: new_resource_ids = form.cleaned_data['resources'] - new_resources = [ ResourceAssociation.objects.get(pk=a) - for a in new_resource_ids] + new_resources = [ResourceAssociation.objects.get(pk=a) + for a in new_resource_ids] first_session.resources = new_resources - if 'bethere' in form.changed_data and 
set(form.cleaned_data['bethere'])!=set(initial['bethere']): + if 'bethere' in form.changed_data and set(form.cleaned_data['bethere']) != set(initial['bethere']): first_session.constraints().filter(name='bethere').delete() bethere_cn = ConstraintName.objects.get(slug='bethere') for p in form.cleaned_data['bethere']: @@ -539,7 +765,7 @@ def edit(request, acronym, num=None): # deprecated # log activity - #add_session_activity(group,'Session Request was updated',meeting,user) + # add_session_activity(group,'Session Request was updated',meeting,user) # send notification if form.cleaned_data.get("send_notifications"): @@ -556,7 +782,7 @@ def edit(request, acronym, num=None): ) messages.success(request, 'Session Request updated') - return redirect('ietf.secr.sreq.views.view', acronym=acronym) + return redirect('ietf.meeting.views_session_request.view_request', acronym=acronym) else: # method is not POST # gather outbound conflicts for initial value @@ -567,142 +793,46 @@ def edit(request, acronym, num=None): initial['constraint_{}'.format(slug)] = ' '.join(groups) if not sessions: - return redirect('ietf.secr.sreq.views.new', acronym=acronym) - form = SessionForm(group, meeting, initial=initial, notifications_optional=has_role(request.user, "Secretariat")) + return redirect('ietf.meeting.views_session_request.new_request', acronym=acronym) + form = SessionRequestForm(group, meeting, initial=initial, notifications_optional=has_role(request.user, "Secretariat")) - return render(request, 'sreq/edit.html', { - 'is_locked': is_locked and not has_role(request.user,'Secretariat'), + return render(request, 'meeting/session_request_form.html', { + 'is_locked': is_locked and not has_role(request.user, 'Secretariat'), 'meeting': meeting, 'form': form, 'group': group, + 'is_create': False, 'session_conflicts': session_conflicts}, ) -@role_required(*AUTHORIZED_ROLES) -def main(request): - ''' - Display list of groups the user has access to. 
- - Template variables - form: a select box populated with unscheduled groups - meeting: the current meeting - scheduled_sessions: - ''' - # check for locked flag - is_locked = check_app_locked() - - if is_locked and not has_role(request.user,'Secretariat'): - message = get_lock_message() - return render(request, 'sreq/locked.html', { - 'message': message}, - ) - - meeting = get_meeting(days=14) - - scheduled_groups = [] - unscheduled_groups = [] - - group_types = GroupFeatures.objects.filter(has_meetings=True).values_list('type', flat=True) - - my_groups = [g for g in get_my_groups(request.user, conclude=True) if g.type_id in group_types] - - sessions_by_group = defaultdict(list) - for s in add_event_info_to_session_qs(Session.objects.filter(meeting=meeting, group__in=my_groups)).filter(current_status__in=['schedw', 'apprw', 'appr', 'sched']): - sessions_by_group[s.group_id].append(s) - - for group in my_groups: - group.meeting_sessions = sessions_by_group.get(group.pk, []) - - if group.pk in sessions_by_group: - # include even if concluded as we need to to see that the - # sessions are there - scheduled_groups.append(group) - else: - if group.state_id not in ['conclude', 'bof-conc']: - # too late for unscheduled if concluded - unscheduled_groups.append(group) - - # warn if there are no associated groups - if not scheduled_groups and not unscheduled_groups: - messages.warning(request, 'The account %s is not associated with any groups. 
If you have multiple Datatracker accounts you may try another or report a problem to %s' % (request.user, settings.SECRETARIAT_ACTION_EMAIL)) - - # add session status messages for use in template - for group in scheduled_groups: - if not group.features.acts_like_wg or (len(group.meeting_sessions) < 3): - group.status_message = group.meeting_sessions[0].current_status - else: - group.status_message = 'First two sessions: %s, Third session: %s' % (group.meeting_sessions[0].current_status, group.meeting_sessions[2].current_status) - - # add not meeting indicators for use in template - for group in unscheduled_groups: - if any(s.current_status == 'notmeet' for s in group.meeting_sessions): - group.not_meeting = True - - return render(request, 'sreq/main.html', { - 'is_locked': is_locked, - 'meeting': meeting, - 'scheduled_groups': scheduled_groups, - 'unscheduled_groups': unscheduled_groups}, - ) @check_permissions -def new(request, acronym): +def approve_request(request, acronym): ''' - This view gathers details for a new session request. The user proceeds to confirm() - to create the request. + This view approves the third session. For use by ADs or Secretariat. 
''' - group = get_object_or_404(Group, acronym=acronym) - if len(group.features.session_purposes) == 0: - raise Http404(f'Cannot request sessions for group "{acronym}"') meeting = get_meeting(days=14) - session_conflicts = dict(inbound=inbound_session_conflicts_as_string(group, meeting)) - - # check if app is locked - is_locked = check_app_locked() - if is_locked and not has_role(request.user,'Secretariat'): - messages.warning(request, "The Session Request Tool is closed") - return redirect('ietf.secr.sreq.views.main') - - if request.method == 'POST': - button_text = request.POST.get('submit', '') - if button_text == 'Cancel': - return redirect('ietf.secr.sreq.views.main') - - form = SessionForm(group, meeting, request.POST, notifications_optional=has_role(request.user, "Secretariat")) - if form.is_valid(): - return confirm(request, acronym) + group = get_object_or_404(Group, acronym=acronym) - # the "previous" querystring causes the form to be returned - # pre-populated with data from last meeeting's session request - elif request.method == 'GET' and 'previous' in request.GET: - latest_session = add_event_info_to_session_qs(Session.objects.filter(meeting__type_id='ietf', group=group)).exclude(current_status__in=['notmeet', 'deleted', 'canceled',]).order_by('-meeting__date').first() - if latest_session: - previous_meeting = Meeting.objects.get(number=latest_session.meeting.number) - previous_sessions = add_event_info_to_session_qs(Session.objects.filter(meeting=previous_meeting, group=group)).exclude(current_status__in=['notmeet', 'deleted']).order_by('id') - if not previous_sessions: - messages.warning(request, 'This group did not meet at %s' % previous_meeting) - return redirect('ietf.secr.sreq.views.new', acronym=acronym) - else: - messages.info(request, 'Fetched session info from %s' % previous_meeting) - else: - messages.warning(request, 'Did not find any previous meeting') - return redirect('ietf.secr.sreq.views.new', acronym=acronym) + session = 
add_event_info_to_session_qs(Session.objects.filter(meeting=meeting, group=group)).filter(current_status='apprw').first() + if session is None: + raise Http404 - initial = get_initial_session(previous_sessions, prune_conflicts=True) - if 'resources' in initial: - initial['resources'] = [x.pk for x in initial['resources']] - form = SessionForm(group, meeting, initial=initial, notifications_optional=has_role(request.user, "Secretariat")) + if has_role(request.user, 'Secretariat') or group.parent.role_set.filter(name='ad', person=request.user.person): + SchedulingEvent.objects.create( + session=session, + status=SessionStatusName.objects.get(slug='appr'), + by=request.user.person, + ) + session_changed(session) + messages.success(request, 'Third session approved') + return redirect('ietf.meeting.views_session_request.view_request', acronym=acronym) else: - initial={} - form = SessionForm(group, meeting, initial=initial, notifications_optional=has_role(request.user, "Secretariat")) + # if an unauthorized user gets here return error + messages.error(request, 'Not authorized to approve the third session') + return redirect('ietf.meeting.views_session_request.view_request', acronym=acronym) - return render(request, 'sreq/new.html', { - 'meeting': meeting, - 'form': form, - 'group': group, - 'session_conflicts': session_conflicts}, - ) @check_permissions def no_session(request, acronym): @@ -722,7 +852,7 @@ def no_session(request, acronym): # skip if state is already notmeet if add_event_info_to_session_qs(Session.objects.filter(group=group, meeting=meeting)).filter(current_status='notmeet'): messages.info(request, 'The group %s is already marked as not meeting' % group.acronym) - return redirect('ietf.secr.sreq.views.main') + return redirect('ietf.meeting.views_session_request.list_view') session = Session.objects.create( group=group, @@ -740,125 +870,62 @@ def no_session(request, acronym): session_changed(session) # send notification - (to_email, cc_list) = 
gather_address_lists('session_request_not_meeting',group=group,person=login) + (to_email, cc_list) = gather_address_lists('session_request_not_meeting', group=group, person=login) from_email = (settings.SESSION_REQUEST_FROM_EMAIL) subject = '%s - Not having a session at IETF %s' % (group.acronym, meeting.number) - send_mail(request, to_email, from_email, subject, 'sreq/not_meeting_notification.txt', - {'login':login, - 'group':group, - 'meeting':meeting}, cc=cc_list) + send_mail(request, to_email, from_email, subject, 'meeting/session_not_meeting_notification.txt', + {'login': login, + 'group': group, + 'meeting': meeting}, cc=cc_list) # deprecated? # log activity - #text = 'A message was sent to notify not having a session at IETF %d' % meeting.meeting_num - #add_session_activity(group,text,meeting,request.person) + # text = 'A message was sent to notify not having a session at IETF %d' % meeting.meeting_num + # add_session_activity(group,text,meeting,request.person) # redirect messages.success(request, 'A message was sent to notify not having a session at IETF %s' % meeting.number) - return redirect('ietf.secr.sreq.views.main') - -@role_required('Secretariat') -def tool_status(request): - ''' - This view handles locking and unlocking of the tool to the public. 
- ''' - meeting = get_meeting(days=14) - is_locked = check_app_locked(meeting=meeting) - - if request.method == 'POST': - button_text = request.POST.get('submit', '') - if button_text == 'Back': - return redirect('ietf.secr.sreq.views.main') - - form = ToolStatusForm(request.POST) - - if button_text == 'Lock': - if form.is_valid(): - meeting.session_request_lock_message = form.cleaned_data['message'] - meeting.save() - messages.success(request, 'Session Request Tool is now Locked') - return redirect('ietf.secr.sreq.views.main') - - elif button_text == 'Unlock': - meeting.session_request_lock_message = '' - meeting.save() - messages.success(request, 'Session Request Tool is now Unlocked') - return redirect('ietf.secr.sreq.views.main') - - else: - if is_locked: - message = get_lock_message() - initial = {'message': message} - form = ToolStatusForm(initial=initial) - else: - form = ToolStatusForm() + return redirect('ietf.meeting.views_session_request.list_view') - return render(request, 'sreq/tool_status.html', { - 'is_locked': is_locked, - 'form': form}, - ) -@role_required(*AUTHORIZED_ROLES) -def view(request, acronym, num = None): +@check_permissions +def cancel_request(request, acronym): ''' - This view displays the session request info + This view cancels a session request and sends a notification. + To cancel, or withdraw the request set status = deleted. + "canceled" status is used by the secretariat. + + NOTE: this function can also be called after a session has been + scheduled during the period when the session request tool is + reopened. In this case be sure to clear the timeslot assignment as well. 
''' - meeting = get_meeting(num,days=14) + meeting = get_meeting(days=14) group = get_object_or_404(Group, acronym=acronym) - sessions = add_event_info_to_session_qs(Session.objects.filter(meeting=meeting, group=group)).filter(Q(current_status__isnull=True) | ~Q(current_status__in=('canceled','notmeet','deleted'))).order_by('id') - - # check if app is locked - is_locked = check_app_locked() - if is_locked: - messages.warning(request, "The Session Request Tool is closed") - - # if there are no session requests yet, redirect to new session request page - if not sessions: - if is_locked: - return redirect('ietf.secr.sreq.views.main') - else: - return redirect('ietf.secr.sreq.views.new', acronym=acronym) - - activities = [{ - 'act_date': e.time.strftime('%b %d, %Y'), - 'act_time': e.time.strftime('%H:%M:%S'), - 'activity': e.status.name, - 'act_by': e.by, - } for e in sessions[0].schedulingevent_set.select_related('status', 'by')] - - # gather outbound conflicts - outbound_dict = OrderedDict() - for obc in group.constraint_source_set.filter(meeting=meeting, name__is_group_conflict=True): - if obc.name.slug not in outbound_dict: - outbound_dict[obc.name.slug] = [] - outbound_dict[obc.name.slug].append(obc.target.acronym) - - session_conflicts = dict( - inbound=inbound_session_conflicts_as_string(group, meeting), - outbound=[dict(name=ConstraintName.objects.get(slug=slug), groups=' '.join(groups)) - for slug, groups in outbound_dict.items()], - ) + sessions = Session.objects.filter(meeting=meeting, group=group).order_by('id') + login = request.user.person - show_approve_button = False + # delete conflicts + Constraint.objects.filter(meeting=meeting, source=group).delete() - # if sessions include a 3rd session waiting approval and the user is a secretariat or AD of the group - # display approve button - if any(s.current_status == 'apprw' for s in sessions): - if has_role(request.user,'Secretariat') or group.parent.role_set.filter(name='ad',person=request.user.person): - 
show_approve_button = True + # mark sessions as deleted + for session in sessions: + SchedulingEvent.objects.create( + session=session, + status=SessionStatusName.objects.get(slug='deleted'), + by=request.user.person, + ) + session_changed(session) - # build session dictionary (like querydict from new session request form) for use in template - session = get_initial_session(sessions) + # clear schedule assignments if already scheduled + session.timeslotassignments.all().delete() - return render(request, 'sreq/view.html', { - 'can_edit': (not is_locked) or has_role(request.user, 'Secretariat'), - 'can_cancel': (not is_locked) or has_role(request.user, 'Secretariat'), - 'session': session, # legacy processed data - 'sessions': sessions, # actual session instances - 'activities': activities, - 'meeting': meeting, - 'group': group, - 'session_conflicts': session_conflicts, - 'show_approve_button': show_approve_button}, - ) + # send notifitcation + (to_email, cc_list) = gather_address_lists('session_request_cancelled', group=group, person=login) + from_email = (settings.SESSION_REQUEST_FROM_EMAIL) + subject = '%s - Cancelling a meeting request for IETF %s' % (group.acronym, meeting.number) + send_mail(request, to_email, from_email, subject, 'meeting/session_cancel_notification.txt', + {'requester': get_requester_text(login, group), + 'meeting': meeting}, cc=cc_list) + messages.success(request, 'The %s Session Request has been cancelled' % group.acronym) + return redirect('ietf.meeting.views_session_request.list_view') diff --git a/ietf/secr/meetings/views.py b/ietf/secr/meetings/views.py index 47f7b7ffa5..1f6f2f3297 100644 --- a/ietf/secr/meetings/views.py +++ b/ietf/secr/meetings/views.py @@ -1,4 +1,4 @@ -# Copyright The IETF Trust 2007-2023, All Rights Reserved +# Copyright The IETF Trust 2007-2025, All Rights Reserved # -*- coding: utf-8 -*- import datetime @@ -20,12 +20,12 @@ from ietf.meeting.helpers import make_materials_directories, populate_important_dates from 
ietf.meeting.models import Meeting, Session, Room, TimeSlot, SchedTimeSessAssignment, Schedule, SchedulingEvent from ietf.meeting.utils import add_event_info_to_session_qs +from ietf.meeting.views_session_request import get_initial_session from ietf.name.models import SessionStatusName from ietf.group.models import Group, GroupEvent from ietf.secr.meetings.forms import ( BaseMeetingRoomFormSet, MeetingModelForm, MeetingSelectForm, MeetingRoomForm, MiscSessionForm, TimeSlotForm, RegularSessionEditForm, MeetingRoomOptionsForm ) -from ietf.secr.sreq.views import get_initial_session from ietf.secr.utils.meeting import get_session, get_timeslot from ietf.mailtrigger.utils import gather_address_lists from ietf.utils.timezone import make_aware diff --git a/ietf/secr/sreq/__init__.py b/ietf/secr/sreq/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/ietf/secr/sreq/forms.py b/ietf/secr/sreq/forms.py deleted file mode 100644 index 4a0f449b2a..0000000000 --- a/ietf/secr/sreq/forms.py +++ /dev/null @@ -1,333 +0,0 @@ -# Copyright The IETF Trust 2013-2022, All Rights Reserved -# -*- coding: utf-8 -*- - - -from django import forms -from django.template.defaultfilters import pluralize - -import debug # pyflakes:ignore - -from ietf.name.models import TimerangeName, ConstraintName -from ietf.group.models import Group -from ietf.meeting.forms import sessiondetailsformset_factory -from ietf.meeting.models import ResourceAssociation, Constraint -from ietf.person.fields import SearchablePersonsField -from ietf.person.models import Person -from ietf.utils.fields import ModelMultipleChoiceField -from ietf.utils.html import clean_text_field -from ietf.utils import log - -# ------------------------------------------------- -# Globals -# ------------------------------------------------- - -NUM_SESSION_CHOICES = (('','--Please select'),('1','1'),('2','2')) -SESSION_TIME_RELATION_CHOICES = (('', 'No preference'),) + Constraint.TIME_RELATION_CHOICES 
-JOINT_FOR_SESSION_CHOICES = (('1', 'First session'), ('2', 'Second session'), ('3', 'Third session'), ) - -# ------------------------------------------------- -# Helper Functions -# ------------------------------------------------- -def allowed_conflicting_groups(): - return Group.objects.filter(type__in=['wg', 'ag', 'rg', 'rag', 'program', 'edwg'], state__in=['bof', 'proposed', 'active']) - -def check_conflict(groups, source_group): - ''' - Takes a string which is a list of group acronyms. Checks that they are all active groups - ''' - # convert to python list (allow space or comma separated lists) - items = groups.replace(',',' ').split() - active_groups = allowed_conflicting_groups() - for group in items: - if group == source_group.acronym: - raise forms.ValidationError("Cannot declare a conflict with the same group: %s" % group) - - if not active_groups.filter(acronym=group): - raise forms.ValidationError("Invalid or inactive group acronym: %s" % group) - -# ------------------------------------------------- -# Forms -# ------------------------------------------------- - -class GroupSelectForm(forms.Form): - group = forms.ChoiceField() - - def __init__(self,*args,**kwargs): - choices = kwargs.pop('choices') - super(GroupSelectForm, self).__init__(*args,**kwargs) - self.fields['group'].widget.choices = choices - - -class NameModelMultipleChoiceField(ModelMultipleChoiceField): - def label_from_instance(self, name): - return name.desc - - -class SessionForm(forms.Form): - num_session = forms.ChoiceField(choices=NUM_SESSION_CHOICES) - # session fields are added in __init__() - session_time_relation = forms.ChoiceField(choices=SESSION_TIME_RELATION_CHOICES, required=False) - attendees = forms.IntegerField() - # FIXME: it would cleaner to have these be - # ModelMultipleChoiceField, and just customize the widgetry, that - # way validation comes for free (applies to this CharField and the - # constraints dynamically instantiated in __init__()) - joint_with_groups = 
forms.CharField(max_length=255,required=False) - joint_with_groups_selector = forms.ChoiceField(choices=[], required=False) # group select widget for prev field - joint_for_session = forms.ChoiceField(choices=JOINT_FOR_SESSION_CHOICES, required=False) - comments = forms.CharField(max_length=200,required=False) - third_session = forms.BooleanField(required=False) - resources = forms.MultipleChoiceField(widget=forms.CheckboxSelectMultiple,required=False) - bethere = SearchablePersonsField(label="Must be present", required=False) - timeranges = NameModelMultipleChoiceField(widget=forms.CheckboxSelectMultiple, required=False, - queryset=TimerangeName.objects.all()) - adjacent_with_wg = forms.ChoiceField(required=False) - send_notifications = forms.BooleanField(label="Send notification emails?", required=False, initial=False) - - def __init__(self, group, meeting, data=None, *args, **kwargs): - self.hidden = kwargs.pop('hidden', False) - self.notifications_optional = kwargs.pop('notifications_optional', False) - - self.group = group - formset_class = sessiondetailsformset_factory(max_num=3 if group.features.acts_like_wg else 50) - self.session_forms = formset_class(group=self.group, meeting=meeting, data=data) - super(SessionForm, self).__init__(data=data, *args, **kwargs) - if not self.notifications_optional: - self.fields['send_notifications'].widget = forms.HiddenInput() - - # Allow additional sessions for non-wg-like groups - if not self.group.features.acts_like_wg: - self.fields['num_session'].choices = ((n, str(n)) for n in range(1, 51)) - - self.fields['comments'].widget = forms.Textarea(attrs={'rows':'3','cols':'65'}) - - other_groups = list(allowed_conflicting_groups().exclude(pk=group.pk).values_list('acronym', 'acronym').order_by('acronym')) - self.fields['adjacent_with_wg'].choices = [('', '--No preference')] + other_groups - group_acronym_choices = [('','--Select WG(s)')] + other_groups - self.fields['joint_with_groups_selector'].choices = 
group_acronym_choices - - # Set up constraints for the meeting - self._wg_field_data = [] - for constraintname in meeting.group_conflict_types.all(): - # two fields for each constraint: a CharField for the group list and a selector to add entries - constraint_field = forms.CharField(max_length=255, required=False) - constraint_field.widget.attrs['data-slug'] = constraintname.slug - constraint_field.widget.attrs['data-constraint-name'] = str(constraintname).title() - self._add_widget_class(constraint_field.widget, 'wg_constraint') - - selector_field = forms.ChoiceField(choices=group_acronym_choices, required=False) - selector_field.widget.attrs['data-slug'] = constraintname.slug # used by onchange handler - self._add_widget_class(selector_field.widget, 'wg_constraint_selector') - - cfield_id = 'constraint_{}'.format(constraintname.slug) - cselector_id = 'wg_selector_{}'.format(constraintname.slug) - # keep an eye out for field name conflicts - log.assertion('cfield_id not in self.fields') - log.assertion('cselector_id not in self.fields') - self.fields[cfield_id] = constraint_field - self.fields[cselector_id] = selector_field - self._wg_field_data.append((constraintname, cfield_id, cselector_id)) - - # Show constraints that are not actually used by the meeting so these don't get lost - self._inactive_wg_field_data = [] - inactive_cnames = ConstraintName.objects.filter( - is_group_conflict=True # Only collect group conflicts... - ).exclude( - meeting=meeting # ...that are not enabled for this meeting... - ).filter( - constraint__source=group, # ...but exist for this group... - constraint__meeting=meeting, # ... at this meeting. 
- ).distinct() - - for inactive_constraint_name in inactive_cnames: - field_id = 'delete_{}'.format(inactive_constraint_name.slug) - self.fields[field_id] = forms.BooleanField(required=False, label='Delete this conflict', help_text='Delete this inactive conflict?') - constraints = group.constraint_source_set.filter(meeting=meeting, name=inactive_constraint_name) - self._inactive_wg_field_data.append( - (inactive_constraint_name, - ' '.join([c.target.acronym for c in constraints]), - field_id) - ) - - self.fields['joint_with_groups_selector'].widget.attrs['onchange'] = "document.form_post.joint_with_groups.value=document.form_post.joint_with_groups.value + ' ' + this.options[this.selectedIndex].value; return 1;" - self.fields["resources"].choices = [(x.pk,x.desc) for x in ResourceAssociation.objects.filter(name__used=True).order_by('name__order') ] - - if self.hidden: - # replace all the widgets to start... - for key in list(self.fields.keys()): - self.fields[key].widget = forms.HiddenInput() - # re-replace a couple special cases - self.fields['resources'].widget = forms.MultipleHiddenInput() - self.fields['timeranges'].widget = forms.MultipleHiddenInput() - # and entirely replace bethere - no need to support searching if input is hidden - self.fields['bethere'] = ModelMultipleChoiceField( - widget=forms.MultipleHiddenInput, required=False, - queryset=Person.objects.all(), - ) - - def wg_constraint_fields(self): - """Iterates over wg constraint fields - - Intended for use in the template. 
- """ - for cname, cfield_id, cselector_id in self._wg_field_data: - yield cname, self[cfield_id], self[cselector_id] - - def wg_constraint_count(self): - """How many wg constraints are there?""" - return len(self._wg_field_data) - - def wg_constraint_field_ids(self): - """Iterates over wg constraint field IDs""" - for cname, cfield_id, _ in self._wg_field_data: - yield cname, cfield_id - - def inactive_wg_constraints(self): - for cname, value, field_id in self._inactive_wg_field_data: - yield cname, value, self[field_id] - - def inactive_wg_constraint_count(self): - return len(self._inactive_wg_field_data) - - def inactive_wg_constraint_field_ids(self): - """Iterates over wg constraint field IDs""" - for cname, _, field_id in self._inactive_wg_field_data: - yield cname, field_id - - @staticmethod - def _add_widget_class(widget, new_class): - """Add a new class, taking care in case some already exist""" - existing_classes = widget.attrs.get('class', '').split() - widget.attrs['class'] = ' '.join(existing_classes + [new_class]) - - def _join_conflicts(self, cleaned_data, slugs): - """Concatenate constraint fields from cleaned data into a single list""" - conflicts = [] - for cname, cfield_id, _ in self._wg_field_data: - if cname.slug in slugs and cfield_id in cleaned_data: - groups = cleaned_data[cfield_id] - # convert to python list (allow space or comma separated lists) - items = groups.replace(',',' ').split() - conflicts.extend(items) - return conflicts - - def _validate_duplicate_conflicts(self, cleaned_data): - """Validate that no WGs appear in more than one constraint that does not allow duplicates - - Raises ValidationError - """ - # Only the older constraints (conflict, conflic2, conflic3) need to be mutually exclusive. 
- all_conflicts = self._join_conflicts(cleaned_data, ['conflict', 'conflic2', 'conflic3']) - seen = [] - duplicated = [] - errors = [] - for c in all_conflicts: - if c not in seen: - seen.append(c) - elif c not in duplicated: # only report once - duplicated.append(c) - errors.append(forms.ValidationError('%s appears in conflicts more than once' % c)) - return errors - - def clean_joint_with_groups(self): - groups = self.cleaned_data['joint_with_groups'] - check_conflict(groups, self.group) - return groups - - def clean_comments(self): - return clean_text_field(self.cleaned_data['comments']) - - def clean_bethere(self): - bethere = self.cleaned_data["bethere"] - if bethere: - extra = set( - Person.objects.filter( - role__group=self.group, role__name__in=["chair", "ad"] - ) - & bethere - ) - if extra: - extras = ", ".join(e.name for e in extra) - raise forms.ValidationError( - ( - f"Please remove the following person{pluralize(len(extra))}, the system " - f"tracks their availability due to their role{pluralize(len(extra))}: {extras}." 
- ) - ) - return bethere - - def clean_send_notifications(self): - return True if not self.notifications_optional else self.cleaned_data['send_notifications'] - - def is_valid(self): - return super().is_valid() and self.session_forms.is_valid() - - def clean(self): - super(SessionForm, self).clean() - self.session_forms.clean() - - data = self.cleaned_data - - # Validate the individual conflict fields - for _, cfield_id, _ in self._wg_field_data: - try: - check_conflict(data[cfield_id], self.group) - except forms.ValidationError as e: - self.add_error(cfield_id, e) - - # Skip remaining tests if individual field tests had errors, - if self.errors: - return data - - # error if conflicts contain disallowed dupes - for error in self._validate_duplicate_conflicts(data): - self.add_error(None, error) - - # Verify expected number of session entries are present - num_sessions_with_data = len(self.session_forms.forms_to_keep) - num_sessions_expected = -1 - try: - num_sessions_expected = int(data.get('num_session', '')) - except ValueError: - self.add_error('num_session', 'Invalid value for number of sessions') - if num_sessions_with_data < num_sessions_expected: - self.add_error('num_session', 'Must provide data for all sessions') - - # if default (empty) option is selected, cleaned_data won't include num_session key - if num_sessions_expected != 2 and num_sessions_expected is not None: - if data.get('session_time_relation'): - self.add_error( - 'session_time_relation', - forms.ValidationError('Time between sessions can only be used when two sessions are requested.') - ) - - joint_session = data.get('joint_for_session', '') - if joint_session != '': - joint_session = int(joint_session) - if joint_session > num_sessions_with_data: - self.add_error( - 'joint_for_session', - forms.ValidationError( - f'Session {joint_session} can not be the joint session, the session has not been requested.' 
- ) - ) - - return data - - @property - def media(self): - # get media for our formset - return super().media + self.session_forms.media + forms.Media(js=('secr/js/session_form.js',)) - - -# Used for totally virtual meetings during COVID-19 to omit the expected -# number of attendees since there were no room size limitations -# -# class VirtualSessionForm(SessionForm): -# '''A SessionForm customized for special virtual meeting requirements''' -# attendees = forms.IntegerField(required=False) - - -class ToolStatusForm(forms.Form): - message = forms.CharField(widget=forms.Textarea(attrs={'rows':'3','cols':'80'}), strip=False) - diff --git a/ietf/secr/sreq/templatetags/__init__.py b/ietf/secr/sreq/templatetags/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/ietf/secr/sreq/urls.py b/ietf/secr/sreq/urls.py deleted file mode 100644 index 7e0db8117a..0000000000 --- a/ietf/secr/sreq/urls.py +++ /dev/null @@ -1,20 +0,0 @@ -# Copyright The IETF Trust 2007-2019, All Rights Reserved - -from django.conf import settings - -from ietf.secr.sreq import views -from ietf.utils.urls import url - -urlpatterns = [ - url(r'^$', views.main), - url(r'^status/$', views.tool_status), - url(r'^%(acronym)s/$' % settings.URL_REGEXPS, views.view), - url(r'^(?P[A-Za-z0-9_\-\+]+)/%(acronym)s/view/$' % settings.URL_REGEXPS, views.view), - url(r'^%(acronym)s/approve/$' % settings.URL_REGEXPS, views.approve), - url(r'^%(acronym)s/cancel/$' % settings.URL_REGEXPS, views.cancel), - url(r'^%(acronym)s/confirm/$' % settings.URL_REGEXPS, views.confirm), - url(r'^%(acronym)s/edit/$' % settings.URL_REGEXPS, views.edit), - url(r'^%(acronym)s/new/$' % settings.URL_REGEXPS, views.new), - url(r'^%(acronym)s/no_session/$' % settings.URL_REGEXPS, views.no_session), - url(r'^(?P[A-Za-z0-9_\-\+]+)/%(acronym)s/edit/$' % settings.URL_REGEXPS, views.edit), -] diff --git a/ietf/secr/telechat/tests.py b/ietf/secr/telechat/tests.py index 39949b83a2..fa26d33a5c 100644 --- 
a/ietf/secr/telechat/tests.py +++ b/ietf/secr/telechat/tests.py @@ -13,6 +13,7 @@ IndividualDraftFactory, ConflictReviewFactory) from ietf.doc.models import BallotDocEvent, BallotType, BallotPositionDocEvent, State, Document from ietf.doc.utils import update_telechat, create_ballot_if_not_open +from ietf.meeting.factories import MeetingFactory from ietf.utils.test_utils import TestCase from ietf.utils.timezone import date_today, datetime_today from ietf.iesg.models import TelechatDate @@ -25,6 +26,26 @@ def augment_data(): TelechatDate.objects.create(date=date_today()) +class SecrUrlTests(TestCase): + def test_urls(self): + MeetingFactory(type_id='ietf', date=date_today()) + + # check public options + response = self.client.get("/secr/") + self.assertEqual(response.status_code, 200) + q = PyQuery(response.content) + links = q('div.secr-menu a') + self.assertEqual(len(links), 1) + self.assertEqual(PyQuery(links[0]).text(), 'Announcements') + + # check secretariat only options + self.client.login(username="secretary", password="secretary+password") + response = self.client.get("/secr/") + self.assertEqual(response.status_code, 200) + q = PyQuery(response.content) + links = q('div.secr-menu a') + self.assertEqual(len(links), 4) + class SecrTelechatTestCase(TestCase): def test_main(self): "Main Test" diff --git a/ietf/secr/templates/includes/activities.html b/ietf/secr/templates/includes/activities.html deleted file mode 100644 index 1304b7c48d..0000000000 --- a/ietf/secr/templates/includes/activities.html +++ /dev/null @@ -1,23 +0,0 @@ -

Activities Log

-
From 2cb2ad15ba8500acb79b8dc62fd1903ad127a385 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 17 Mar 2025 17:09:38 -0400 Subject: [PATCH 033/405] chore(deps): bump nanoid in /dev/deploy-to-container in the npm group (#8689) Bumps the npm group in /dev/deploy-to-container with 1 update: [nanoid](https://github.com/ai/nanoid). Updates `nanoid` from 5.1.3 to 5.1.4 - [Release notes](https://github.com/ai/nanoid/releases) - [Changelog](https://github.com/ai/nanoid/blob/main/CHANGELOG.md) - [Commits](https://github.com/ai/nanoid/compare/5.1.3...5.1.4) --- updated-dependencies: - dependency-name: nanoid dependency-type: direct:production update-type: version-update:semver-patch dependency-group: npm ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- dev/deploy-to-container/package-lock.json | 14 +++++++------- dev/deploy-to-container/package.json | 2 +- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/dev/deploy-to-container/package-lock.json b/dev/deploy-to-container/package-lock.json index c4f675527e..f4ce2c367c 100644 --- a/dev/deploy-to-container/package-lock.json +++ b/dev/deploy-to-container/package-lock.json @@ -8,7 +8,7 @@ "dependencies": { "dockerode": "^4.0.4", "fs-extra": "^11.3.0", - "nanoid": "5.1.3", + "nanoid": "5.1.4", "nanoid-dictionary": "5.0.0-beta.1", "slugify": "1.6.6", "tar": "^7.4.3", @@ -668,9 +668,9 @@ "optional": true }, "node_modules/nanoid": { - "version": "5.1.3", - "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-5.1.3.tgz", - "integrity": "sha512-zAbEOEr7u2CbxwoMRlz/pNSpRP0FdAU4pRaYunCdEezWohXFs+a0Xw7RfkKaezMsmSM1vttcLthJtwRnVtOfHQ==", + "version": "5.1.4", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-5.1.4.tgz", + "integrity": "sha512-GTFcMIDgR7tqji/LpSY8rtg464VnJl/j6ypoehYnuGb+Y8qZUdtKB8WVCXon0UEZgFDbuUxpIl//6FHLHgXSNA==", "funding": [ { "type": "github", @@ -1613,9 +1613,9 
@@ "optional": true }, "nanoid": { - "version": "5.1.3", - "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-5.1.3.tgz", - "integrity": "sha512-zAbEOEr7u2CbxwoMRlz/pNSpRP0FdAU4pRaYunCdEezWohXFs+a0Xw7RfkKaezMsmSM1vttcLthJtwRnVtOfHQ==" + "version": "5.1.4", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-5.1.4.tgz", + "integrity": "sha512-GTFcMIDgR7tqji/LpSY8rtg464VnJl/j6ypoehYnuGb+Y8qZUdtKB8WVCXon0UEZgFDbuUxpIl//6FHLHgXSNA==" }, "nanoid-dictionary": { "version": "5.0.0-beta.1", diff --git a/dev/deploy-to-container/package.json b/dev/deploy-to-container/package.json index 8772c568c8..eb8cb9012a 100644 --- a/dev/deploy-to-container/package.json +++ b/dev/deploy-to-container/package.json @@ -4,7 +4,7 @@ "dependencies": { "dockerode": "^4.0.4", "fs-extra": "^11.3.0", - "nanoid": "5.1.3", + "nanoid": "5.1.4", "nanoid-dictionary": "5.0.0-beta.1", "slugify": "1.6.6", "tar": "^7.4.3", From 8af0dc457f4dda714046b6151a722e7d20b1a55d Mon Sep 17 00:00:00 2001 From: Jennifer Richards Date: Tue, 18 Mar 2025 08:30:52 +0700 Subject: [PATCH 034/405] fix: add doc to raw_id_fields for SlideSubmissionAdmin (#8687) --- ietf/meeting/admin.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ietf/meeting/admin.py b/ietf/meeting/admin.py index e975dd38a6..b7e56c7b77 100644 --- a/ietf/meeting/admin.py +++ b/ietf/meeting/admin.py @@ -189,7 +189,7 @@ class ImportantDateAdmin(admin.ModelAdmin): class SlideSubmissionAdmin(admin.ModelAdmin): model = SlideSubmission list_display = ['session', 'submitter', 'title'] - raw_id_fields = ['submitter', 'session'] + raw_id_fields = ['submitter', 'session', 'doc'] admin.site.register(SlideSubmission, SlideSubmissionAdmin) From 563fcee3a80d5036c9c490a794f31a78bd7a8e90 Mon Sep 17 00:00:00 2001 From: Jennifer Richards Date: Tue, 18 Mar 2025 14:53:38 +0700 Subject: [PATCH 035/405] fix: approve slides link for all pending submissions (#8692) * test: slide approval links shown for all sessions * fix: suggest pending slides for all 
sessions * refactor: flatten logic * refactor: avoid extra exists() query * test: update test comment * test: only pending slides suggested for approval * style: Black * test: temp path override for SessionDetailsTests --- ietf/meeting/tests_views.py | 80 +++++++++++++++++++++ ietf/meeting/views.py | 14 ++-- ietf/templates/meeting/session_details.html | 34 +++++---- 3 files changed, 104 insertions(+), 24 deletions(-) diff --git a/ietf/meeting/tests_views.py b/ietf/meeting/tests_views.py index 59d7e49f7f..111584cc56 100644 --- a/ietf/meeting/tests_views.py +++ b/ietf/meeting/tests_views.py @@ -4527,6 +4527,7 @@ def test_persistent_enabled_timeslot_types(self): class SessionDetailsTests(TestCase): + settings_temp_path_overrides = TestCase.settings_temp_path_overrides + ['SLIDE_STAGING_PATH'] def test_session_details(self): @@ -4659,6 +4660,85 @@ def test_add_session_drafts(self): q = PyQuery(r.content) self.assertEqual(1,len(q(".alert-warning:contains('may affect published proceedings')"))) + def test_proposed_slides_for_approval(self): + # This test overlaps somewhat with MaterialsTests of proposed slides handling. The focus + # here is on the display of slides, not the approval action. 
+ group = GroupFactory() + meeting = MeetingFactory( + type_id="ietf", date=date_today() + datetime.timedelta(days=10) + ) + sessions = SessionFactory.create_batch( + 2, + group=group, + meeting=meeting, + ) + + # slides submission _not_ in the `pending` state + do_not_show = [ + SlideSubmissionFactory( + session=sessions[0], + title="already approved", + status_id="approved", + ), + SlideSubmissionFactory( + session=sessions[1], + title="already rejected", + status_id="rejected", + ), + ] + + # pending submissions + first_session_pending = SlideSubmissionFactory( + session=sessions[0], title="first session title" + ) + second_session_pending = SlideSubmissionFactory( + session=sessions[1], title="second session title" + ) + + # and their approval URLs + def _approval_url(slidesub): + return urlreverse( + "ietf.meeting.views.approve_proposed_slides", + kwargs={"slidesubmission_id": slidesub.pk, "num": meeting.number}, + ) + + first_approval_url = _approval_url(first_session_pending) + second_approval_url = _approval_url(second_session_pending) + do_not_show_urls = [_approval_url(ss) for ss in do_not_show] + + # Retrieve the URL as a group chair + url = urlreverse( + "ietf.meeting.views.session_details", + kwargs={ + "num": meeting.number, + "acronym": group.acronym, + }, + ) + chair = RoleFactory(group=group, name_id="chair").person + self.client.login( + username=chair.user.username, password=f"{chair.user.username}+password" + ) + r = self.client.get(url) + self.assertEqual(r.status_code, 200) + pq = PyQuery(r.content) + self.assertEqual( + len(pq(f'a[href="{first_approval_url}"]')), + 1, + "first session proposed slides should be linked for approval", + ) + self.assertEqual( + len(pq(f'a[href="{second_approval_url}"]')), + 1, + "second session proposed slides should be linked for approval", + ) + for no_show_url in do_not_show_urls: + self.assertEqual( + len(pq(f'a[href="{no_show_url}"]')), + 0, + "second session proposed slides should be linked for approval", + 
) + + class EditScheduleListTests(TestCase): def setUp(self): super().setUp() diff --git a/ietf/meeting/views.py b/ietf/meeting/views.py index 3fa605ed7e..d02ae40902 100644 --- a/ietf/meeting/views.py +++ b/ietf/meeting/views.py @@ -2509,12 +2509,14 @@ def session_details(request, num, acronym): scheduled_sessions = [s for s in sessions if s.current_status == 'sched'] unscheduled_sessions = [s for s in sessions if s.current_status != 'sched'] - pending_suggestions = None - if request.user.is_authenticated: - if can_manage: - pending_suggestions = session.slidesubmission_set.filter(status__slug='pending') - else: - pending_suggestions = session.slidesubmission_set.filter(status__slug='pending', submitter=request.user.person) + # Start with all the pending suggestions for all the group's sessions + pending_suggestions = SlideSubmission.objects.filter(session__in=sessions, status__slug='pending') + if can_manage: + pass # keep the full set + elif hasattr(request.user, "person"): + pending_suggestions = pending_suggestions.filter(submitter=request.user.person) + else: + pending_suggestions = SlideSubmission.objects.none() return render(request, "meeting/session_details.html", { 'scheduled_sessions':scheduled_sessions , diff --git a/ietf/templates/meeting/session_details.html b/ietf/templates/meeting/session_details.html index 571715b4b4..55fa3d3857 100644 --- a/ietf/templates/meeting/session_details.html +++ b/ietf/templates/meeting/session_details.html @@ -31,30 +31,28 @@

Scheduled Sessions

{% include 'meeting/session_details_panel.html' with sessions=scheduled_sessions %}

Unscheduled Sessions

{% include 'meeting/session_details_panel.html' with sessions=unscheduled_sessions %} - {% if pending_suggestions %} -

+ {% for s in pending_suggestions %} + {% if forloop.first %}

{% if can_manage_materials %} Proposed slides awaiting your approval {% else %} Your proposed slides awaiting chair approval {% endif %}

-
- {% endif %} +
{% endif %} + {% if can_manage_materials %} +

+ + {{ s.submitter }} - {{ s.title }} ({{ s.time }}) + +

+ {% else %} +

+ {{ s.title }} ({{ s.time }}) +

+ {% endif %} + {% if forloop.last %}
{% endif %} + {% endfor %} {% endblock %} {% block js %} From 73abdcc29798850a86ddeabdd65b824adcc62eb6 Mon Sep 17 00:00:00 2001 From: Rudi Matz Date: Tue, 18 Mar 2025 22:25:10 -0400 Subject: [PATCH 036/405] feat(agenda): add preliminary date when no agenda available (#8690) * feat(meetings): add preliminary agenda date when no agenda available * test: adapt test for additional field for preliminary agenda date --------- Co-authored-by: Rudi Matz --- client/agenda/AgendaScheduleList.vue | 1 + ietf/meeting/tests_views.py | 5 +++-- ietf/meeting/views.py | 8 ++++++-- 3 files changed, 10 insertions(+), 4 deletions(-) diff --git a/client/agenda/AgendaScheduleList.vue b/client/agenda/AgendaScheduleList.vue index ab0f6e0184..369025d5da 100644 --- a/client/agenda/AgendaScheduleList.vue +++ b/client/agenda/AgendaScheduleList.vue @@ -15,6 +15,7 @@ td(:colspan='pickerModeActive ? 6 : 5') i.bi.bi-exclamation-triangle.me-2 span(v-if='agendaStore.searchVisible && agendaStore.searchText') No event matching your search query. 
+ span(v-else-if='agendaStore.meeting.prelimAgendaDate') A preliminary agenda is expected to be released on {{ agendaStore.meeting.prelimAgendaDate }} span(v-else) Nothing to display tr( v-for='item of meetingEvents' diff --git a/ietf/meeting/tests_views.py b/ietf/meeting/tests_views.py index 111584cc56..0f91986f77 100644 --- a/ietf/meeting/tests_views.py +++ b/ietf/meeting/tests_views.py @@ -233,6 +233,7 @@ def test_meeting_agenda(self): session.save() slot = TimeSlot.objects.get(sessionassignments__session=session,sessionassignments__schedule=meeting.schedule) meeting.timeslot_set.filter(type_id="break").update(show_location=False) + meeting.importantdate_set.create(name_id='prelimagenda',date=date_today() + datetime.timedelta(days=20)) # self.write_materials_files(meeting, session) # @@ -262,7 +263,8 @@ def test_meeting_agenda(self): "updated": generated_data.get("meeting").get("updated"), # Just expect the value to exist "timezone": meeting.time_zone, "infoNote": meeting.agenda_info_note, - "warningNote": meeting.agenda_warning_note + "warningNote": meeting.agenda_warning_note, + "prelimAgendaDate": (date_today() + datetime.timedelta(days=20)).isoformat() }, "categories": generated_data.get("categories"), # Just expect the value to exist "isCurrentMeeting": True, @@ -9341,4 +9343,3 @@ def test_bluesheet_data(self): {"name": attended_with_affil.person.plain_name(), "affiliation": "Somewhere"}, ] ) - diff --git a/ietf/meeting/views.py b/ietf/meeting/views.py index d02ae40902..6a73059d92 100644 --- a/ietf/meeting/views.py +++ b/ietf/meeting/views.py @@ -59,7 +59,7 @@ from ietf.ietfauth.utils import role_required, has_role, user_is_person from ietf.mailtrigger.utils import gather_address_lists from ietf.meeting.models import Meeting, Session, Schedule, FloorPlan, SessionPresentation, TimeSlot, SlideSubmission, Attended -from ietf.meeting.models import SessionStatusName, SchedulingEvent, SchedTimeSessAssignment, Room, TimeSlotTypeName +from ietf.meeting.models 
import ImportantDate, SessionStatusName, SchedulingEvent, SchedTimeSessAssignment, Room, TimeSlotTypeName from ietf.meeting.forms import ( CustomDurationField, SwapDaysForm, SwapTimeslotsForm, ImportMinutesForm, TimeSlotCreateForm, TimeSlotEditForm, SessionCancelForm, SessionEditForm ) from ietf.meeting.helpers import get_person_by_email, get_schedule_by_name @@ -1709,6 +1709,9 @@ def generate_agenda_data(num=None, force_refresh=False): # Get Floor Plans floors = FloorPlan.objects.filter(meeting=meeting).order_by('order') + # Get Preliminary Agenda Date + prelimAgendaDate = ImportantDate.objects.filter(name_id="prelimagenda", meeting=meeting).first() + result = { "meeting": { "number": schedule.meeting.number, @@ -1718,7 +1721,8 @@ def generate_agenda_data(num=None, force_refresh=False): "updated": updated, "timezone": meeting.time_zone, "infoNote": schedule.meeting.agenda_info_note, - "warningNote": schedule.meeting.agenda_warning_note + "warningNote": schedule.meeting.agenda_warning_note, + "prelimAgendaDate": prelimAgendaDate.date.isoformat() if prelimAgendaDate else "" }, "categories": filter_organizer.get_filter_categories(), "isCurrentMeeting": is_current_meeting, From 3356505dc89e2a372f54c3ed1da472c3a11bc6de Mon Sep 17 00:00:00 2001 From: Matthew Holloway Date: Wed, 19 Mar 2025 15:28:17 +1300 Subject: [PATCH 037/405] fix(agenda): Agenda 'now' fallback to next event if there is no current event (#8693) * fix: agenda now fallback to next event if there is no current event * chore: agenda goto now PR feedback * chore: simplifying 'agenda goto now' next event logic * chore: simplifying 'agenda goto now' nextEvent var --- client/agenda/AgendaMobileBar.vue | 8 ++++---- client/agenda/AgendaQuickAccess.vue | 8 ++++---- client/agenda/AgendaScheduleList.vue | 16 ++++++++++------ client/agenda/store.js | 22 ++++++++++++++++++++++ 4 files changed, 40 insertions(+), 14 deletions(-) diff --git a/client/agenda/AgendaMobileBar.vue b/client/agenda/AgendaMobileBar.vue index 
63611e21c2..43480bedd3 100644 --- a/client/agenda/AgendaMobileBar.vue +++ b/client/agenda/AgendaMobileBar.vue @@ -124,11 +124,11 @@ const downloadIcsOptions = [ function jumpToDay (dayId) { if (dayId === 'now') { - const lastEventId = agendaStore.findCurrentEventId() - if (lastEventId) { - document.getElementById(`agenda-rowid-${lastEventId}`)?.scrollIntoView(true) + const nowEventId = agendaStore.findNowEventId() + if (nowEventId) { + document.getElementById(`agenda-rowid-${nowEventId}`)?.scrollIntoView(true) } else { - message.warning('There is no event happening right now.') + message.warning('There is no event happening right now or in the future.') } } else { document.getElementById(dayId)?.scrollIntoView(true) diff --git a/client/agenda/AgendaQuickAccess.vue b/client/agenda/AgendaQuickAccess.vue index b226d09c60..c9412f6663 100644 --- a/client/agenda/AgendaQuickAccess.vue +++ b/client/agenda/AgendaQuickAccess.vue @@ -204,12 +204,12 @@ function scrollToDay (daySlug, ev) { } function scrollToNow (ev) { - const lastEventId = agendaStore.findCurrentEventId() + const nowEventId = agendaStore.findNowEventId() - if (lastEventId) { - document.getElementById(`agenda-rowid-${lastEventId}`)?.scrollIntoView(true) + if (nowEventId) { + document.getElementById(`agenda-rowid-${nowEventId}`)?.scrollIntoView(true) } else { - message.warning('There is no event happening right now.') + message.warning('There is no event happening right now or in the future.') } } diff --git a/client/agenda/AgendaScheduleList.vue b/client/agenda/AgendaScheduleList.vue index 369025d5da..5a971c25cf 100644 --- a/client/agenda/AgendaScheduleList.vue +++ b/client/agenda/AgendaScheduleList.vue @@ -591,10 +591,10 @@ function renderLinkLabel (opt) { function recalculateRedLine () { state.currentMinute = DateTime.local().minute - const lastEventId = agendaStore.findCurrentEventId() + const currentEventId = agendaStore.findCurrentEventId() - if (lastEventId) { - state.redhandOffset = 
document.getElementById(`agenda-rowid-${lastEventId}`)?.offsetTop || 0 + if (currentEventId) { + state.redhandOffset = document.getElementById(`agenda-rowid-${currentEventId}`)?.offsetTop || 0 } else { state.redhandOffset = 0 } @@ -615,9 +615,13 @@ function recalculateRedLine () { return } unsubscribe() // we only need to scroll once, so unsubscribe from future updates - if(window.location.hash === "#now") { - const lastEventId = agendaStore.findCurrentEventId() - document.getElementById(`agenda-rowid-${lastEventId}`)?.scrollIntoView(true) + if (window.location.hash === "#now") { + const nowEventId = agendaStore.findNowEvent() + if (nowEventId) { + document.getElementById(`agenda-rowid-${nowEventId}`)?.scrollIntoView(true) + } else { + message.warning('There is no event happening right now or in the future.') + } } else if(window.location.hash.startsWith(`#${daySlugPrefix}`)) { document.getElementById(window.location.hash.substring(1))?.scrollIntoView(true) } diff --git a/client/agenda/store.js b/client/agenda/store.js index 71c1219725..359c5fbf05 100644 --- a/client/agenda/store.js +++ b/client/agenda/store.js @@ -230,6 +230,28 @@ export const useAgendaStore = defineStore('agenda', { return lastEvent.id || null }, + findNowEventId () { + const currentEventId = this.findCurrentEventId() + + if (currentEventId) { + return currentEventId + } + + // if there isn't a current event then instead find the next event + + const current = (this.nowDebugDiff ? 
DateTime.local().minus(this.nowDebugDiff) : DateTime.local()).setZone(this.timezone) + + // -> Find next event after current time + let nextEventId = undefined + for(const sh of this.scheduleAdjusted) { + if (sh.adjustedStart > current) { + nextEventId = sh.id + break + } + } + + return nextEventId || null + }, hideLoadingScreen () { // -> Hide loading screen const loadingRef = document.querySelector('#app-loading') From c3a296fdb559b015f8e4d457f56fea137e10cf37 Mon Sep 17 00:00:00 2001 From: Nicolas Giard Date: Wed, 19 Mar 2025 04:56:59 -0400 Subject: [PATCH 038/405] fix(agenda): handle calendar view events that spread across multiple days (#8685) --- client/agenda/AgendaScheduleCalendar.vue | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/client/agenda/AgendaScheduleCalendar.vue b/client/agenda/AgendaScheduleCalendar.vue index 9b56b7f5a7..6701ddabd7 100644 --- a/client/agenda/AgendaScheduleCalendar.vue +++ b/client/agenda/AgendaScheduleCalendar.vue @@ -187,6 +187,7 @@ function refreshData () { let earliestDate = DateTime.fromISO('2200-01-01') let latestDate = DateTime.fromISO('1990-01-01') let nowDate = DateTime.now() + let hasCrossDayEvents = false calendarOptions.events = agendaStore.scheduleAdjusted.map(ev => { // -> Determine boundaries @@ -202,6 +203,9 @@ function refreshData () { if (ev.adjustedEnd < latestDate) { latestDate = ev.adjustedEnd } + if (ev.adjustedStart.day !== ev.adjustedEnd.day) { + hasCrossDayEvents = true + } // -> Build event object return { id: ev.id, @@ -214,8 +218,8 @@ function refreshData () { }) // -> Display settings - calendarOptions.slotMinTime = `${earliestHour.toString().padStart(2, '0')}:00:00` - calendarOptions.slotMaxTime = `${latestHour.toString().padStart(2, '0')}:00:00` + calendarOptions.slotMinTime = hasCrossDayEvents ? '00:00:00' : `${earliestHour.toString().padStart(2, '0')}:00:00` + calendarOptions.slotMaxTime = hasCrossDayEvents ? 
'23:59:59' : `${latestHour.toString().padStart(2, '0')}:00:00` calendarOptions.validRange.start = earliestDate.minus({ days: 1 }).toISODate() calendarOptions.validRange.end = latestDate.plus({ days: 1 }).toISODate() // calendarOptions.scrollTime = `${earliestHour.toString().padStart(2, '0')}:00:00` From fde8136df53b3b9d2666674e16e15a7edf7e0ed1 Mon Sep 17 00:00:00 2001 From: Rich Salz Date: Wed, 19 Mar 2025 05:06:47 -0400 Subject: [PATCH 039/405] chore: Add draft name to review-completed email (#8676) * chore: Add draft name to review-completed email Fixes: #7866 * fix: typo * fix: show title and add test --------- Co-authored-by: Robert Sparks --- ietf/doc/tests_review.py | 5 ++++- ietf/templates/review/completed_review.txt | 4 +++- 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/ietf/doc/tests_review.py b/ietf/doc/tests_review.py index e93bc02181..13ddbc22ba 100644 --- a/ietf/doc/tests_review.py +++ b/ietf/doc/tests_review.py @@ -902,7 +902,10 @@ def test_complete_review_enter_content(self): self.assertEqual(len(outbox), 1) self.assertIn(assignment.review_request.team.list_email, outbox[0]["To"]) - self.assertIn("This is a review", get_payload_text(outbox[0])) + payload = get_payload_text(outbox[0]) + self.assertIn("This is a review", payload) + self.assertIn(f"Document: {assignment.review_request.doc.name}", payload) + self.assertIn(f"Title: {assignment.review_request.doc.title}", payload) self.assertIn(settings.MAILING_LIST_ARCHIVE_URL, assignment.review.external_url) diff --git a/ietf/templates/review/completed_review.txt b/ietf/templates/review/completed_review.txt index bdbe321ca1..7d10d8bf13 100644 --- a/ietf/templates/review/completed_review.txt +++ b/ietf/templates/review/completed_review.txt @@ -1,7 +1,9 @@ {% load ietf_filters %}{% autoescape off %}{% filter maybewordwrap:80 %}{% if assignment.state_id == "part-completed" %} Review is partially done. Another assignment may be needed to complete it. 
-{% endif %}Reviewer: {{ assignment.reviewer.person }} +{% endif %}Document: {{ assignment.review_request.doc.name }} +Title: {{ assignment.review_request.doc.title }} +Reviewer: {{ assignment.reviewer.person }} Review result: {{ assignment.result.name }} {{ content }} From 2d974ed09b8e9ff93805fb06cfa65ab6f814e261 Mon Sep 17 00:00:00 2001 From: Matthew Holloway Date: Fri, 21 Mar 2025 13:45:27 +1300 Subject: [PATCH 040/405] feat(agenda): Agenda meeting materials in overflow menu (#8698) * feat: agenda meeting materials in overflow menu * chore: fixing agenda meeting materials tests * chore: fix agenda meeting material tests * chore: fixing agenda meeting materials test * fix: changing to -mat suffix rather than -meeting-materials * fix: agenda meeting materials lnk -> btn * fix: click handler on agenda meeting materials --- client/agenda/AgendaScheduleList.vue | 60 ++++++++++++++++++++----- playwright/tests/meeting/agenda.spec.js | 8 ++-- 2 files changed, 52 insertions(+), 16 deletions(-) diff --git a/client/agenda/AgendaScheduleList.vue b/client/agenda/AgendaScheduleList.vue index 5a971c25cf..1e50df5fb4 100644 --- a/client/agenda/AgendaScheduleList.vue +++ b/client/agenda/AgendaScheduleList.vue @@ -121,20 +121,12 @@ :options='item.links' key-field='id' :render-icon='renderLinkIcon' - :render-label='renderLinkLabel' + :render-label='renderLink' ) n-button(size='tiny') i.bi.bi-three-dots .agenda-table-cell-links-buttons(v-else-if='item.links && item.links.length > 0') - template(v-if='item.flags.agenda') - n-popover - template(#trigger) - i.bi.bi-collection( - :id='`btn-lnk-` + item.key + `-mat`' - @click='showMaterials(item.key)' - ) - span Show meeting materials - template(v-else-if='item.type === `regular`') + template(v-if='!item.flags.agenda && item.type === `regular`') n-popover template(#trigger) i.no-meeting-materials @@ -143,7 +135,16 @@ span No meeting materials yet. 
n-popover(v-for='lnk of item.links', :key='lnk.id') template(#trigger) + button( + v-if="lnk.click" + type="button" + :id='`btn-` + lnk.id' + @click='lnk.click' + :aria-label='lnk.label' + :class='`border-0 bg-transparent text-` + lnk.color' + ): i.bi(:class='`bi-` + lnk.icon') a( + v-else :id='`btn-` + lnk.id' :href='lnk.href' :aria-label='lnk.label' @@ -281,13 +282,28 @@ const meetingEvents = computed(() => { } acc.lastTypeName = typeName - // -> Populate event links + // + /** + * -> Populate event menu items + * + * links is an array of either, + * 1. { href: "...", click: undefined, ...sharedProps } + * 2. { click: () => {...}, href: undefined, ...sharedProps } + */ const links = [] const typesWithLinks = ['regular', 'plenary', 'other'] const purposesWithoutLinks = ['admin', 'closed_meeting', 'officehours', 'social'] if (item.flags.showAgenda || (typesWithLinks.includes(item.type) && !purposesWithoutLinks.includes(item.purpose))) { if (item.flags.agenda) { // -> Meeting Materials + links.push({ + id: `btn-${item.id}-mat`, + label: 'Show meeting materials', + icon: 'collection', + href: undefined, + click: () => showMaterials(item.id), + color: 'black' + }) links.push({ id: `lnk-${item.id}-tar`, label: 'Download meeting materials as .tar archive', @@ -585,7 +601,11 @@ function renderLinkIcon (opt) { return h('i', { class: `bi bi-${opt.icon} text-${opt.color}` }) } -function renderLinkLabel (opt) { +function renderLink (opt) { + if (opt.click) { + return h('button', { type: 'button', class: 'overflow-button', onClick: opt.click }, opt.label) + } + return h('a', { href: opt.href, target: '_blank' }, opt.label) } @@ -1564,6 +1584,22 @@ onBeforeUnmount(() => { } } +.overflow-button { + font-size: inherit; + padding: 0; + border: 0; + background: transparent; + + &:before { + content: ""; + position: absolute; + left: 0; + right: 0; + top: 0; + bottom: 0; + } +} + @keyframes fadeInAnim { 0% { opacity: 0; diff --git a/playwright/tests/meeting/agenda.spec.js 
b/playwright/tests/meeting/agenda.spec.js index e6c6e17031..b7e3df5718 100644 --- a/playwright/tests/meeting/agenda.spec.js +++ b/playwright/tests/meeting/agenda.spec.js @@ -275,7 +275,7 @@ test.describe('past - desktop', () => { const eventButtons = row.locator('.agenda-table-cell-links > .agenda-table-cell-links-buttons') if (event.flags.agenda) { // Show meeting materials button - await expect(eventButtons.locator('i.bi.bi-collection')).toBeVisible() + await expect(eventButtons.locator(`#btn-btn-${event.id}-mat`)).toBeVisible() // ZIP materials button await expect(eventButtons.locator(`#btn-lnk-${event.id}-tar`)).toHaveAttribute('href', `/meeting/${meetingData.meeting.number}/agenda/${event.acronym}-drafts.tgz`) await expect(eventButtons.locator(`#btn-lnk-${event.id}-tar > i.bi`)).toBeVisible() @@ -425,7 +425,7 @@ test.describe('past - desktop', () => { }) }) // Open dialog - await page.locator(`#agenda-rowid-${event.id} #btn-lnk-${event.id}-mat`).click() + await page.locator(`#agenda-rowid-${event.id} #btn-btn-${event.id}-mat`).click() await expect(page.locator('.agenda-eventdetails')).toBeVisible() // Header await expect(page.locator('.agenda-eventdetails .n-card-header__main > .detail-header > .bi')).toBeVisible() @@ -507,7 +507,7 @@ test.describe('past - desktop', () => { }) }) // Open dialog - await page.locator(`#btn-lnk-${event.id}-mat`).click() + await page.locator(`#btn-btn-${event.id}-mat`).click() await expect(page.locator('.agenda-eventdetails')).toBeVisible() // Slides Tab await page.locator('.agenda-eventdetails .detail-nav > a').nth(1).click() @@ -1158,7 +1158,7 @@ test.describe('future - desktop', () => { if (event.flags.showAgenda || (['regular', 'plenary', 'other'].includes(event.type) && !['admin', 'closed_meeting', 'officehours', 'social'].includes(event.purpose))) { if (event.flags.agenda) { // Show meeting materials button - await expect(eventButtons.locator('i.bi.bi-collection')).toBeVisible() + await 
expect(eventButtons.locator(`#btn-btn-${event.id}-mat`)).toBeVisible() // ZIP materials button await expect(eventButtons.locator(`#btn-lnk-${event.id}-tar`)).toHaveAttribute('href', `/meeting/${meetingData.meeting.number}/agenda/${event.acronym}-drafts.tgz`) await expect(eventButtons.locator(`#btn-lnk-${event.id}-tar > i.bi`)).toBeVisible() From 27ea6234a8b0b74062c0a57e849e316087c849f3 Mon Sep 17 00:00:00 2001 From: Nicolas Giard Date: Fri, 21 Mar 2025 04:43:33 -0400 Subject: [PATCH 041/405] feat(agenda): keep video client + onsite tool buttons for plenary for rest of day (#8706) --- client/agenda/AgendaScheduleList.vue | 17 ++++ playwright/helpers/common.js | 24 ++++++ playwright/helpers/meeting.js | 3 + playwright/tests/meeting/agenda.spec.js | 102 ++++++++++++++++++++---- 4 files changed, 129 insertions(+), 17 deletions(-) diff --git a/client/agenda/AgendaScheduleList.vue b/client/agenda/AgendaScheduleList.vue index 1e50df5fb4..86c91bed85 100644 --- a/client/agenda/AgendaScheduleList.vue +++ b/client/agenda/AgendaScheduleList.vue @@ -464,6 +464,23 @@ const meetingEvents = computed(() => { color: 'purple' }) } + // -> Keep showing video client / on-site tool for Plenary until end of day, in case it goes over the planned time range + if (item.type === 'plenary' && item.adjustedEnd.day === current.day) { + links.push({ + id: `lnk-${item.id}-video`, + label: 'Full Client with Video', + icon: 'camera-video', + href: item.links.videoStream, + color: 'purple' + }) + links.push({ + id: `lnk-${item.id}-onsitetool`, + label: 'Onsite tool', + icon: 'telephone-outbound', + href: item.links.onsiteTool, + color: 'teal' + }) + } } } } diff --git a/playwright/helpers/common.js b/playwright/helpers/common.js index 5ba39ba022..c4dd7e2640 100644 --- a/playwright/helpers/common.js +++ b/playwright/helpers/common.js @@ -13,5 +13,29 @@ module.exports = { return rect.top < bottom && rect.top > 0 - rect.height }) + }, + /** + * Override page DateTime with a new value + * + * @param 
{Object} page Page object + * @param {Object} dateTimeOverride New DateTime object + */ + overridePageDateTime: async (page, dateTimeOverride) => { + await page.addInitScript(`{ + // Extend Date constructor to default to fixed time + Date = class extends Date { + constructor(...args) { + if (args.length === 0) { + super(${dateTimeOverride.toMillis()}); + } else { + super(...args); + } + } + } + // Override Date.now() to start from fixed time + const __DateNowOffset = ${dateTimeOverride.toMillis()} - Date.now(); + const __DateNow = Date.now; + Date.now = () => __DateNow() + __DateNowOffset; + }`) } } diff --git a/playwright/helpers/meeting.js b/playwright/helpers/meeting.js index 9722ffc68b..634ca2e8c6 100644 --- a/playwright/helpers/meeting.js +++ b/playwright/helpers/meeting.js @@ -609,6 +609,9 @@ module.exports = { startDateTime: curDay.set({ hour: 17, minute: 30 }), duration: '2h', type: 'plenary', + showAgenda: true, + hasAgenda: true, + hasRecordings: true, ...findAreaGroup('ietf-plenary', categories[2]) }, floors)) } diff --git a/playwright/tests/meeting/agenda.spec.js b/playwright/tests/meeting/agenda.spec.js index b7e3df5718..412a3fe9b8 100644 --- a/playwright/tests/meeting/agenda.spec.js +++ b/playwright/tests/meeting/agenda.spec.js @@ -1213,7 +1213,7 @@ test.describe('future - desktop', () => { await expect(eventButtons.locator(`#btn-lnk-${event.id}-remotecallin`)).toHaveAttribute('href', remoteCallInUrl) await expect(eventButtons.locator(`#btn-lnk-${event.id}-remotecallin > i.bi`)).toBeVisible() } - // calendar + // Calendar if (event.links.calendar) { await expect(eventButtons.locator(`#btn-lnk-${event.id}-calendar`)).toHaveAttribute('href', event.links.calendar) await expect(eventButtons.locator(`#btn-lnk-${event.id}-calendar > i.bi`)).toBeVisible() @@ -1278,22 +1278,7 @@ test.describe('live - desktop', () => { }) // Override Date in page to fixed time - await page.addInitScript(`{ - // Extend Date constructor to default to fixed time - Date = class 
extends Date { - constructor(...args) { - if (args.length === 0) { - super(${currentTime.toMillis()}); - } else { - super(...args); - } - } - } - // Override Date.now() to start from fixed time - const __DateNowOffset = ${currentTime.toMillis()} - Date.now(); - const __DateNow = Date.now; - Date.now = () => __DateNow() + __DateNowOffset; - }`) + await commonHelper.overridePageDateTime(page, currentTime) // Visit agenda page and await Meeting Data API call to complete await Promise.all([ @@ -1348,6 +1333,89 @@ test.describe('live - desktop', () => { }) }) +// ==================================================================== +// AGENDA (live meeting) | DESKTOP viewport | Plenary Extended Time Buttons +// ==================================================================== + +test.describe('live - desktop - plenary extended time buttons', () => { + let meetingData + let plenarySessionId + + test.beforeAll(async () => { + // Generate meeting data + meetingData = meetingHelper.generateAgendaResponse({ dateMode: 'current' }) + plenarySessionId = meetingData.schedule.find(s => s.type === 'plenary').id + }) + + test.beforeEach(async ({ page }) => { + // Intercept Meeting Data API + await page.route(`**/api/meeting/${meetingData.meeting.number}/agenda-data`, route => { + route.fulfill({ + status: 200, + contentType: 'application/json', + body: JSON.stringify(meetingData) + }) + }) + + await page.setViewportSize({ + width: viewports.desktop[0], + height: viewports.desktop[1] + }) + }) + + // -> BUTTONS PRESENT AFTER EVENT, SAME DAY + + test('same day - after event', async ({ page }) => { + // Override Date in page to fixed time + const currentTime = DateTime.fromISO('2022-02-01T13:45:15', { zone: 'Asia/Tokyo' }).plus({ days: 1 }).set({ hour: 20, minute: 30 }) + await commonHelper.overridePageDateTime(page, currentTime) + + // Visit agenda page and await Meeting Data API call to complete + await Promise.all([ + 
page.waitForResponse(`**/api/meeting/${meetingData.meeting.number}/agenda-data`), + page.goto(`/meeting/${meetingData.meeting.number}/agenda`) + ]) + + // Wait for page to be ready + await page.locator('.agenda h1').waitFor({ state: 'visible' }) + await setTimeout(500) + + // Check for plenary event + await expect(page.locator('.agenda .agenda-table-display-event.agenda-table-type-plenary')).toBeVisible() + await page.locator('.agenda .agenda-table-display-event.agenda-table-type-plenary').scrollIntoViewIfNeeded() + + // Check for full video client + on-site tool + await expect(page.locator(`.agenda .agenda-table-display-event.agenda-table-type-plenary .agenda-table-cell-links-buttons a#btn-lnk-${plenarySessionId}-video`)).toBeVisible() + await expect(page.locator(`.agenda .agenda-table-display-event.agenda-table-type-plenary .agenda-table-cell-links-buttons a#btn-lnk-${plenarySessionId}-onsitetool`)).toBeVisible() + }) + + // -> BUTTONS NO LONGER PRESENT AFTER EVENT, NEXT DAY + + test('next day - after event', async ({ page }) => { + // Override Date in page to fixed time + const currentTime = DateTime.fromISO('2022-02-01T13:45:15', { zone: 'Asia/Tokyo' }).plus({ days: 2 }).set({ hour: 2, minute: 30 }) + await commonHelper.overridePageDateTime(page, currentTime) + + // Visit agenda page and await Meeting Data API call to complete + await Promise.all([ + page.waitForResponse(`**/api/meeting/${meetingData.meeting.number}/agenda-data`), + page.goto(`/meeting/${meetingData.meeting.number}/agenda`) + ]) + + // Wait for page to be ready + await page.locator('.agenda h1').waitFor({ state: 'visible' }) + await setTimeout(500) + + // Check for plenary event + await expect(page.locator('.agenda .agenda-table-display-event.agenda-table-type-plenary')).toBeVisible() + await page.locator('.agenda .agenda-table-display-event.agenda-table-type-plenary').scrollIntoViewIfNeeded() + + // Check for full video client + on-site tool + await expect(page.locator(`.agenda 
.agenda-table-display-event.agenda-table-type-plenary .agenda-table-cell-links-buttons a#btn-lnk-${plenarySessionId}-video`)).not.toBeVisible() + await expect(page.locator(`.agenda .agenda-table-display-event.agenda-table-type-plenary .agenda-table-cell-links-buttons a#btn-lnk-${plenarySessionId}-onsitetool`)).not.toBeVisible() + }) +}) + // ==================================================================== // AGENDA (past meeting) | SMALL DESKTOP/TABLET/MOBILE viewports // ==================================================================== From ed2059a5813130cbf31ce77c0978a35751addc6c Mon Sep 17 00:00:00 2001 From: Jennifer Richards Date: Fri, 21 Mar 2025 15:45:26 +0700 Subject: [PATCH 042/405] fix: add blank=true to SlideSubmission.doc (#8688) Allows admin to save an instance with a null doc reference, which was already permitted. --- .../0011_alter_slidesubmission_doc.py | 26 +++++++++++++++++++ ietf/meeting/models.py | 2 +- 2 files changed, 27 insertions(+), 1 deletion(-) create mode 100644 ietf/meeting/migrations/0011_alter_slidesubmission_doc.py diff --git a/ietf/meeting/migrations/0011_alter_slidesubmission_doc.py b/ietf/meeting/migrations/0011_alter_slidesubmission_doc.py new file mode 100644 index 0000000000..b9cbc58e99 --- /dev/null +++ b/ietf/meeting/migrations/0011_alter_slidesubmission_doc.py @@ -0,0 +1,26 @@ +# Generated by Django 4.2.19 on 2025-03-17 09:37 + +from django.db import migrations +import django.db.models.deletion +import ietf.utils.models + + +class Migration(migrations.Migration): + + dependencies = [ + ("doc", "0025_storedobject_storedobject_unique_name_per_store"), + ("meeting", "0010_alter_floorplan_image_alter_meetinghost_logo"), + ] + + operations = [ + migrations.AlterField( + model_name="slidesubmission", + name="doc", + field=ietf.utils.models.ForeignKey( + blank=True, + null=True, + on_delete=django.db.models.deletion.SET_NULL, + to="doc.document", + ), + ), + ] diff --git a/ietf/meeting/models.py b/ietf/meeting/models.py 
index 5284420731..84d151c310 100644 --- a/ietf/meeting/models.py +++ b/ietf/meeting/models.py @@ -1385,7 +1385,7 @@ class SlideSubmission(models.Model): apply_to_all = models.BooleanField(default=False) submitter = ForeignKey(Person) status = ForeignKey(SlideSubmissionStatusName, null=True, default='pending', on_delete=models.SET_NULL) - doc = ForeignKey(Document, null=True, on_delete=models.SET_NULL) + doc = ForeignKey(Document, blank=True, null=True, on_delete=models.SET_NULL) def staged_filepath(self): return os.path.join(settings.SLIDE_STAGING_PATH , self.filename) From 37b4448b3d390b102b7e14890880bcfd1e965bcc Mon Sep 17 00:00:00 2001 From: Benson Muite Date: Fri, 21 Mar 2025 11:52:18 +0300 Subject: [PATCH 043/405] fix: Clarify legacy description in RFC banner (#8424) (#8670) --- ietf/templates/doc/disclaimer.html | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/ietf/templates/doc/disclaimer.html b/ietf/templates/doc/disclaimer.html index 31ce6c397e..db4c42ed68 100644 --- a/ietf/templates/doc/disclaimer.html +++ b/ietf/templates/doc/disclaimer.html @@ -3,12 +3,18 @@ {% load ietf_filters %} {% origin %} {% if doc.type_id == "rfc" %} - {% if doc.stream.slug != "ietf" and doc.std_level.slug|default:"unk" not in "bcp,ds,ps,std"|split:"," %} + {% if doc.stream.slug != "ietf" and doc.stream.desc != "Legacy" and doc.std_level.slug|default:"unk" not in "bcp,ds,ps,std"|split:"," %} + {% elif doc.stream.slug != "ietf" and doc.stream.desc == "Legacy" and doc.std_level.slug|default:"unk" not in "bcp,ds,ps,std"|split:"," %} + {% endif %} {% elif doc|is_in_stream %} {% if doc.stream.slug != "ietf" and doc.std_level.slug|default:"unk" not in "bcp,ds,ps,std"|split:"," %} @@ -25,4 +31,4 @@ This I-D is not endorsed by the IETF and has no formal standing in the IETF standards process. 
-{% endif %} \ No newline at end of file +{% endif %} From 8fec17282f5dc7ef28355990e2f74bff2a309002 Mon Sep 17 00:00:00 2001 From: Nicolas Giard Date: Fri, 21 Mar 2025 19:30:40 -0400 Subject: [PATCH 044/405] ci: Update build-base-app.yml --- .github/workflows/build-base-app.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/build-base-app.yml b/.github/workflows/build-base-app.yml index c8f66a22b7..5f0a0d11b8 100644 --- a/.github/workflows/build-base-app.yml +++ b/.github/workflows/build-base-app.yml @@ -51,7 +51,7 @@ jobs: push: true tags: | ghcr.io/ietf-tools/datatracker-app-base:${{ env.IMGVERSION }} - ghcr.io/ietf-tools/datatracker-app-base:latest + ${{ github.ref == 'refs/heads/main' && 'ghcr.io/ietf-tools/datatracker-app-base:latest' || '' }} - name: Update version references run: | @@ -61,6 +61,6 @@ jobs: - name: Commit CHANGELOG.md uses: stefanzweifel/git-auto-commit-action@v5 with: - branch: main + branch: ${{ github.ref_name }} commit_message: 'ci: update base image target version to ${{ env.IMGVERSION }}' file_pattern: dev/build/Dockerfile dev/build/TARGET_BASE From 6da36da3fde2721d3885001e4a4174c85579db8e Mon Sep 17 00:00:00 2001 From: Jennifer Richards Date: Thu, 27 Mar 2025 00:29:48 +0700 Subject: [PATCH 045/405] chore: bump rabbitmq to 3.13 (#8603) * ci: param for rabbitmq_version * ci: drop latest tagging from datatracker-mq * chore: bump rabbitmq to 3.13 * Revert "ci: drop latest tagging from datatracker-mq" This reverts commit 58cea207c7300727f9cfc19a57859ab16c3b2c01. 
--- .github/workflows/build-mq-broker.yml | 15 +++++++++------ k8s/rabbitmq.yaml | 2 +- 2 files changed, 10 insertions(+), 7 deletions(-) diff --git a/.github/workflows/build-mq-broker.yml b/.github/workflows/build-mq-broker.yml index 8c6f1e6ae1..85c27c23cc 100644 --- a/.github/workflows/build-mq-broker.yml +++ b/.github/workflows/build-mq-broker.yml @@ -8,10 +8,13 @@ on: - 'dev/mq/**' - '.github/workflows/build-mq-broker.yml' - workflow_dispatch: - -env: - RABBITMQ_VERSION: 3.12-alpine + workflow_dispatch: + inputs: + rabbitmq_version: + description: 'RabbitMQ Version' + default: '3.13-alpine' + required: true + type: string jobs: publish: @@ -45,7 +48,7 @@ jobs: file: dev/mq/Dockerfile platforms: linux/amd64,linux/arm64 push: true - build-args: RABBITMQ_VERSION=${{ env.RABBITMQ_VERSION }} + build-args: RABBITMQ_VERSION=${{ inputs.rabbitmq_version }} tags: | - ghcr.io/ietf-tools/datatracker-mq:${{ env.RABBITMQ_VERSION }} + ghcr.io/ietf-tools/datatracker-mq:${{ inputs.rabbitmq_version }} ghcr.io/ietf-tools/datatracker-mq:latest diff --git a/k8s/rabbitmq.yaml b/k8s/rabbitmq.yaml index 3cab7ff565..0c8f0705b5 100644 --- a/k8s/rabbitmq.yaml +++ b/k8s/rabbitmq.yaml @@ -29,7 +29,7 @@ spec: # ----------------------------------------------------- # RabbitMQ Container # ----------------------------------------------------- - - image: "ghcr.io/ietf-tools/datatracker-mq:3.12-alpine" + - image: "ghcr.io/ietf-tools/datatracker-mq:3.13-alpine" imagePullPolicy: Always name: rabbitmq ports: From 9dd25b9aee2ccb565d74d55d90c67bf8388c910e Mon Sep 17 00:00:00 2001 From: Matthew Holloway Date: Fri, 28 Mar 2025 04:07:53 +1300 Subject: [PATCH 046/405] chore: vite sourcemap (#8719) --- vite.config.js | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/vite.config.js b/vite.config.js index 41a2cb02e0..bde2b9ed57 100644 --- a/vite.config.js +++ b/vite.config.js @@ -16,7 +16,8 @@ export default defineConfig(({ command, mode }) => { main: 'client/main.js', embedded: 
'client/embedded.js' } - } + }, + sourcemap: true }, cacheDir: '.vite', plugins: [ From 7e9a46af63da0e45f3ea0e4ccfda69875b282190 Mon Sep 17 00:00:00 2001 From: Robert Sparks Date: Thu, 27 Mar 2025 13:59:24 -0500 Subject: [PATCH 047/405] feat: move base containers to bookworm (#8710) --- docker/base.Dockerfile | 17 ++++++++++------- docker/scripts/app-setup-debian.sh | 16 +++------------- ietf/submit/tests.py | 4 ++-- 3 files changed, 15 insertions(+), 22 deletions(-) diff --git a/docker/base.Dockerfile b/docker/base.Dockerfile index e2465f33c2..f364456c7a 100644 --- a/docker/base.Dockerfile +++ b/docker/base.Dockerfile @@ -1,4 +1,4 @@ -FROM python:3.9-bullseye +FROM python:3.9-bookworm LABEL maintainer="IETF Tools Team " ENV DEBIAN_FRONTEND=noninteractive @@ -14,6 +14,9 @@ RUN apt-get install -y --no-install-recommends ca-certificates curl gnupg \ && mkdir -p /etc/apt/keyrings\ && curl -fsSL https://deb.nodesource.com/gpgkey/nodesource-repo.gpg.key | gpg --dearmor -o /etc/apt/keyrings/nodesource.gpg RUN echo "deb [signed-by=/etc/apt/keyrings/nodesource.gpg] https://deb.nodesource.com/node_$NODE_MAJOR.x nodistro main" | tee /etc/apt/sources.list.d/nodesource.list +RUN echo "Package: nodejs" >> /etc/apt/preferences.d/preferences && \ + echo "Pin: origin deb.nodesource.com" >> /etc/apt/preferences.d/preferences && \ + echo "Pin-Priority: 1001" >> /etc/apt/preferences.d/preferences # Add Docker Source RUN curl -fsSL https://download.docker.com/linux/debian/gpg | gpg --dearmor -o /usr/share/keyrings/docker-archive-keyring.gpg @@ -56,12 +59,13 @@ RUN apt-get update --fix-missing && apt-get install -qy --no-install-recommends libmagic-dev \ libmariadb-dev \ libmemcached-tools \ + libyang2-tools \ locales \ make \ mariadb-client \ memcached \ nano \ - netcat \ + netcat-traditional \ nodejs \ pgloader \ pigz \ @@ -77,7 +81,6 @@ RUN apt-get update --fix-missing && apt-get install -qy --no-install-recommends wget \ xauth \ xvfb \ - yang-tools \ zsh # Install kramdown-rfc2629 
(ruby) @@ -106,11 +109,11 @@ RUN apt-get autoremove -y && apt-get clean -y && rm -rf /var/lib/apt/lists/* /va ENV DBUS_SESSION_BUS_ADDRESS=/dev/null # avoid million NPM install messages -ENV npm_config_loglevel warn +ENV npm_config_loglevel=warn # allow installing when the main user is root -ENV npm_config_unsafe_perm true +ENV npm_config_unsafe_perm=true # disable NPM funding messages -ENV npm_config_fund false +ENV npm_config_fund=false # Set locale to en_US.UTF-8 RUN echo "LC_ALL=en_US.UTF-8" >> /etc/environment && \ @@ -119,7 +122,7 @@ RUN echo "LC_ALL=en_US.UTF-8" >> /etc/environment && \ dpkg-reconfigure locales && \ locale-gen en_US.UTF-8 && \ update-locale LC_ALL en_US.UTF-8 -ENV LC_ALL en_US.UTF-8 +ENV LC_ALL=en_US.UTF-8 # Install idnits ADD https://raw.githubusercontent.com/ietf-tools/idnits-mirror/main/idnits /usr/local/bin/ diff --git a/docker/scripts/app-setup-debian.sh b/docker/scripts/app-setup-debian.sh index ddfc351995..ea9cc3fb87 100644 --- a/docker/scripts/app-setup-debian.sh +++ b/docker/scripts/app-setup-debian.sh @@ -10,7 +10,6 @@ # Syntax: ./common-debian.sh [install zsh flag] [username] [user UID] [user GID] [upgrade packages flag] [install Oh My Zsh! flag] [Add non-free packages] set -e - INSTALL_ZSH=${1:-"true"} USERNAME=${2:-"automatic"} USER_UID=${3:-"automatic"} @@ -116,18 +115,9 @@ if [ "${PACKAGES_ALREADY_INSTALLED}" != "true" ]; then # Needed for adding manpages-posix and manpages-posix-dev which are non-free packages in Debian if [ "${ADD_NON_FREE_PACKAGES}" = "true" ]; then # Bring in variables from /etc/os-release like VERSION_CODENAME - . 
/etc/os-release - sed -i -E "s/deb http:\/\/(deb|httpredir)\.debian\.org\/debian ${VERSION_CODENAME} main/deb http:\/\/\1\.debian\.org\/debian ${VERSION_CODENAME} main contrib non-free/" /etc/apt/sources.list - sed -i -E "s/deb-src http:\/\/(deb|httredir)\.debian\.org\/debian ${VERSION_CODENAME} main/deb http:\/\/\1\.debian\.org\/debian ${VERSION_CODENAME} main contrib non-free/" /etc/apt/sources.list - sed -i -E "s/deb http:\/\/(deb|httpredir)\.debian\.org\/debian ${VERSION_CODENAME}-updates main/deb http:\/\/\1\.debian\.org\/debian ${VERSION_CODENAME}-updates main contrib non-free/" /etc/apt/sources.list - sed -i -E "s/deb-src http:\/\/(deb|httpredir)\.debian\.org\/debian ${VERSION_CODENAME}-updates main/deb http:\/\/\1\.debian\.org\/debian ${VERSION_CODENAME}-updates main contrib non-free/" /etc/apt/sources.list - sed -i "s/deb http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}\/updates main/deb http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}\/updates main contrib non-free/" /etc/apt/sources.list - sed -i "s/deb-src http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}\/updates main/deb http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}\/updates main contrib non-free/" /etc/apt/sources.list - sed -i "s/deb http:\/\/deb\.debian\.org\/debian ${VERSION_CODENAME}-backports main/deb http:\/\/deb\.debian\.org\/debian ${VERSION_CODENAME}-backports main contrib non-free/" /etc/apt/sources.list - sed -i "s/deb-src http:\/\/deb\.debian\.org\/debian ${VERSION_CODENAME}-backports main/deb http:\/\/deb\.debian\.org\/debian ${VERSION_CODENAME}-backports main contrib non-free/" /etc/apt/sources.list - # Handle bullseye location for security https://www.debian.org/releases/bullseye/amd64/release-notes/ch-information.en.html - sed -i "s/deb http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}-security main/deb http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}-security main 
contrib non-free/" /etc/apt/sources.list - sed -i "s/deb-src http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}-security main/deb http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}-security main contrib non-free/" /etc/apt/sources.list + . /etc/os-release + sed -i -E "s/Components: main/Components: main contrib non-free/" /etc/apt/sources.list.d/debian.sources + echo "Running apt-get update..." apt-get update package_list="${package_list} manpages-posix manpages-posix-dev" diff --git a/ietf/submit/tests.py b/ietf/submit/tests.py index 9a993480cd..6b1c998e76 100644 --- a/ietf/submit/tests.py +++ b/ietf/submit/tests.py @@ -1888,7 +1888,7 @@ def test_submit_invalid_yang(self): r = self.client.get(status_url) q = PyQuery(r.content) # - self.assertContains(r, 'The yang validation returned 1 error') + self.assertContains(r, 'The yang validation returned 3 errors') # m = q('#yang-validation-message').text() for command in ['xym', 'pyang', 'yanglint']: @@ -1898,7 +1898,7 @@ def test_submit_invalid_yang(self): self.assertIn("draft-yang-testing-invalid-00.txt", m) self.assertIn("error: syntax error: illegal keyword: ;", m) if settings.SUBMIT_YANGLINT_COMMAND and os.path.exists(settings.YANGLINT_BINARY): - self.assertIn("No validation errors", m) + self.assertIn('libyang err : Parsing module "ietf-yang-metadata" failed.', m) def submit_conflicting_submissiondocevent_rev(self, new_rev='01', existing_rev='01'): """Test submitting a rev when an equal or later SubmissionDocEvent rev exists From 431c475060778ccaaf611e0de7a9bcc0c3b93c42 Mon Sep 17 00:00:00 2001 From: rjsparks <10996692+rjsparks@users.noreply.github.com> Date: Thu, 27 Mar 2025 19:12:31 +0000 Subject: [PATCH 048/405] ci: update base image target version to 20250327T1859 --- dev/build/Dockerfile | 2 +- dev/build/TARGET_BASE | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/dev/build/Dockerfile b/dev/build/Dockerfile index 2b02a091c5..cc55c92881 100644 --- 
a/dev/build/Dockerfile +++ b/dev/build/Dockerfile @@ -1,4 +1,4 @@ -FROM ghcr.io/ietf-tools/datatracker-app-base:20250315T0745 +FROM ghcr.io/ietf-tools/datatracker-app-base:20250327T1859 LABEL maintainer="IETF Tools Team " ENV DEBIAN_FRONTEND=noninteractive diff --git a/dev/build/TARGET_BASE b/dev/build/TARGET_BASE index e6f490b168..50e8bfd839 100644 --- a/dev/build/TARGET_BASE +++ b/dev/build/TARGET_BASE @@ -1 +1 @@ -20250315T0745 +20250327T1859 From 752bc2103146b534e1bb6188c7d54d11f07ba885 Mon Sep 17 00:00:00 2001 From: Rudi Matz Date: Thu, 27 Mar 2025 18:19:30 -0400 Subject: [PATCH 049/405] feat: highlight unexpected state in AD dashboard (#8738) * feat: highlight unexpected state in AD dashboard * test: add tests for is_unexpected_wg_state * test: improve tests using WgDraftFactory --- ietf/doc/templatetags/ietf_filters.py | 13 +++++++++++++ ietf/doc/templatetags/tests_ietf_filters.py | 16 ++++++++++++++++ ietf/templates/doc/search/status_columns.html | 4 ++++ 3 files changed, 33 insertions(+) diff --git a/ietf/doc/templatetags/ietf_filters.py b/ietf/doc/templatetags/ietf_filters.py index 86507eeaaa..d4adf96a27 100644 --- a/ietf/doc/templatetags/ietf_filters.py +++ b/ietf/doc/templatetags/ietf_filters.py @@ -480,6 +480,19 @@ def state(doc, slug): slug = "%s-stream-%s" % (doc.type_id, doc.stream_id) return doc.get_state(slug) + +@register.filter +def is_unexpected_wg_state(doc): + """Returns a flag indicating whether the document has an unexpected wg state.""" + if not doc.type_id == "draft": + return False + + draft_iesg_state = doc.get_state("draft-iesg") + draft_stream_state = doc.get_state("draft-stream-ietf") + + return draft_iesg_state.slug != "idexists" and draft_stream_state is not None and draft_stream_state.slug != "sub-pub" + + @register.filter def statehelp(state): "Output help icon with tooltip for state." 
diff --git a/ietf/doc/templatetags/tests_ietf_filters.py b/ietf/doc/templatetags/tests_ietf_filters.py index f018b7d9b3..b5130849ea 100644 --- a/ietf/doc/templatetags/tests_ietf_filters.py +++ b/ietf/doc/templatetags/tests_ietf_filters.py @@ -14,12 +14,14 @@ ConflictReviewFactory, BofreqFactory, StatementFactory, + RfcFactory, ) from ietf.doc.models import DocEvent from ietf.doc.templatetags.ietf_filters import ( urlize_ietf_docs, is_valid_url, is_in_stream, + is_unexpected_wg_state, ) from ietf.person.models import Person from ietf.utils.test_utils import TestCase @@ -174,3 +176,17 @@ def test_urlize_ietf_docs(self): for input, output in cases: # debug.show("(input, urlize_ietf_docs(input), output)") self.assertEqual(urlize_ietf_docs(input), output) + + def test_is_unexpected_wg_state(self): + """ + Test that the unexpected_wg_state function works correctly + """ + # test documents with expected wg states + self.assertFalse(is_unexpected_wg_state(RfcFactory())) + self.assertFalse(is_unexpected_wg_state(WgDraftFactory (states=[('draft-stream-ietf', 'sub-pub')]))) + self.assertFalse(is_unexpected_wg_state(WgDraftFactory (states=[('draft-iesg', 'idexists')]))) + self.assertFalse(is_unexpected_wg_state(WgDraftFactory (states=[('draft-stream-ietf', 'wg-cand'), ('draft-iesg','idexists')]))) + + # test documents with unexpected wg states due to invalid combination of states + self.assertTrue(is_unexpected_wg_state(WgDraftFactory (states=[('draft-stream-ietf', 'wg-cand'), ('draft-iesg','lc-req')]))) + self.assertTrue(is_unexpected_wg_state(WgDraftFactory (states=[('draft-stream-ietf', 'chair-w'), ('draft-iesg','pub-req')]))) diff --git a/ietf/templates/doc/search/status_columns.html b/ietf/templates/doc/search/status_columns.html index 15f284fd12..5ba41bb9c4 100644 --- a/ietf/templates/doc/search/status_columns.html +++ b/ietf/templates/doc/search/status_columns.html @@ -78,6 +78,10 @@ {% person_link action_holder.person title=action_holder.role_for_doc %}{% if 
action_holder|action_holder_badge %} {{ action_holder|action_holder_badge }}{% endif %}{% if not forloop.last %},{% endif %} {% endfor %} {% endif %} + {% if doc|is_unexpected_wg_state %} +
+ Unexpected WG state + {% endif %} {% else %} {# RFC #} {{ doc.std_level|safe }} RFC From aa2a3217a7006c05292650bb0d75b0243d2cc67b Mon Sep 17 00:00:00 2001 From: Nicolas Giard Date: Fri, 28 Mar 2025 10:59:56 -0400 Subject: [PATCH 050/405] fix(agenda): strikethrough cancelled and rescheduled events (#8694) * fix(agenda): strikethrough cancelled and rescheduled events * fix(agenda): rescheduled + canceled badges width on mobile * fix(agenda): use em for canceled / rescheduled badge on mobile * fix: deleting badge width/display:block CSS --------- Co-authored-by: Matthew Holloway --- client/agenda/AgendaDetailsModal.vue | 1 - client/agenda/AgendaScheduleCalendar.vue | 1 - client/agenda/AgendaScheduleList.vue | 12 ++++++++++-- 3 files changed, 10 insertions(+), 4 deletions(-) diff --git a/client/agenda/AgendaDetailsModal.vue b/client/agenda/AgendaDetailsModal.vue index 037e32f9cb..2582bf2159 100644 --- a/client/agenda/AgendaDetailsModal.vue +++ b/client/agenda/AgendaDetailsModal.vue @@ -327,7 +327,6 @@ async function fetchSessionMaterials () { border-radius: 5px; .badge { - width: 30px; font-size: .7em; background-color: $yellow-200; border-bottom: 1px solid $yellow-500; diff --git a/client/agenda/AgendaScheduleCalendar.vue b/client/agenda/AgendaScheduleCalendar.vue index 6701ddabd7..9863296341 100644 --- a/client/agenda/AgendaScheduleCalendar.vue +++ b/client/agenda/AgendaScheduleCalendar.vue @@ -330,7 +330,6 @@ function close () { } .badge { - width: 30px; font-size: .7em; border: 1px solid #CCC; text-transform: uppercase; diff --git a/client/agenda/AgendaScheduleList.vue b/client/agenda/AgendaScheduleList.vue index 86c91bed85..d38c7842eb 100644 --- a/client/agenda/AgendaScheduleList.vue +++ b/client/agenda/AgendaScheduleList.vue @@ -1097,8 +1097,6 @@ onBeforeUnmount(() => { } @media screen and (max-width: $bs5-break-md) { - width: 30px; - display: block; margin: 2px 0 0 0; } } @@ -1321,6 +1319,11 @@ onBeforeUnmount(() => { &.agenda-table-cell-room { border-right: 
1px solid darken($red-100, 5%) !important; + text-decoration: line-through; + } + + &.agenda-table-cell-name > a, &.agenda-table-cell-name > span { + text-decoration: line-through; } &:last-child { @@ -1348,6 +1351,11 @@ onBeforeUnmount(() => { &.agenda-table-cell-room { border-right: 1px solid darken($orange-100, 5%) !important; + text-decoration: line-through; + } + + &.agenda-table-cell-name > a, &.agenda-table-cell-name > span { + text-decoration: line-through; } &:last-child { From 527db359e3836a587ba67432b814004b75635022 Mon Sep 17 00:00:00 2001 From: Nicolas Giard Date: Tue, 1 Apr 2025 09:03:59 -0400 Subject: [PATCH 051/405] chore: fix line ending for .github/ISSUE_TEMPLATE/config.yml --- .github/ISSUE_TEMPLATE/config.yml | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml index 5e27af9fed..320614b17e 100644 --- a/.github/ISSUE_TEMPLATE/config.yml +++ b/.github/ISSUE_TEMPLATE/config.yml @@ -1,8 +1,8 @@ -blank_issues_enabled: false -contact_links: - - name: Help and questions - url: https://github.com/ietf-tools/datatracker/discussions/categories/help-questions - about: Need help? Have a question on setting up the project or its usage? - - name: Discuss new ideas - url: https://github.com/ietf-tools/datatracker/discussions/categories/ideas - about: Submit ideas for new features or improvements to be discussed. +blank_issues_enabled: false +contact_links: + - name: Help and questions + url: https://github.com/ietf-tools/datatracker/discussions/categories/help-questions + about: Need help? Have a question on setting up the project or its usage? + - name: Discuss new ideas + url: https://github.com/ietf-tools/datatracker/discussions/categories/ideas + about: Submit ideas for new features or improvements to be discussed. 
From 9957cf190a4a61369f97e0224e0bb2ae6ed9c8be Mon Sep 17 00:00:00 2001 From: Rudi Matz Date: Tue, 1 Apr 2025 14:35:04 -0400 Subject: [PATCH 052/405] feat: block iesg ballot for docs from outside streams (#8761) * feat: block iesg ballot for docs from outside streams * test: correct failing irsg test * feat: improve/simplify tests and condition * test: filter out pyflakes tests related to globals --- ietf/doc/tests_ballot.py | 29 +++++++++++++++++++++++------ ietf/doc/tests_irsg_ballot.py | 2 +- ietf/doc/views_ballot.py | 5 ++++- ietf/utils/test_runner.py | 9 ++++++++- 4 files changed, 36 insertions(+), 9 deletions(-) diff --git a/ietf/doc/tests_ballot.py b/ietf/doc/tests_ballot.py index c7362b58e2..ec23f3d491 100644 --- a/ietf/doc/tests_ballot.py +++ b/ietf/doc/tests_ballot.py @@ -17,7 +17,7 @@ from ietf.doc.models import (Document, State, DocEvent, BallotPositionDocEvent, LastCallDocEvent, WriteupDocEvent, TelechatDocEvent) from ietf.doc.factories import (DocumentFactory, IndividualDraftFactory, IndividualRfcFactory, WgDraftFactory, - BallotPositionDocEventFactory, BallotDocEventFactory, IRSGBallotDocEventFactory) + BallotPositionDocEventFactory, BallotDocEventFactory, IRSGBallotDocEventFactory, RgDraftFactory) from ietf.doc.templatetags.ietf_filters import can_defer from ietf.doc.utils import create_ballot_if_not_open from ietf.doc.views_ballot import parse_ballot_edit_return_point @@ -360,7 +360,7 @@ def test_request_last_call(self): self.assertTrue('aread@' in outbox[-1]['Cc']) def test_edit_ballot_writeup(self): - draft = IndividualDraftFactory(states=[('draft','active'),('draft-iesg','iesg-eva')]) + draft = IndividualDraftFactory(states=[('draft','active'),('draft-iesg','iesg-eva')], stream_id='ietf') url = urlreverse('ietf.doc.views_ballot.ballot_writeupnotes', kwargs=dict(name=draft.name)) login_testing_unauthorized(self, "secretary", url) @@ -390,8 +390,25 @@ def test_edit_ballot_writeup(self): self.assertTrue("This is a simple test" in 
d.latest_event(WriteupDocEvent, type="changed_ballot_writeup_text").text) self.assertTrue('iesg-eva' == d.get_state_slug('draft-iesg')) + def test_edit_ballot_writeup_unauthorized_stream(self): + # Test that accessing a document from unauthorized (irtf) stream returns a 404 error + draft = RgDraftFactory() + url = urlreverse('ietf.doc.views_ballot.ballot_writeupnotes', kwargs=dict(name=draft.name)) + login_testing_unauthorized(self, "ad", url) + + r = self.client.get(url) + self.assertEqual(r.status_code, 404) + + def test_edit_ballot_writeup_invalid_name(self): + # Test that accessing a non-existent document returns a 404 error + url = urlreverse('ietf.doc.views_ballot.ballot_writeupnotes', kwargs=dict(name="invalid_name")) + login_testing_unauthorized(self, "ad", url) + + r = self.client.get(url) + self.assertEqual(r.status_code, 404) + def test_edit_ballot_writeup_already_approved(self): - draft = IndividualDraftFactory(states=[('draft','active'),('draft-iesg','approved')]) + draft = IndividualDraftFactory(states=[('draft','active'),('draft-iesg','approved')], stream_id='ietf') url = urlreverse('ietf.doc.views_ballot.ballot_writeupnotes', kwargs=dict(name=draft.name)) login_testing_unauthorized(self, "secretary", url) @@ -465,7 +482,7 @@ def test_edit_ballot_rfceditornote(self): def test_issue_ballot(self): ad = Person.objects.get(user__username="ad") for case in ('none','past','future'): - draft = IndividualDraftFactory(ad=ad) + draft = IndividualDraftFactory(ad=ad, stream_id='ietf') if case in ('past','future'): LastCallDocEvent.objects.create( by=Person.objects.get(name='(System)'), @@ -504,7 +521,7 @@ def test_issue_ballot(self): def test_issue_ballot_auto_state_change(self): ad = Person.objects.get(user__username="ad") - draft = IndividualDraftFactory(ad=ad, states=[('draft','active'),('draft-iesg','writeupw')]) + draft = IndividualDraftFactory(ad=ad, states=[('draft','active'),('draft-iesg','writeupw')], stream_id='ietf') url = 
urlreverse('ietf.doc.views_ballot.ballot_writeupnotes', kwargs=dict(name=draft.name)) login_testing_unauthorized(self, "secretary", url) @@ -528,7 +545,7 @@ def test_issue_ballot_auto_state_change(self): def test_issue_ballot_warn_if_early(self): ad = Person.objects.get(user__username="ad") - draft = IndividualDraftFactory(ad=ad, states=[('draft','active'),('draft-iesg','lc')]) + draft = IndividualDraftFactory(ad=ad, states=[('draft','active'),('draft-iesg','lc')], stream_id='ietf') url = urlreverse('ietf.doc.views_ballot.ballot_writeupnotes', kwargs=dict(name=draft.name)) login_testing_unauthorized(self, "secretary", url) diff --git a/ietf/doc/tests_irsg_ballot.py b/ietf/doc/tests_irsg_ballot.py index 92752e48c4..aa62d8aaf9 100644 --- a/ietf/doc/tests_irsg_ballot.py +++ b/ietf/doc/tests_irsg_ballot.py @@ -288,7 +288,7 @@ def test_edit_ballot_position_permissions(self): def test_iesg_ballot_no_irsg_actions(self): ad = Person.objects.get(user__username="ad") - wg_draft = IndividualDraftFactory(ad=ad) + wg_draft = IndividualDraftFactory(ad=ad, stream_id='ietf') irsgmember = get_active_irsg()[0] url = urlreverse('ietf.doc.views_ballot.ballot_writeupnotes', kwargs=dict(name=wg_draft.name)) diff --git a/ietf/doc/views_ballot.py b/ietf/doc/views_ballot.py index 4ff40d2268..9e2a417933 100644 --- a/ietf/doc/views_ballot.py +++ b/ietf/doc/views_ballot.py @@ -611,6 +611,10 @@ def clean_ballot_writeup(self): def ballot_writeupnotes(request, name): """Editing of ballot write-up and notes""" doc = get_object_or_404(Document, name=name) + + if doc.stream_id is None or doc.stream_id != 'ietf': + raise Http404("The requested operation is not allowed for this document.") + prev_state = doc.get_state("draft-iesg") login = request.user.person @@ -1335,4 +1339,3 @@ def parse_ballot_edit_return_point(path, doc_name, ballot_id): "ietf.iesg.views.past_documents", } return validate_return_to_path(path, get_default_path, allowed_path_handlers) - diff --git a/ietf/utils/test_runner.py 
b/ietf/utils/test_runner.py index 3c89a2d01c..6b6f282c49 100644 --- a/ietf/utils/test_runner.py +++ b/ietf/utils/test_runner.py @@ -263,7 +263,14 @@ def pyflakes_test(self): path = os.path.join(settings.BASE_DIR) warnings = [] warnings = pyflakes.checkPaths([path], verbosity=0) - self.assertEqual([], [str(w) for w in warnings]) + + # Filter out warnings about unused global variables + filtered_warnings = [ + w for w in warnings + if not re.search(r"`global \w+` is unused: name is never assigned in scope", str(w)) + ] + + self.assertEqual([], [str(w) for w in filtered_warnings]) class MyPyTest(TestCase): From 48f97c68406a158a4d4bf5da58e9168d12a5f92e Mon Sep 17 00:00:00 2001 From: Jennifer Richards Date: Tue, 1 Apr 2025 16:12:33 -0300 Subject: [PATCH 053/405] chore: fix/refactor code near unneeded global declarations (#8765) * refactor: unused vars in draft.py * refactor: unwrap/simplify random_faker() prep * chore: types/globals in test_runner.py * chore: drop unneeded globals --- ietf/doc/templatetags/wg_menu.py | 2 -- ietf/person/factories.py | 26 ++++++++++++++------------ ietf/utils/draft.py | 8 +------- ietf/utils/patch.py | 3 +-- ietf/utils/test_runner.py | 31 ++++++++++++++----------------- 5 files changed, 30 insertions(+), 40 deletions(-) diff --git a/ietf/doc/templatetags/wg_menu.py b/ietf/doc/templatetags/wg_menu.py index 76bf7eb4d0..3e8d209448 100644 --- a/ietf/doc/templatetags/wg_menu.py +++ b/ietf/doc/templatetags/wg_menu.py @@ -62,8 +62,6 @@ @register.simple_tag def wg_menu(flavor): - global parents - for p in parents: p.short_name = parent_short_names.get(p.acronym) or p.name if p.short_name.endswith(" Area"): diff --git a/ietf/person/factories.py b/ietf/person/factories.py index 45de554766..98756f26c8 100644 --- a/ietf/person/factories.py +++ b/ietf/person/factories.py @@ -26,20 +26,22 @@ fake = faker.Factory.create() -def setup(): - global acceptable_fakers - # The transliteration of some Arabic and Devanagari names introduces - # non-alphabetic 
characters that don't work with the draft author - # extraction code, and also don't seem to match the way people with Arabic - # names romanize Arabic names. Exclude those locales from name generation - # in order to avoid test failures. - locales = set( [ l for l in faker.config.AVAILABLE_LOCALES if not (l.startswith('ar_') or l.startswith('sg_') or l=='fr_QC') ] ) - acceptable_fakers = [faker.Faker(locale) for locale in locales] -setup() +# The transliteration of some Arabic and Devanagari names introduces +# non-alphabetic characters that don't work with the draft author +# extraction code, and also don't seem to match the way people with Arabic +# names romanize Arabic names. Exclude those locales from name generation +# in order to avoid test failures. +_acceptable_fakers = [ + faker.Faker(locale) + for locale in set(faker.config.AVAILABLE_LOCALES) + if not (locale.startswith('ar_') or locale.startswith('sg_') or locale == 'fr_QC') +] + def random_faker(): - global acceptable_fakers - return random.sample(acceptable_fakers, 1)[0] + """Helper to get a random faker acceptable for User names""" + return random.sample(_acceptable_fakers, 1)[0] + class UserFactory(factory.django.DjangoModelFactory): class Meta: diff --git a/ietf/utils/draft.py b/ietf/utils/draft.py index 50add5abba..53d3d40811 100755 --- a/ietf/utils/draft.py +++ b/ietf/utils/draft.py @@ -65,7 +65,6 @@ opt_debug = False opt_timestamp = False opt_trace = False -opt_authorinfo = False opt_attributes = False # Don't forget to add the option variable to the globals list in _main below @@ -1332,8 +1331,6 @@ def getmeta(fn): # ---------------------------------------------------------------------- def _output(docname, fields, outfile=sys.stdout): - global company_domain - if opt_attributes: def outputkey(key, fields): field = fields[key] @@ -1373,9 +1370,8 @@ def _printmeta(fn, outfile=sys.stdout): # Main # ---------------------------------------------------------------------- -company_domain = {} # 
type: Dict[str, str] def _main(outfile=sys.stdout): - global opt_debug, opt_timestamp, opt_trace, opt_authorinfo, files, company_domain, opt_attributes + global opt_debug, opt_timestamp, opt_trace, files, opt_attributes # set default values, if any # ---------------------------------------------------------------------- # Option processing @@ -1423,8 +1419,6 @@ def _main(outfile=sys.stdout): elif opt in ["-T", "--trace"]: # Emit trace information while working opt_trace = True - company_domain = {} - if not files: files = [ "-" ] diff --git a/ietf/utils/patch.py b/ietf/utils/patch.py index 9de2270ebb..fd3e4a165d 100644 --- a/ietf/utils/patch.py +++ b/ietf/utils/patch.py @@ -87,8 +87,7 @@ def createLock(self): debugmode = False def setdebug(): - global debugmode, streamhandler - + global debugmode debugmode = True loglevel = logging.DEBUG logformat = "%(levelname)8s %(message)s" diff --git a/ietf/utils/test_runner.py b/ietf/utils/test_runner.py index 6b6f282c49..a77377ffb5 100644 --- a/ietf/utils/test_runner.py +++ b/ietf/utils/test_runner.py @@ -53,9 +53,10 @@ import factory.random import urllib3 import warnings -from urllib.parse import urlencode from fnmatch import fnmatch +from typing import Callable, Optional +from urllib.parse import urlencode from coverage.report import Reporter from coverage.results import Numbers @@ -90,11 +91,11 @@ from mypy_boto3_s3.service_resource import Bucket -loaded_templates = set() -visited_urls = set() -test_database_name = None -old_destroy = None -old_create = None +loaded_templates: set[str] = set() +visited_urls: set[str] = set() +test_database_name: Optional[str] = None +old_destroy: Optional[Callable] = None +old_create: Optional[Callable] = None template_coverage_collection = None code_coverage_collection = None @@ -230,10 +231,12 @@ def load_and_run_fixtures(verbosity): fn() def safe_create_test_db(self, verbosity, *args, **kwargs): - global test_database_name, old_create + if old_create is None: + raise 
RuntimeError("old_create has not been set, cannot proceed") keepdb = kwargs.get('keepdb', False) if not keepdb: print(" Creating test database...") + global test_database_name test_database_name = old_create(self, 0, *args, **kwargs) if settings.GLOBAL_TEST_FIXTURES: @@ -243,8 +246,9 @@ def safe_create_test_db(self, verbosity, *args, **kwargs): return test_database_name def safe_destroy_test_db(*args, **kwargs): + if old_destroy is None: + raise RuntimeError("old_destroy has not been set, cannot proceed") sys.stdout.write('\n') - global test_database_name, old_destroy keepdb = kwargs.get('keepdb', False) if not keepdb: if settings.DATABASES["default"]["NAME"] != test_database_name: @@ -358,15 +362,13 @@ class TemplateCoverageLoader(BaseLoader): is_usable = True def get_template(self, template_name, skip=None): - global template_coverage_collection, loaded_templates - if template_coverage_collection == True: + if template_coverage_collection: loaded_templates.add(str(template_name)) raise TemplateDoesNotExist(template_name) def record_urls_middleware(get_response): def record_urls(request): - global url_coverage_collection, visited_urls - if url_coverage_collection == True: + if url_coverage_collection: visited_urls.add(request.path) return get_response(request) return record_urls @@ -532,7 +534,6 @@ def report_test_result(self, test): ( test, test_coverage*100, latest_coverage_version, master_coverage*100, )) def template_coverage_test(self): - global loaded_templates if self.runner.check_coverage: apps = [ app.split('.')[-1] for app in self.runner.test_apps ] all = get_template_paths(apps) @@ -760,7 +761,6 @@ def __init__( self.show_logging = show_logging self.rerun = rerun self.test_labels = None - global validation_settings validation_settings["validate_html"] = self if validate_html else None validation_settings["validate_html_harder"] = self if validate_html and validate_html_harder else None validation_settings["show_logging"] = show_logging @@ -783,9 +783,6 
@@ def __init__( self.blobstoremanager = TestBlobstoreManager() if manage_blobstore else None def setup_test_environment(self, **kwargs): - global template_coverage_collection - global url_coverage_collection - ietf.utils.mail.test_mode = True ietf.utils.mail.SMTP_ADDR['ip4'] = '127.0.0.1' ietf.utils.mail.SMTP_ADDR['port'] = 2025 From 48bee8a58381e5caa6cf5d6b56bd58eff614a918 Mon Sep 17 00:00:00 2001 From: Jennifer Richards Date: Tue, 1 Apr 2025 17:19:30 -0300 Subject: [PATCH 054/405] chore: update libreoffice install for bookworm (#8768) --- dev/build/Dockerfile | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/dev/build/Dockerfile b/dev/build/Dockerfile index cc55c92881..c25298d652 100644 --- a/dev/build/Dockerfile +++ b/dev/build/Dockerfile @@ -10,9 +10,8 @@ RUN groupadd -g 1000 datatracker && \ RUN apt-get purge -y imagemagick imagemagick-6-common # Install libreoffice (needed via PPT2PDF_COMMAND) -RUN echo "deb http://deb.debian.org/debian bullseye-backports main" > /etc/apt/sources.list.d/bullseye-backports.list && \ - apt-get update && \ - apt-get -qyt bullseye-backports install libreoffice-nogui +RUN apt-get update && \ + apt-get -qy install libreoffice-nogui COPY . . 
COPY ./dev/build/start.sh ./start.sh From 15ef59133a0e8b62158836b697d9413dc395228f Mon Sep 17 00:00:00 2001 From: Matthew Holloway Date: Thu, 3 Apr 2025 05:10:22 +1300 Subject: [PATCH 055/405] chore: libreoffice flags for resilience (#8769) --- ietf/settings.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/ietf/settings.py b/ietf/settings.py index 1fe5f48229..33a2f976d9 100644 --- a/ietf/settings.py +++ b/ietf/settings.py @@ -1103,7 +1103,12 @@ def skip_unreadable_post(record): DATATRACKER_MAX_UPLOAD_SIZE = 40960000 PPT2PDF_COMMAND = [ - "/usr/bin/soffice", "--headless", "--convert-to", "pdf:writer_globaldocument_pdf_Export", "--outdir" + "/usr/bin/soffice", + "--headless", # no GUI + "--safe-mode", # use a new libreoffice profile every time (ensures no reliance on accumulated profile config) + "--norestore", # don't attempt to restore files after a previous crash (ensures that one crash won't block future conversions until UI intervention) + "--convert-to", "pdf:writer_globaldocument_pdf_Export", + "--outdir" ] STATS_REGISTRATION_ATTENDEES_JSON_URL = 'https://registration.ietf.org/{number}/attendees/' From afa79dc55b16463d56b531bb1b6b06fbbeb12ace Mon Sep 17 00:00:00 2001 From: Jim Fenton Date: Wed, 2 Apr 2025 09:10:38 -0700 Subject: [PATCH 056/405] feat: Warn if uploading minutes before session end (#8700) * Warn if uploading minutes before sessionn end * Remove extraneous btn-primary for session future Co-authored-by: Robert Sparks * fix: guard against unscheduled sessions * fix: test addition of warning * fix: another guard against unscheduled sessions * feat: test future warning on session details pannel --------- Co-authored-by: Robert Sparks --- ietf/meeting/tests_views.py | 286 ++++++++++-------- ietf/meeting/views.py | 6 + .../meeting/session_details_panel.html | 2 +- .../meeting/upload_session_minutes.html | 5 + 4 files changed, 168 insertions(+), 131 deletions(-) diff --git a/ietf/meeting/tests_views.py 
b/ietf/meeting/tests_views.py index 0f91986f77..a93a26b981 100644 --- a/ietf/meeting/tests_views.py +++ b/ietf/meeting/tests_views.py @@ -6541,110 +6541,130 @@ def test_upload_bluesheets_interim_chair_access(self): self.assertIn('Upload', str(q("title"))) - def test_upload_minutes_agenda(self): - for doctype in ('minutes','agenda'): - session = SessionFactory(meeting__type_id='ietf') - if doctype == 'minutes': - url = urlreverse('ietf.meeting.views.upload_session_minutes',kwargs={'num':session.meeting.number,'session_id':session.id}) - else: - url = urlreverse('ietf.meeting.views.upload_session_agenda',kwargs={'num':session.meeting.number,'session_id':session.id}) - self.client.logout() - login_testing_unauthorized(self,"secretary",url) - r = self.client.get(url) - self.assertEqual(r.status_code, 200) - q = PyQuery(r.content) - self.assertIn('Upload', str(q("Title"))) - self.assertFalse(session.presentations.exists()) - self.assertFalse(q('form input[type="checkbox"]')) - - session2 = SessionFactory(meeting=session.meeting,group=session.group) - r = self.client.get(url) - self.assertEqual(r.status_code, 200) - q = PyQuery(r.content) - self.assertTrue(q('form input[type="checkbox"]')) - - # test not submitting a file - r = self.client.post(url, dict(submission_method="upload")) - self.assertEqual(r.status_code, 200) - q = PyQuery(r.content) - self.assertTrue(q("form .is-invalid")) - - test_file = BytesIO(b'this is some text for a test') - test_file.name = "not_really.json" - r = self.client.post(url,dict(submission_method="upload",file=test_file)) - self.assertEqual(r.status_code, 200) - q = PyQuery(r.content) - self.assertTrue(q('form .is-invalid')) - - test_file = BytesIO(b'this is some text for a test'*1510000) - test_file.name = "not_really.pdf" - r = self.client.post(url,dict(submission_method="upload",file=test_file)) - self.assertEqual(r.status_code, 200) - q = PyQuery(r.content) - self.assertTrue(q('form .is-invalid')) - - test_file = BytesIO(b'') - 
test_file.name = "not_really.html" - r = self.client.post(url,dict(submission_method="upload",file=test_file)) - self.assertEqual(r.status_code, 200) - q = PyQuery(r.content) - self.assertTrue(q('form .is-invalid')) - - # Test html sanitization - test_file = BytesIO(b'Title

Title

Some text
') - test_file.name = "some.html" - r = self.client.post(url,dict(submission_method="upload",file=test_file)) - self.assertEqual(r.status_code, 302) - doc = session.presentations.filter(document__type_id=doctype).first().document - self.assertEqual(doc.rev,'00') - text = doc.text() - self.assertIn('Some text', text) - self.assertNotIn('
', text) - text = retrieve_str(doctype, f"{doc.name}-{doc.rev}.html") - self.assertIn('Some text', text) - self.assertNotIn('
', text) - - # txt upload - test_bytes = b'This is some text for a test, with the word\nvirtual at the beginning of a line.' - test_file = BytesIO(test_bytes) - test_file.name = "some.txt" - r = self.client.post(url,dict(submission_method="upload",file=test_file,apply_to_all=False)) - self.assertEqual(r.status_code, 302) - doc = session.presentations.filter(document__type_id=doctype).first().document - self.assertEqual(doc.rev,'01') - self.assertFalse(session2.presentations.filter(document__type_id=doctype)) - retrieved_bytes = retrieve_bytes(doctype, f"{doc.name}-{doc.rev}.txt") - self.assertEqual(retrieved_bytes, test_bytes) - + def test_label_future_sessions(self): + self.client.login(username='secretary', password='secretary+password') + for future in (True, False): + mtg_date = date_today()+datetime.timedelta(days=180 if future else -180) + session = SessionFactory(meeting__type_id='ietf', meeting__date=mtg_date) + # Verify future warning shows on the session details panel + url = urlreverse('ietf.meeting.views.session_details', kwargs={'num':session.meeting.number, 'acronym': session.group.acronym}) r = self.client.get(url) - self.assertEqual(r.status_code, 200) - q = PyQuery(r.content) - self.assertIn('Revise', str(q("Title"))) - test_bytes = b'this is some different text for a test' - test_file = BytesIO(test_bytes) - test_file.name = "also_some.txt" - r = self.client.post(url,dict(submission_method="upload",file=test_file,apply_to_all=True)) - self.assertEqual(r.status_code, 302) - doc = Document.objects.get(pk=doc.pk) - self.assertEqual(doc.rev,'02') - self.assertTrue(session2.presentations.filter(document__type_id=doctype)) - retrieved_bytes = retrieve_bytes(doctype, f"{doc.name}-{doc.rev}.txt") - self.assertEqual(retrieved_bytes, test_bytes) - - # Test bad encoding - test_file = BytesIO('

Title

Some\x93text
'.encode('latin1')) - test_file.name = "some.html" - r = self.client.post(url,dict(submission_method="upload",file=test_file)) - self.assertContains(r, 'Could not identify the file encoding') - doc = Document.objects.get(pk=doc.pk) - self.assertEqual(doc.rev,'02') + self.assertTrue(r.status_code==200) + if future: + self.assertContains(r, "Session has not ended yet") + else: + self.assertNotContains(r, "Session has not ended yet") - # Verify that we don't have dead links - url = urlreverse('ietf.meeting.views.session_details', kwargs={'num':session.meeting.number, 'acronym': session.group.acronym}) - top = '/meeting/%s/' % session.meeting.number - self.requests_mock.get(f'{session.notes_url()}/download', text='markdown notes') - self.requests_mock.get(f'{session.notes_url()}/info', text=json.dumps({'title': 'title', 'updatetime': '2021-12-01T17:11:00z'})) - self.crawl_materials(url=url, top=top) + def test_upload_minutes_agenda(self): + for doctype in ('minutes','agenda'): + for future in (True, False): + mtg_date = date_today()+datetime.timedelta(days=180 if future else -180) + session = SessionFactory(meeting__type_id='ietf', meeting__date=mtg_date) + if doctype == 'minutes': + url = urlreverse('ietf.meeting.views.upload_session_minutes',kwargs={'num':session.meeting.number,'session_id':session.id}) + else: + url = urlreverse('ietf.meeting.views.upload_session_agenda',kwargs={'num':session.meeting.number,'session_id':session.id}) + self.client.logout() + login_testing_unauthorized(self,"secretary",url) + r = self.client.get(url) + self.assertEqual(r.status_code, 200) + q = PyQuery(r.content) + self.assertIn('Upload', str(q("Title"))) + self.assertFalse(session.presentations.exists()) + self.assertFalse(q('form input[type="checkbox"]')) + if future and doctype == "minutes": + self.assertContains(r, "Session has not ended yet") + else: + self.assertNotContains(r, "Session has not ended yet") + + session2 = SessionFactory(meeting=session.meeting,group=session.group) 
+ r = self.client.get(url) + self.assertEqual(r.status_code, 200) + q = PyQuery(r.content) + self.assertTrue(q('form input[type="checkbox"]')) + + # test not submitting a file + r = self.client.post(url, dict(submission_method="upload")) + self.assertEqual(r.status_code, 200) + q = PyQuery(r.content) + self.assertTrue(q("form .is-invalid")) + + test_file = BytesIO(b'this is some text for a test') + test_file.name = "not_really.json" + r = self.client.post(url,dict(submission_method="upload",file=test_file)) + self.assertEqual(r.status_code, 200) + q = PyQuery(r.content) + self.assertTrue(q('form .is-invalid')) + + test_file = BytesIO(b'this is some text for a test'*1510000) + test_file.name = "not_really.pdf" + r = self.client.post(url,dict(submission_method="upload",file=test_file)) + self.assertEqual(r.status_code, 200) + q = PyQuery(r.content) + self.assertTrue(q('form .is-invalid')) + + test_file = BytesIO(b'') + test_file.name = "not_really.html" + r = self.client.post(url,dict(submission_method="upload",file=test_file)) + self.assertEqual(r.status_code, 200) + q = PyQuery(r.content) + self.assertTrue(q('form .is-invalid')) + + # Test html sanitization + test_file = BytesIO(b'Title

Title

Some text
') + test_file.name = "some.html" + r = self.client.post(url,dict(submission_method="upload",file=test_file)) + self.assertEqual(r.status_code, 302) + doc = session.presentations.filter(document__type_id=doctype).first().document + self.assertEqual(doc.rev,'00') + text = doc.text() + self.assertIn('Some text', text) + self.assertNotIn('
', text) + text = retrieve_str(doctype, f"{doc.name}-{doc.rev}.html") + self.assertIn('Some text', text) + self.assertNotIn('
', text) + + # txt upload + test_bytes = b'This is some text for a test, with the word\nvirtual at the beginning of a line.' + test_file = BytesIO(test_bytes) + test_file.name = "some.txt" + r = self.client.post(url,dict(submission_method="upload",file=test_file,apply_to_all=False)) + self.assertEqual(r.status_code, 302) + doc = session.presentations.filter(document__type_id=doctype).first().document + self.assertEqual(doc.rev,'01') + self.assertFalse(session2.presentations.filter(document__type_id=doctype)) + retrieved_bytes = retrieve_bytes(doctype, f"{doc.name}-{doc.rev}.txt") + self.assertEqual(retrieved_bytes, test_bytes) + + r = self.client.get(url) + self.assertEqual(r.status_code, 200) + q = PyQuery(r.content) + self.assertIn('Revise', str(q("Title"))) + test_bytes = b'this is some different text for a test' + test_file = BytesIO(test_bytes) + test_file.name = "also_some.txt" + r = self.client.post(url,dict(submission_method="upload",file=test_file,apply_to_all=True)) + self.assertEqual(r.status_code, 302) + doc = Document.objects.get(pk=doc.pk) + self.assertEqual(doc.rev,'02') + self.assertTrue(session2.presentations.filter(document__type_id=doctype)) + retrieved_bytes = retrieve_bytes(doctype, f"{doc.name}-{doc.rev}.txt") + self.assertEqual(retrieved_bytes, test_bytes) + + # Test bad encoding + test_file = BytesIO('

Title

Some\x93text
'.encode('latin1')) + test_file.name = "some.html" + r = self.client.post(url,dict(submission_method="upload",file=test_file)) + self.assertContains(r, 'Could not identify the file encoding') + doc = Document.objects.get(pk=doc.pk) + self.assertEqual(doc.rev,'02') + + # Verify that we don't have dead links + url = urlreverse('ietf.meeting.views.session_details', kwargs={'num':session.meeting.number, 'acronym': session.group.acronym}) + top = '/meeting/%s/' % session.meeting.number + self.requests_mock.get(f'{session.notes_url()}/download', text='markdown notes') + self.requests_mock.get(f'{session.notes_url()}/info', text=json.dumps({'title': 'title', 'updatetime': '2021-12-01T17:11:00z'})) + self.crawl_materials(url=url, top=top) def test_upload_minutes_agenda_unscheduled(self): for doctype in ('minutes','agenda'): @@ -6661,6 +6681,7 @@ def test_upload_minutes_agenda_unscheduled(self): self.assertIn('Upload', str(q("Title"))) self.assertFalse(session.presentations.exists()) self.assertFalse(q('form input[type="checkbox"]')) + self.assertNotContains(r, "Session has not ended yet") test_file = BytesIO(b'this is some text for a test') test_file.name = "not_really.txt" @@ -6669,35 +6690,40 @@ def test_upload_minutes_agenda_unscheduled(self): @override_settings(MEETING_MATERIALS_SERVE_LOCALLY=True) def test_upload_minutes_agenda_interim(self): - session=SessionFactory(meeting__type_id='interim') for doctype in ('minutes','agenda'): - if doctype=='minutes': - url = urlreverse('ietf.meeting.views.upload_session_minutes',kwargs={'num':session.meeting.number,'session_id':session.id}) - else: - url = urlreverse('ietf.meeting.views.upload_session_agenda',kwargs={'num':session.meeting.number,'session_id':session.id}) - self.client.logout() - login_testing_unauthorized(self,"secretary",url) - r = self.client.get(url) - self.assertEqual(r.status_code, 200) - q = PyQuery(r.content) - self.assertIn('Upload', str(q("title"))) - 
self.assertFalse(session.presentations.filter(document__type_id=doctype)) - test_bytes = b'this is some text for a test' - test_file = BytesIO(test_bytes) - test_file.name = "not_really.txt" - r = self.client.post(url,dict(submission_method="upload",file=test_file)) - self.assertEqual(r.status_code, 302) - doc = session.presentations.filter(document__type_id=doctype).first().document - self.assertEqual(doc.rev,'00') - retrieved_bytes = retrieve_bytes(doctype, f"{doc.name}-{doc.rev}.txt") - self.assertEqual(retrieved_bytes, test_bytes) - - # Verify that we don't have dead links - url = urlreverse('ietf.meeting.views.session_details', kwargs={'num':session.meeting.number, 'acronym': session.group.acronym}) - top = '/meeting/%s/' % session.meeting.number - self.requests_mock.get(f'{session.notes_url()}/download', text='markdown notes') - self.requests_mock.get(f'{session.notes_url()}/info', text=json.dumps({'title': 'title', 'updatetime': '2021-12-01T17:11:00z'})) - self.crawl_materials(url=url, top=top) + for future in (True, False): + session=SessionFactory(meeting__type_id='interim', meeting__date = date_today()+datetime.timedelta(days=180 if future else -180)) + if doctype=='minutes': + url = urlreverse('ietf.meeting.views.upload_session_minutes',kwargs={'num':session.meeting.number,'session_id':session.id}) + else: + url = urlreverse('ietf.meeting.views.upload_session_agenda',kwargs={'num':session.meeting.number,'session_id':session.id}) + self.client.logout() + login_testing_unauthorized(self,"secretary",url) + r = self.client.get(url) + self.assertEqual(r.status_code, 200) + q = PyQuery(r.content) + self.assertIn('Upload', str(q("title"))) + self.assertFalse(session.presentations.filter(document__type_id=doctype)) + if future and doctype == "minutes": + self.assertContains(r, "Session has not ended yet") + else: + self.assertNotContains(r, "Session has not ended yet") + test_bytes = b'this is some text for a test' + test_file = BytesIO(test_bytes) + 
test_file.name = "not_really.txt" + r = self.client.post(url,dict(submission_method="upload",file=test_file)) + self.assertEqual(r.status_code, 302) + doc = session.presentations.filter(document__type_id=doctype).first().document + self.assertEqual(doc.rev,'00') + retrieved_bytes = retrieve_bytes(doctype, f"{doc.name}-{doc.rev}.txt") + self.assertEqual(retrieved_bytes, test_bytes) + + # Verify that we don't have dead links + url = urlreverse('ietf.meeting.views.session_details', kwargs={'num':session.meeting.number, 'acronym': session.group.acronym}) + top = '/meeting/%s/' % session.meeting.number + self.requests_mock.get(f'{session.notes_url()}/download', text='markdown notes') + self.requests_mock.get(f'{session.notes_url()}/info', text=json.dumps({'title': 'title', 'updatetime': '2021-12-01T17:11:00z'})) + self.crawl_materials(url=url, top=top) @override_settings(MEETING_MATERIALS_SERVE_LOCALLY=True) def test_upload_narrativeminutes(self): diff --git a/ietf/meeting/views.py b/ietf/meeting/views.py index 6a73059d92..722bf829e1 100644 --- a/ietf/meeting/views.py +++ b/ietf/meeting/views.py @@ -2522,6 +2522,8 @@ def session_details(request, num, acronym): else: pending_suggestions = SlideSubmission.objects.none() + tsa = session.official_timeslotassignment() + future = tsa is not None and timezone.now() < tsa.timeslot.end_time() return render(request, "meeting/session_details.html", { 'scheduled_sessions':scheduled_sessions , 'unscheduled_sessions':unscheduled_sessions , @@ -2532,6 +2534,7 @@ def session_details(request, num, acronym): 'can_manage_materials' : can_manage, 'can_view_request': can_view_request, 'thisweek': datetime_today()-datetime.timedelta(days=7), + 'future': future, }) class SessionDraftsForm(forms.Form): @@ -2823,11 +2826,14 @@ def upload_session_minutes(request, session_id, num): else: form = UploadMinutesForm(show_apply_to_all_checkbox) + tsa = session.official_timeslotassignment() + future = tsa is not None and timezone.now() < 
tsa.timeslot.end_time() return render(request, "meeting/upload_session_minutes.html", {'session': session, 'session_number': session_number, 'minutes_sp' : minutes_sp, 'form': form, + 'future': future, }) @role_required("Secretariat") diff --git a/ietf/templates/meeting/session_details_panel.html b/ietf/templates/meeting/session_details_panel.html index 87d9e3d672..7c52ac0b4a 100644 --- a/ietf/templates/meeting/session_details_panel.html +++ b/ietf/templates/meeting/session_details_panel.html @@ -109,7 +109,7 @@

Agenda, Minutes, and Bluesheets

{% endif %} {% if not session.type_counter.minutes %} - Import minutes from notes.ietf.org + Import minutes from notes.ietf.org Upload minutes diff --git a/ietf/templates/meeting/upload_session_minutes.html b/ietf/templates/meeting/upload_session_minutes.html index 30eadda277..324440681f 100644 --- a/ietf/templates/meeting/upload_session_minutes.html +++ b/ietf/templates/meeting/upload_session_minutes.html @@ -26,6 +26,11 @@

{% if session_number %}

Session {{ session_number }} : {{ session.official_timeslotassignment.timeslot.time|timezone:session.meeting.time_zone|date:"D M-d-Y Hi" }}

{% endif %} + {% if future %} +

+ Caution: Session has not ended yet +

+ {% endif %}
{% csrf_token %} {% bootstrap_form form %} From c70e67dc917dea9fe0347575811d5ab02ef91ce8 Mon Sep 17 00:00:00 2001 From: Jennifer Richards Date: Wed, 2 Apr 2025 13:11:08 -0300 Subject: [PATCH 057/405] chore: unpin importlib-metadata (#8762) --- requirements.txt | 1 - 1 file changed, 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 8bd906c220..cd93f448e2 100644 --- a/requirements.txt +++ b/requirements.txt @@ -42,7 +42,6 @@ gunicorn>=20.1.0 hashids>=1.3.1 html2text>=2020.1.16 # Used only to clean comment field of secr/sreq html5lib>=1.1 # Only used in tests -importlib-metadata<8.5.0 # indirect req of Markdown/inflect; https://github.com/ietf-tools/datatracker/issues/7924 inflect>= 6.0.2 jsonfield>=3.1.0 # for SubmissionCheck. This is https://github.com/bradjasper/django-jsonfield/. jsonschema[format]>=4.2.1 From b95bbe670fb1f3506ef156034fd5107c1dca3365 Mon Sep 17 00:00:00 2001 From: rjsparks <10996692+rjsparks@users.noreply.github.com> Date: Wed, 2 Apr 2025 16:24:20 +0000 Subject: [PATCH 058/405] ci: update base image target version to 20250402T1611 --- dev/build/Dockerfile | 2 +- dev/build/TARGET_BASE | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/dev/build/Dockerfile b/dev/build/Dockerfile index c25298d652..cd0a70667c 100644 --- a/dev/build/Dockerfile +++ b/dev/build/Dockerfile @@ -1,4 +1,4 @@ -FROM ghcr.io/ietf-tools/datatracker-app-base:20250327T1859 +FROM ghcr.io/ietf-tools/datatracker-app-base:20250402T1611 LABEL maintainer="IETF Tools Team " ENV DEBIAN_FRONTEND=noninteractive diff --git a/dev/build/TARGET_BASE b/dev/build/TARGET_BASE index 50e8bfd839..1195fc9a0b 100644 --- a/dev/build/TARGET_BASE +++ b/dev/build/TARGET_BASE @@ -1 +1 @@ -20250327T1859 +20250402T1611 From 2b7d4ad414e61b001cab30703a270929852a6b57 Mon Sep 17 00:00:00 2001 From: Nicolas Giard Date: Fri, 4 Apr 2025 02:19:43 -0400 Subject: [PATCH 059/405] ci: add update staging DB step to deploy workflow --- .github/workflows/build.yml | 13 +++++++++++++ 1 
file changed, 13 insertions(+) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 123bd5c65a..36439aa6cf 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -444,6 +444,19 @@ jobs: PKG_VERSION: ${{needs.prepare.outputs.pkg_version}} steps: + - name: Refresh Staging DB + uses: the-actions-org/workflow-dispatch@v4 + with: + workflow: update-staging-db.yml + repo: ietf-tools/infra-k8s + ref: main + token: ${{ secrets.GH_INFRA_K8S_TOKEN }} + inputs: '{ "sourceDb":"datatracker" }' + wait-for-completion: true + wait-for-completion-timeout: 5m + wait-for-completion-interval: 20s + display-workflow-run-url: false + - name: Deploy to staging uses: the-actions-org/workflow-dispatch@v4 with: From f9ca03086493a00ca188cb4cd949ba1831fd452a Mon Sep 17 00:00:00 2001 From: Rudi Matz Date: Tue, 8 Apr 2025 20:24:00 -0400 Subject: [PATCH 060/405] fix: unexpected chatlog links (#8774) * fix: unexpected chatlog links * fix: separate handling of polls and chatlogs --- ietf/meeting/tests_views.py | 38 +++++++++++++++++++ ietf/meeting/views.py | 8 +++- .../meeting/session_buttons_include.html | 11 +++--- 3 files changed, 51 insertions(+), 6 deletions(-) diff --git a/ietf/meeting/tests_views.py b/ietf/meeting/tests_views.py index a93a26b981..dfb414b61b 100644 --- a/ietf/meeting/tests_views.py +++ b/ietf/meeting/tests_views.py @@ -7736,6 +7736,44 @@ def test_add_session_recordings(self): self.assertEqual(r.status_code, 404) self.assertFalse(mock_delete.called) + def test_show_chatlog_links(self): + meeting = MeetingFactory(type_id='ietf', number='122') + session = SessionFactory(meeting=meeting) + doc_name = 'chatlog-72-mars-197001010000' + SessionPresentation.objects.create(session=session,document=DocumentFactory(type_id='chatlog', name=doc_name)) + + session_url = urlreverse('ietf.meeting.views.session_details', + kwargs={'num':meeting.number, 'acronym':session.group.acronym}) + + r = self.client.get(session_url) + + 
self.assertEqual(r.status_code, 200) + + q = PyQuery(r.content) + + # Find the chatlog link in the desktop view + link_chatlog_box = q(f'a[title="Chat logs for {session.group.acronym}"]') + self.assertTrue(link_chatlog_box, 'Expected element with title "Chat logs for {group.acronym}" not found.') + self.assertEqual(link_chatlog_box.attr('href'), '/doc/'+ doc_name) + + # Find the chatlog link in the mobile view + link_chatlog_list = q('li:contains("Chat logs")') + self.assertTrue(link_chatlog_list, 'Expected
  • element containing "Chat logs" not found.') + self.assertEqual(link_chatlog_list.find('a').attr('href'), '/doc/'+ doc_name) + + def test_hide_chatlog_links(self): + # mock meeting and session, but no chatlog document + meeting = MeetingFactory(type_id='ietf', number='122') + session = SessionFactory(meeting=meeting) + + session_url = urlreverse('ietf.meeting.views.session_details', + kwargs={'num':meeting.number, 'acronym':session.group.acronym}) + + r = self.client.get(session_url) + + self.assertEqual(r.status_code, 200) + # validate no links for chat logs exist + self.assertNotContains(r, 'Chat logs') class HasMeetingsTests(TestCase): diff --git a/ietf/meeting/views.py b/ietf/meeting/views.py index 722bf829e1..591b934b58 100644 --- a/ietf/meeting/views.py +++ b/ietf/meeting/views.py @@ -23,6 +23,7 @@ from urllib.parse import parse_qs, unquote, urlencode, urlsplit, urlunsplit, urlparse from tempfile import mkstemp from wsgiref.handlers import format_date_time +from itertools import chain from django import forms from django.core.cache import caches @@ -2496,7 +2497,12 @@ def session_details(request, num, acronym): session.filtered_artifacts.sort(key=lambda d:artifact_types.index(d.document.type.slug)) session.filtered_slides = session.presentations.filter(document__type__slug='slides').order_by('order') session.filtered_drafts = session.presentations.filter(document__type__slug='draft') - session.filtered_chatlog_and_polls = session.presentations.filter(document__type__slug__in=('chatlog', 'polls')).order_by('document__type__slug') + + filtered_polls = session.presentations.filter(document__type__slug=('polls')) + filtered_chatlogs = session.presentations.filter(document__type__slug=('chatlog')) + session.filtered_chatlog_and_polls = chain(filtered_chatlogs, filtered_polls) + session.chatlog = filtered_chatlogs.first() + # TODO FIXME Deleted materials shouldn't be in the presentations for qs in 
[session.filtered_artifacts,session.filtered_slides,session.filtered_drafts]: qs = [p for p in qs if p.document.get_state_slug(p.document.type_id)!='deleted'] diff --git a/ietf/templates/meeting/session_buttons_include.html b/ietf/templates/meeting/session_buttons_include.html index bb5144b45d..b1fd92ba5b 100644 --- a/ietf/templates/meeting/session_buttons_include.html +++ b/ietf/templates/meeting/session_buttons_include.html @@ -2,7 +2,8 @@ {% load origin %} {% load static %} {% load textfilters tz %} -{% load ietf_filters %} +{% load ietf_filters session_filters %} + {% origin %} {% if item and item|should_show_agenda_session_buttons %} {% with slug=item.slug session=item.session timeslot=item.timeslot %} @@ -126,10 +127,10 @@ {% else %} {# chat logs #} - {% if meeting.has_chat_logs %} + {% if meeting.has_chat_logs and session.chatlog %} @@ -303,10 +304,10 @@
  • {% else %} {# chat logs #} - {% if meeting.has_chat_logs %} + {% if meeting.has_chat_logs and session.chatlog %}
  • + href="/doc/{{ session.chatlog.document.name }}"> Chat logs
  • From a728cf2440ab5f4acc88714023d3b2e2f834018b Mon Sep 17 00:00:00 2001 From: Jennifer Richards Date: Tue, 8 Apr 2025 21:24:22 -0300 Subject: [PATCH 061/405] test: fix apparent typo / tautological test (#8773) Co-authored-by: Robert Sparks --- ietf/api/tests.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ietf/api/tests.py b/ietf/api/tests.py index ac0b37a608..7cc56c2d0a 100644 --- a/ietf/api/tests.py +++ b/ietf/api/tests.py @@ -1440,7 +1440,7 @@ def test_api_top_level(self): resource_list = r.json() for name in self.apps: - if not name in self.apps: + if not name in resource_list: sys.stderr.write("Expected a REST API resource for %s, but didn't find one\n" % name) for name in self.apps: From 0348dbd865b99692235c2f6e5a75a8064f8113a1 Mon Sep 17 00:00:00 2001 From: Ryan Cross Date: Tue, 15 Apr 2025 07:37:40 -0700 Subject: [PATCH 062/405] feat: new meeting registration implementation (#8408) * feat: new meeting registration implementation * fix: use on_delete=models.PROTECT for certain FKs * fix: convert outlier reg_types,ticket_types on initial migration * fix: fix initial migration mistake * fix: remove management command. add task * fix: move migration 0010 to 0011 to resolve conflict * fix: add missing migration for model changes * fix: add reg type names for unknown * fix: change migration to use 'unknown'. 
Add test function * fix: merge migrations * fix: rename test_migrate_registrations to check_migrate_registrations * fix: update names.json * fix: fix migration issue and add task * fix: fix broken migration dependency --- ietf/api/tests.py | 193 +++++++++++++++++- ietf/api/urls.py | 1 + ietf/api/views.py | 143 ++++++++++++- ietf/meeting/admin.py | 32 ++- ietf/meeting/factories.py | 28 ++- .../0011_registration_registrationticket.py | 90 ++++++++ ietf/meeting/models.py | 41 +++- ietf/meeting/resources.py | 43 +++- ietf/meeting/tasks.py | 17 ++ ietf/meeting/tests_utils.py | 91 +++++++++ ietf/meeting/utils.py | 109 +++++++++- ietf/name/admin.py | 4 + ietf/name/fixtures/names.json | 140 +++++++++++++ ...ancetypename_registrationtickettypename.py | 47 +++++ .../migrations/0017_populate_new_reg_names.py | 39 ++++ ietf/name/models.py | 5 +- ietf/name/resources.py | 32 ++- 17 files changed, 1045 insertions(+), 10 deletions(-) create mode 100644 ietf/meeting/migrations/0011_registration_registrationticket.py create mode 100644 ietf/meeting/tests_utils.py create mode 100644 ietf/name/migrations/0016_attendancetypename_registrationtickettypename.py create mode 100644 ietf/name/migrations/0017_populate_new_reg_names.py diff --git a/ietf/api/tests.py b/ietf/api/tests.py index 7cc56c2d0a..23e9ff0103 100644 --- a/ietf/api/tests.py +++ b/ietf/api/tests.py @@ -1,6 +1,7 @@ # Copyright The IETF Trust 2015-2024, All Rights Reserved # -*- coding: utf-8 -*- import base64 +import copy import datetime import json import html @@ -31,7 +32,7 @@ from ietf.doc.factories import IndividualDraftFactory, WgDraftFactory, WgRfcFactory from ietf.group.factories import RoleFactory from ietf.meeting.factories import MeetingFactory, SessionFactory -from ietf.meeting.models import Session +from ietf.meeting.models import Session, Registration from ietf.nomcom.models import Volunteer from ietf.nomcom.factories import NomComFactory, nomcom_kwargs_for_year from ietf.person.factories import PersonFactory, 
random_faker, EmailFactory, PersonalApiKeyFactory @@ -828,6 +829,196 @@ def test_api_new_meeting_registration_nomcom_volunteer(self): self.assertEqual(volunteer.nomcom, nomcom) self.assertEqual(volunteer.origin, 'registration') + @override_settings(APP_API_TOKENS={"ietf.api.views.api_new_meeting_registration_v2": ["valid-token"]}) + def test_api_new_meeting_registration_v2(self): + meeting = MeetingFactory(type_id='ietf') + person = PersonFactory() + regs = [ + { + 'affiliation': "Alguma Corporação", + 'country_code': 'PT', + 'email': person.email().address, + 'first_name': person.first_name(), + 'last_name': person.last_name(), + 'meeting': str(meeting.number), + 'reg_type': 'onsite', + 'ticket_type': 'week_pass', + 'checkedin': False, + 'is_nomcom_volunteer': False, + 'cancelled': False, + } + ] + + url = urlreverse('ietf.api.views.api_new_meeting_registration_v2') + # + # Test invalid key + r = self.client.post(url, data=json.dumps(regs), content_type='application/json', headers={"X-Api-Key": "invalid-token"}) + self.assertEqual(r.status_code, 403) + # + # Test invalid data + bad_regs = copy.deepcopy(regs) + del(bad_regs[0]['email']) + r = self.client.post(url, data=json.dumps(bad_regs), content_type='application/json', headers={"X-Api-Key": "valid-token"}) + self.assertEqual(r.status_code, 400) + # + # Test valid POST + r = self.client.post(url, data=json.dumps(regs), content_type='application/json', headers={"X-Api-Key": "valid-token"}) + self.assertContains(r, "Success", status_code=202) + # + # Check record + reg = regs[0] + objects = Registration.objects.filter(email=reg['email'], meeting__number=reg['meeting']) + self.assertEqual(objects.count(), 1) + obj = objects[0] + for key in ['affiliation', 'country_code', 'first_name', 'last_name', 'checkedin']: + self.assertEqual(getattr(obj, key), False if key=='checkedin' else reg.get(key) , "Bad data for field '%s'" % key) + self.assertEqual(obj.tickets.count(), 1) + ticket = obj.tickets.first() + 
self.assertEqual(ticket.ticket_type.slug, regs[0]['ticket_type']) + self.assertEqual(ticket.attendance_type.slug, regs[0]['reg_type']) + self.assertEqual(obj.person, person) + # + # Test update (switch to remote) + regs = [ + { + 'affiliation': "Alguma Corporação", + 'country_code': 'PT', + 'email': person.email().address, + 'first_name': person.first_name(), + 'last_name': person.last_name(), + 'meeting': str(meeting.number), + 'reg_type': 'remote', + 'ticket_type': 'week_pass', + 'checkedin': False, + 'is_nomcom_volunteer': False, + 'cancelled': False, + } + ] + r = self.client.post(url, data=json.dumps(regs), content_type='application/json', headers={"X-Api-Key": "valid-token"}) + self.assertContains(r, "Success", status_code=202) + objects = Registration.objects.filter(email=reg['email'], meeting__number=reg['meeting']) + self.assertEqual(objects.count(), 1) + obj = objects[0] + self.assertEqual(obj.tickets.count(), 1) + ticket = obj.tickets.first() + self.assertEqual(ticket.ticket_type.slug, regs[0]['ticket_type']) + self.assertEqual(ticket.attendance_type.slug, regs[0]['reg_type']) + # + # Test multiple + regs = [ + { + 'affiliation': "Alguma Corporação", + 'country_code': 'PT', + 'email': person.email().address, + 'first_name': person.first_name(), + 'last_name': person.last_name(), + 'meeting': str(meeting.number), + 'reg_type': 'onsite', + 'ticket_type': 'one_day', + 'checkedin': False, + 'is_nomcom_volunteer': False, + 'cancelled': False, + }, + + { + 'affiliation': "Alguma Corporação", + 'country_code': 'PT', + 'email': person.email().address, + 'first_name': person.first_name(), + 'last_name': person.last_name(), + 'meeting': str(meeting.number), + 'reg_type': 'remote', + 'ticket_type': 'week_pass', + 'checkedin': False, + 'is_nomcom_volunteer': False, + 'cancelled': False, + } + ] + + r = self.client.post(url, data=json.dumps(regs), content_type='application/json', headers={"X-Api-Key": "valid-token"}) + self.assertContains(r, "Success", 
status_code=202) + objects = Registration.objects.filter(email=reg['email'], meeting__number=reg['meeting']) + self.assertEqual(objects.count(), 1) + obj = objects[0] + self.assertEqual(obj.tickets.count(), 2) + self.assertEqual(obj.tickets.filter(attendance_type__slug='onsite').count(), 1) + self.assertEqual(obj.tickets.filter(attendance_type__slug='remote').count(), 1) + + @override_settings(APP_API_TOKENS={"ietf.api.views.api_new_meeting_registration_v2": ["valid-token"]}) + def test_api_new_meeting_registration_v2_cancelled(self): + meeting = MeetingFactory(type_id='ietf') + person = PersonFactory() + regs = [ + { + 'affiliation': "Acme", + 'country_code': 'US', + 'email': person.email().address, + 'first_name': person.first_name(), + 'last_name': person.last_name(), + 'meeting': str(meeting.number), + 'reg_type': 'onsite', + 'ticket_type': 'week_pass', + 'checkedin': False, + 'is_nomcom_volunteer': False, + 'cancelled': False, + } + ] + url = urlreverse('ietf.api.views.api_new_meeting_registration_v2') + self.assertEqual(Registration.objects.count(), 0) + r = self.client.post(url, data=json.dumps(regs), content_type='application/json', headers={"X-Api-Key": "valid-token"}) + self.assertContains(r, "Success", status_code=202) + self.assertEqual(Registration.objects.count(), 1) + regs[0]['cancelled'] = True + r = self.client.post(url, data=json.dumps(regs), content_type='application/json', headers={"X-Api-Key": "valid-token"}) + self.assertContains(r, "Success", status_code=202) + self.assertEqual(Registration.objects.count(), 0) + + @override_settings(APP_API_TOKENS={"ietf.api.views.api_new_meeting_registration_v2": ["valid-token"]}) + def test_api_new_meeting_registration_v2_nomcom(self): + meeting = MeetingFactory(type_id='ietf') + person = PersonFactory() + regs = [ + { + 'affiliation': "Acme", + 'country_code': 'US', + 'email': person.email().address, + 'first_name': person.first_name(), + 'last_name': person.last_name(), + 'meeting': str(meeting.number), + 
'reg_type': 'onsite', + 'ticket_type': 'week_pass', + 'checkedin': False, + 'is_nomcom_volunteer': False, + 'cancelled': False, + } + ] + + url = urlreverse('ietf.api.views.api_new_meeting_registration_v2') + now = datetime.datetime.now() + if now.month > 10: + year = now.year + 1 + else: + year = now.year + # create appropriate group and nomcom objects + nomcom = NomComFactory.create(is_accepting_volunteers=True, **nomcom_kwargs_for_year(year)) + + # first test is_nomcom_volunteer False + r = self.client.post(url, data=json.dumps(regs), content_type='application/json', headers={"X-Api-Key": "valid-token"}) + self.assertContains(r, "Success", status_code=202) + # assert no Volunteers exists + self.assertEqual(Volunteer.objects.count(), 0) + + # test is_nomcom_volunteer True + regs[0]['is_nomcom_volunteer'] = True + r = self.client.post(url, data=json.dumps(regs), content_type='application/json', headers={"X-Api-Key": "valid-token"}) + self.assertContains(r, "Success", status_code=202) + # assert Volunteer exists + self.assertEqual(Volunteer.objects.count(), 1) + volunteer = Volunteer.objects.last() + self.assertEqual(volunteer.person, person) + self.assertEqual(volunteer.nomcom, nomcom) + self.assertEqual(volunteer.origin, 'registration') + def test_api_version(self): DumpInfo.objects.create(date=timezone.datetime(2022,8,31,7,10,1,tzinfo=datetime.timezone.utc), host='testapi.example.com',tz='UTC') url = urlreverse('ietf.api.views.version') diff --git a/ietf/api/urls.py b/ietf/api/urls.py index b0dbaf91ce..2fcaba6094 100644 --- a/ietf/api/urls.py +++ b/ietf/api/urls.py @@ -66,6 +66,7 @@ # Let MeetEcho upload session polls url(r'^notify/session/polls/?$', meeting_views.api_upload_polls), # Let the registration system notify us about registrations + url(r'^notify/meeting/registration/v2/?', api_views.api_new_meeting_registration_v2), url(r'^notify/meeting/registration/?', api_views.api_new_meeting_registration), # OpenID authentication provider url(r'^openid/$', 
# JSON schema for the v2 registration notification: a non-empty list of
# per-ticket registration records, all describing one person at one meeting.
_new_registration_json_validator = jsonschema.Draft202012Validator(
    schema={
        "type": "array",
        "minItems": 1,  # an empty list has no meeting/email to act on
        "items": {
            "type": "object",
            "properties": {
                "meeting": {"type": "string"},
                "first_name": {"type": "string"},
                "last_name": {"type": "string"},
                "affiliation": {"type": "string"},
                "country_code": {"type": "string"},
                "email": {"type": "string"},
                "reg_type": {"type": "string"},
                "ticket_type": {"type": "string"},
                "checkedin": {"type": "boolean"},
                "is_nomcom_volunteer": {"type": "boolean"},
                "cancelled": {"type": "boolean"},
            },
            "required": [
                "meeting", "first_name", "last_name", "affiliation",
                "country_code", "email", "reg_type", "ticket_type",
                "checkedin", "is_nomcom_volunteer", "cancelled",
            ],
            # Must be the boolean False. The string "false" is truthy and
            # would silently allow arbitrary extra properties.
            "additionalProperties": False,
        },
    }
)


@requires_api_token
@csrf_exempt
def api_new_meeting_registration_v2(request):
    """REST API to notify the datatracker about a new meeting registration

    Accepts a JSON list of registration records (one per ticket), all for the
    same meeting and email address. Creates or updates the matching
    Registration and replaces its tickets with those described by the payload.
    A single record with cancelled=True removes the matching ticket, or the
    whole Registration when it held no other tickets. Returns 202 on success.
    """
    def _http_err(code, text):
        return HttpResponse(text, status=code, content_type="text/plain")

    if request.method != "POST":
        return _http_err(405, "Method not allowed")

    if request.content_type != "application/json":
        return _http_err(415, "Content-Type must be application/json")

    # Validate payload syntax and schema
    try:
        payload = json.loads(request.body)
        _new_registration_json_validator.validate(payload)
    except json.decoder.JSONDecodeError as err:
        return _http_err(400, f"JSON parse error at line {err.lineno} col {err.colno}: {err.msg}")
    except jsonschema.exceptions.ValidationError as err:
        return _http_err(400, f"JSON schema error at {err.json_path}: {err.message}")
    except Exception:
        return _http_err(400, "Invalid request format")

    # Validate consistency
    # - if we receive multiple records they must be for the same meeting and
    #   the same person (email)
    if len(payload) > 1:
        if len({r["meeting"] for r in payload}) != 1:
            return _http_err(400, "Different meeting values")
        if len({r["email"] for r in payload}) != 1:
            return _http_err(400, "Different email values")

    # Validate meeting
    number = payload[0]["meeting"]
    try:
        meeting = Meeting.objects.get(number=number)
    except Meeting.DoesNotExist:
        return _http_err(400, "Invalid meeting value: '%s'" % (number, ))

    # Validate email
    email = payload[0]["email"]
    try:
        validate_email(email)
    except ValidationError:
        return _http_err(400, "Invalid email value: '%s'" % (email, ))

    # A Person is optional -- the registration is still recorded without one
    person = Person.objects.filter(email__address=email).first()
    if not person:
        log.log(f"api_new_meeting_registration_v2 no Person found for {email}")

    first_record = payload[0]

    # Handle a cancellation: drop the matching ticket, or the whole
    # Registration when it was the last ticket
    if first_record["cancelled"]:
        if len(payload) > 1:
            return _http_err(400, "Error. Received cancelled registration notification with more than one record. ({})".format(email))
        try:
            obj = Registration.objects.get(meeting=meeting, email=email)
        except Registration.DoesNotExist:
            return _http_err(400, "Error. Received cancelled registration notification for non-existing registration. ({})".format(email))
        if obj.tickets.count() == 1:
            obj.delete()
        else:
            # Records are dicts, so use subscripting. Attribute access
            # (registration.reg_type) raised AttributeError here.
            obj.tickets.filter(
                attendance_type__slug=first_record["reg_type"],
                ticket_type__slug=first_record["ticket_type"]).delete()
        return HttpResponse('Success', status=202, content_type='text/plain')

    # Create or update the Registration. Only real model fields are synced;
    # is_nomcom_volunteer is payload-only and must not be setattr'd here.
    update_fields = ["first_name", "last_name", "affiliation", "country_code", "checkedin"]
    try:
        reg = Registration.objects.get(meeting=meeting, email=email)
        for key, value in first_record.items():
            if key in update_fields:
                setattr(reg, key, value)
        reg.save()
        # NOTE(review): person is only set on create; an existing reg whose
        # email later gains a Person keeps person=None -- confirm intended.
    except Registration.DoesNotExist:
        reg = Registration.objects.create(
            meeting_id=meeting.pk,
            person=person,
            email=email,
            first_name=first_record["first_name"],
            last_name=first_record["last_name"],
            affiliation=first_record["affiliation"],
            country_code=first_record["country_code"],
            checkedin=first_record["checkedin"])

    # Replace the ticket set with what the payload describes
    reg.tickets.all().delete()
    for record in payload:
        reg.tickets.create(
            attendance_type_id=record["reg_type"],
            ticket_type_id=record["ticket_type"],
        )

    # Handle nomcom volunteer. Decided by the first record; previously the
    # ticket loop reused the name `registration`, so the *last* record decided.
    if first_record["is_nomcom_volunteer"] and person:
        try:
            nomcom = NomCom.objects.get(is_accepting_volunteers=True)
        except (NomCom.DoesNotExist, NomCom.MultipleObjectsReturned):
            nomcom = None
        if nomcom:
            Volunteer.objects.get_or_create(
                nomcom=nomcom,
                person=person,
                defaults={
                    "affiliation": first_record["affiliation"],
                    "origin": "registration"
                }
            )

    return HttpResponse('Success', status=202, content_type='text/plain')
class MeetingFilter(admin.SimpleListFilter):
    """Sidebar filter restricting registration changelists to one IETF meeting."""
    title = 'Meeting Filter'
    parameter_name = 'meeting_id'

    def lookups(self, request, model_admin):
        # Offer only plenary IETF meetings as filter choices
        return Meeting.objects.filter(type='ietf').values_list('id', 'number')

    def queryset(self, request, queryset):
        # Narrow to the selected meeting; unfiltered when nothing selected
        if self.value():
            return queryset.filter(meeting__id=self.value())
        return queryset


class RegistrationAdmin(admin.ModelAdmin):
    model = Registration
    list_filter = [MeetingFilter, ]
    list_display = ['meeting', 'first_name', 'last_name', 'affiliation', 'country_code', 'person', 'email', ]
    search_fields = ['meeting__number', 'first_name', 'last_name', 'affiliation', 'country_code', 'email', ]
    raw_id_fields = ['person']


admin.site.register(Registration, RegistrationAdmin)


class RegistrationTicketAdmin(admin.ModelAdmin):
    model = RegistrationTicket
    list_filter = ['attendance_type', ]
    list_display = ['registration', 'attendance_type', 'ticket_type']
    search_fields = ['registration__first_name', 'registration__last_name', 'registration__email']
    raw_id_fields = ['registration']


admin.site.register(RegistrationTicket, RegistrationTicketAdmin)
class RegistrationFactory(factory.django.DjangoModelFactory):
    """Create ietf.meeting.Registration instances for tests.

    Name and email fields default to values derived from the generated
    Person so the registration record is self-consistent.
    """
    class Meta:
        model = Registration
        skip_postgeneration_save = True

    meeting = factory.SubFactory(MeetingFactory)
    person = factory.SubFactory(PersonFactory)
    # Store the address string: Registration.email is an EmailField, while
    # person.email() returns an Email model instance (cf. the API tests,
    # which use person.email().address).
    email = factory.LazyAttribute(lambda obj: obj.person.email().address)
    first_name = factory.LazyAttribute(lambda obj: obj.person.first_name())
    last_name = factory.LazyAttribute(lambda obj: obj.person.last_name())
    affiliation = factory.Faker('company')
    country_code = factory.Faker('country_code')
    attended = False
    checkedin = False


class RegistrationTicketFactory(factory.django.DjangoModelFactory):
    """Create a RegistrationTicket; defaults to an onsite week pass."""
    class Meta:
        model = RegistrationTicket
        skip_postgeneration_save = True

    registration = factory.SubFactory(RegistrationFactory)
    attendance_type_id = 'onsite'
    ticket_type_id = 'week_pass'
Migration(migrations.Migration): + + dependencies = [ + ("name", "0017_populate_new_reg_names"), + ("person", "0004_alter_person_photo_alter_person_photo_thumb"), + ("meeting", "0010_alter_floorplan_image_alter_meetinghost_logo"), + ] + + operations = [ + migrations.CreateModel( + name="Registration", + fields=[ + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ("first_name", models.CharField(max_length=255)), + ("last_name", models.CharField(max_length=255)), + ("affiliation", models.CharField(blank=True, max_length=255)), + ("country_code", models.CharField(max_length=2)), + ("email", models.EmailField(blank=True, max_length=254, null=True)), + ("attended", models.BooleanField(default=False)), + ("checkedin", models.BooleanField(default=False)), + ( + "meeting", + ietf.utils.models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + to="meeting.meeting", + ), + ), + ( + "person", + ietf.utils.models.ForeignKey( + blank=True, + null=True, + on_delete=django.db.models.deletion.PROTECT, + to="person.person", + ), + ), + ], + ), + migrations.CreateModel( + name="RegistrationTicket", + fields=[ + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ( + "attendance_type", + ietf.utils.models.ForeignKey( + on_delete=django.db.models.deletion.PROTECT, + to="name.attendancetypename", + ), + ), + ( + "registration", + ietf.utils.models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="tickets", + to="meeting.registration", + ), + ), + ( + "ticket_type", + ietf.utils.models.ForeignKey( + on_delete=django.db.models.deletion.PROTECT, + to="name.registrationtickettypename", + ), + ), + ], + ), + ] diff --git a/ietf/meeting/models.py b/ietf/meeting/models.py index 84d151c310..003f8cd76e 100644 --- a/ietf/meeting/models.py +++ b/ietf/meeting/models.py @@ -1,5 +1,5 @@ -# Copyright The IETF Trust 
class RegistrationManager(models.Manager):
    """Custom manager adding attendance-type helpers for Registration."""

    def onsite(self):
        # Registrations holding at least one onsite ticket. The reverse
        # lookup is "tickets" -- the related_name declared on
        # RegistrationTicket.registration -- so the default
        # "registrationticket__..." lookup does not exist and raised
        # FieldError.
        return self.get_queryset().filter(tickets__attendance_type__slug='onsite')

    def remote(self):
        # Remote-only registrations: a remote ticket and no onsite ticket.
        return self.get_queryset().filter(
            tickets__attendance_type__slug='remote'
        ).exclude(tickets__attendance_type__slug='onsite')


class Registration(models.Model):
    """Registration attendee records from the IETF registration system"""
    meeting = ForeignKey(Meeting)
    first_name = models.CharField(max_length=255)
    last_name = models.CharField(max_length=255)
    affiliation = models.CharField(blank=True, max_length=255)
    country_code = models.CharField(max_length=2)  # ISO 3166
    person = ForeignKey(Person, blank=True, null=True, on_delete=models.PROTECT)
    email = models.EmailField(blank=True, null=True)
    # attended was used prior to the introduction of the ietf.meeting.Attended
    # model and is still used by Meeting.get_attendance() for older meetings.
    # It should not be used except for dealing with legacy data.
    attended = models.BooleanField(default=False)
    # checkedin indicates that the badge was picked up
    checkedin = models.BooleanField(default=False)

    # custom manager
    objects = RegistrationManager()

    def __str__(self):
        return "{} {}".format(self.first_name, self.last_name)


class RegistrationTicket(models.Model):
    """One ticket (attendance type + ticket type) held by a Registration."""
    registration = ForeignKey(Registration, related_name='tickets')
    attendance_type = ForeignKey(AttendanceTypeName, on_delete=models.PROTECT)
    ticket_type = ForeignKey(RegistrationTicketTypeName, on_delete=models.PROTECT)

    def __str__(self):
        return "{}:{}".format(self.attendance_type, self.ticket_type)
"meeting": ALL_WITH_RELATIONS, + "person": ALL_WITH_RELATIONS, + } +api.meeting.register(RegistrationResource()) + +class RegistrationTicketResource(ModelResource): + registration = ToOneField(RegistrationResource, 'registration') + class Meta: + queryset = RegistrationTicket.objects.all() + serializer = api.Serializer() + cache = SimpleCache() + #resource_name = 'registrationticket' + ordering = ['id', ] + filtering = { + "id": ALL, + "ticket_type": ALL, + "attendance_type": ALL, + "registration": ALL_WITH_RELATIONS, + } +api.meeting.register(RegistrationTicketResource()) diff --git a/ietf/meeting/tasks.py b/ietf/meeting/tasks.py index 2b7c2fca91..f1b7e8e079 100644 --- a/ietf/meeting/tasks.py +++ b/ietf/meeting/tasks.py @@ -9,6 +9,7 @@ from .models import Meeting from .utils import generate_proceedings_content from .views import generate_agenda_data +from .utils import migrate_registrations, check_migrate_registrations @shared_task @@ -17,6 +18,22 @@ def agenda_data_refresh(): @shared_task +def migrate_registrations_task(initial=False): + """ Migrate ietf.stats.MeetingRegistration to ietf.meeting.Registration + If initial is True, migrate all meetings otherwise only future meetings. + This function is idempotent. It can be run regularly from cron. 
+ """ + migrate_registrations(initial=initial) + + +@shared_task +def check_migrate_registrations_task(): + """ Compare MeetingRegistration with Registration to ensure + all records migrated + """ + check_migrate_registrations() + + def proceedings_content_refresh_task(*, all=False): """Refresh meeting proceedings cache diff --git a/ietf/meeting/tests_utils.py b/ietf/meeting/tests_utils.py new file mode 100644 index 0000000000..4bda3a65fa --- /dev/null +++ b/ietf/meeting/tests_utils.py @@ -0,0 +1,91 @@ +# Copyright The IETF Trust 2025, All Rights Reserved +# -*- coding: utf-8 -*- + +import datetime +import debug # pyflakes: ignore +from ietf.meeting.factories import MeetingFactory # RegistrationFactory, RegistrationTicketFactory +from ietf.meeting.models import Registration +from ietf.meeting.utils import migrate_registrations, get_preferred +from ietf.stats.factories import MeetingRegistrationFactory +from ietf.utils.test_utils import TestCase + + +class MigrateRegistrationsTests(TestCase): + def test_new_meeting_registration(self): + meeting = MeetingFactory(type_id='ietf', number='109') + reg = MeetingRegistrationFactory(meeting=meeting, reg_type='onsite', ticket_type='week_pass') + self.assertEqual(Registration.objects.count(), 0) + migrate_registrations(initial=True) + self.assertEqual(Registration.objects.count(), 1) + new = Registration.objects.first() + self.assertEqual(new.first_name, reg.first_name) + self.assertEqual(new.last_name, reg.last_name) + self.assertEqual(new.email, reg.email) + self.assertEqual(new.person, reg.person) + self.assertEqual(new.meeting, meeting) + self.assertEqual(new.affiliation, reg.affiliation) + self.assertEqual(new.country_code, reg.country_code) + self.assertEqual(new.checkedin, reg.checkedin) + self.assertEqual(new.attended, reg.attended) + + def test_migrate_non_initial(self): + # with only old meeting + meeting = MeetingFactory(type_id='ietf', number='109') + MeetingRegistrationFactory(meeting=meeting, reg_type='onsite', 
ticket_type='week_pass') + self.assertEqual(Registration.objects.count(), 0) + migrate_registrations() + self.assertEqual(Registration.objects.count(), 0) + # with new meeting + new_meeting = MeetingFactory(type_id='ietf', number='150') + new_meeting.date = datetime.date.today() + datetime.timedelta(days=30) + new_meeting.save() + MeetingRegistrationFactory(meeting=new_meeting, reg_type='onsite', ticket_type='week_pass') + migrate_registrations() + self.assertEqual(Registration.objects.count(), 1) + + def test_updated_meeting_registration(self): + # setup test initial conditions + meeting = MeetingFactory(type_id='ietf', number='109') + reg = MeetingRegistrationFactory(meeting=meeting, reg_type='onsite', ticket_type='week_pass') + migrate_registrations(initial=True) + # change first_name and save + original = reg.first_name + reg.first_name = 'NewBob' + reg.save() + new = Registration.objects.first() + self.assertEqual(new.first_name, original) + migrate_registrations(initial=True) + new.refresh_from_db() + self.assertEqual(new.first_name, reg.first_name) + + def test_additional_ticket(self): + # setup test initial conditions + meeting = MeetingFactory(type_id='ietf', number='109') + reg = MeetingRegistrationFactory(meeting=meeting, reg_type='onsite', ticket_type='week_pass') + migrate_registrations(initial=True) + new = Registration.objects.first() + self.assertEqual(new.tickets.count(), 1) + # add a second ticket + reg.reg_type = 'remote' + reg.pk = None + reg.save() + migrate_registrations(initial=True) + # new.refresh_from_db() + self.assertEqual(new.tickets.count(), 2) + + def test_cancelled_registration(self): + # setup test initial conditions + meeting = MeetingFactory(type_id='ietf', number='109') + reg = MeetingRegistrationFactory(meeting=meeting, reg_type='onsite', ticket_type='week_pass') + migrate_registrations(initial=True) + reg.delete() + # do test + migrate_registrations(initial=True) + self.assertEqual(Registration.objects.count(), 0) + + def 
def get_preferred(regs):
    """ Return a preferred regular registration (non hackathon) from
    a list of registrations if there is one, otherwise the last one.
    Returns None for an empty list (previously raised UnboundLocalError).
    """
    fallback = None
    for reg in regs:
        fallback = reg
        if reg.reg_type in ['onsite', 'remote']:
            return reg
    return fallback


def _sync_tickets(reg, meeting_regs):
    """Create one ticket on reg for each source MeetingRegistration record."""
    for meeting_reg in meeting_regs:
        reg.tickets.create(
            attendance_type_id=meeting_reg.reg_type or 'unknown',
            ticket_type_id=meeting_reg.ticket_type or 'unknown',
        )


def migrate_registrations(initial=False):
    """ Migrate ietf.stats.MeetingRegistration to ietf.meeting.Registration
    If initial is True, migrate all meetings otherwise only future meetings.
    This function is idempotent. It can be run regularly from cron.
    """
    if initial:
        meetings = Meeting.objects.filter(type='ietf')
        # Normalize legacy reg/ticket type values before migrating
        MeetingRegistration.objects.filter(reg_type='hackathon').update(reg_type='hackathon_remote')
        MeetingRegistration.objects.filter(ticket_type='full_week_pass').update(ticket_type='week_pass')
        # One-off fixes for specific known-bad legacy records
        MeetingRegistration.objects.filter(pk=49645).update(ticket_type='one_day')
        MeetingRegistration.objects.filter(pk=50804).update(ticket_type='week_pass')
        MeetingRegistration.objects.filter(pk=42386).update(ticket_type='week_pass')
        MeetingRegistration.objects.filter(pk=42782).update(ticket_type='one_day')
        MeetingRegistration.objects.filter(pk=43464).update(ticket_type='week_pass')
    else:
        # still process records during week of meeting
        one_week_ago = datetime.date.today() - datetime.timedelta(days=7)
        meetings = Meeting.objects.filter(type='ietf', date__gt=one_week_ago)

    for meeting in meetings:
        # gather all MeetingRegistrations by person (email)
        emails = {}
        for meeting_reg in MeetingRegistration.objects.filter(meeting=meeting):
            emails.setdefault(meeting_reg.email, []).append(meeting_reg)
        # process each person's registrations
        for email, meeting_regs in emails.items():
            preferred_reg = get_preferred(meeting_regs)
            reg, created = Registration.objects.get_or_create(
                meeting=meeting,
                email=email,
                defaults={
                    'first_name': preferred_reg.first_name,
                    'last_name': preferred_reg.last_name,
                    'affiliation': preferred_reg.affiliation,
                    'country_code': preferred_reg.country_code,
                    'person': preferred_reg.person,
                    'attended': preferred_reg.attended,
                    'checkedin': preferred_reg.checkedin,
                }
            )
            if created:
                _sync_tickets(reg, meeting_regs)
            else:
                # replace tickets only when they differ from the source data
                reg_tickets = {(t.attendance_type_id, t.ticket_type_id) for t in reg.tickets.all()}
                source_tickets = {(mr.reg_type or 'unknown', mr.ticket_type or 'unknown') for mr in meeting_regs}
                if reg_tickets != source_tickets:
                    reg.tickets.all().delete()
                    _sync_tickets(reg, meeting_regs)
                # sync scalar fields from the preferred source record
                fields_to_check = [
                    'first_name', 'last_name', 'affiliation', 'country_code',
                    'attended', 'checkedin'
                ]
                changed = False
                for field in fields_to_check:
                    new_value = getattr(preferred_reg, field)
                    if getattr(reg, field) != new_value:
                        setattr(reg, field, new_value)
                        changed = True
                if changed:
                    reg.save()
        # delete cancelled Registrations (present here, gone from the source)
        meeting_reg_email_set = set(emails.keys())
        reg_email_set = set(Registration.objects.filter(meeting=meeting).values_list('email', flat=True))
        for email in reg_email_set - meeting_reg_email_set:
            Registration.objects.filter(meeting=meeting, email=email).delete()


def check_migrate_registrations():
    """A simple utility function to test that all MeetingRegistration
    records got migrated.

    Raises Registration.DoesNotExist for a missing registration and
    AssertionError for a missing ticket.
    """
    for mr in MeetingRegistration.objects.all():
        reg = Registration.objects.get(meeting=mr.meeting, email=mr.email)
        # Explicit raise instead of a bare assert so the check still runs
        # under python -O (asserts are stripped there).
        if not reg.tickets.filter(
                attendance_type__slug=mr.reg_type or 'unknown',
                ticket_type__slug=mr.ticket_type or 'unknown').exists():
            raise AssertionError(
                f"Missing ticket for {mr.email} at meeting {mr.meeting}"
            )
ReviewResultName, ReviewTypeName, @@ -137,6 +139,7 @@ class ProceedingsMaterialTypeNameAdmin(NameAdmin): admin.site.register(AgendaFilterTypeName, NameAdmin) admin.site.register(AgendaTypeName, NameAdmin) admin.site.register(AppealArtifactTypeName, NameAdmin) +admin.site.register(AttendanceTypeName, NameAdmin) admin.site.register(BallotPositionName, NameAdmin) admin.site.register(ConstraintName, NameAdmin) admin.site.register(ContinentName, NameAdmin) @@ -158,6 +161,7 @@ class ProceedingsMaterialTypeNameAdmin(NameAdmin): admin.site.register(LiaisonStatementTagName, NameAdmin) admin.site.register(MeetingTypeName, NameAdmin) admin.site.register(NomineePositionStateName, NameAdmin) +admin.site.register(RegistrationTicketTypeName, NameAdmin) admin.site.register(ReviewRequestStateName, NameAdmin) admin.site.register(ReviewAssignmentStateName, NameAdmin) admin.site.register(ReviewResultName, NameAdmin) diff --git a/ietf/name/fixtures/names.json b/ietf/name/fixtures/names.json index 96273fbc35..15ae71d849 100644 --- a/ietf/name/fixtures/names.json +++ b/ietf/name/fixtures/names.json @@ -6827,6 +6827,66 @@ "model": "name.appealartifacttypename", "pk": "response" }, + { + "fields": { + "desc": "", + "name": "ANRW Onsite", + "order": 0, + "used": true + }, + "model": "name.attendancetypename", + "pk": "anrw_onsite" + }, + { + "fields": { + "desc": "", + "name": "Hackathon Onsite", + "order": 0, + "used": true + }, + "model": "name.attendancetypename", + "pk": "hackathon_onsite" + }, + { + "fields": { + "desc": "", + "name": "Hackathon Remote", + "order": 0, + "used": true + }, + "model": "name.attendancetypename", + "pk": "hackathon_remote" + }, + { + "fields": { + "desc": "", + "name": "Onsite", + "order": 0, + "used": true + }, + "model": "name.attendancetypename", + "pk": "onsite" + }, + { + "fields": { + "desc": "", + "name": "Remote", + "order": 0, + "used": true + }, + "model": "name.attendancetypename", + "pk": "remote" + }, + { + "fields": { + "desc": "", + "name": 
"Unknown", + "order": 0, + "used": true + }, + "model": "name.attendancetypename", + "pk": "unknown" + }, { "fields": { "blocking": false, @@ -12877,6 +12937,86 @@ "model": "name.proceedingsmaterialtypename", "pk": "wiki" }, + { + "fields": { + "desc": "", + "name": "ANRW Combo", + "order": 0, + "used": true + }, + "model": "name.registrationtickettypename", + "pk": "anrw_combo" + }, + { + "fields": { + "desc": "", + "name": "ANRW Only", + "order": 0, + "used": true + }, + "model": "name.registrationtickettypename", + "pk": "anrw_only" + }, + { + "fields": { + "desc": "", + "name": "Hackathon Combo", + "order": 0, + "used": true + }, + "model": "name.registrationtickettypename", + "pk": "hackathon_combo" + }, + { + "fields": { + "desc": "", + "name": "Hackathon Only", + "order": 0, + "used": true + }, + "model": "name.registrationtickettypename", + "pk": "hackathon_only" + }, + { + "fields": { + "desc": "", + "name": "One Day", + "order": 0, + "used": true + }, + "model": "name.registrationtickettypename", + "pk": "one_day" + }, + { + "fields": { + "desc": "", + "name": "Student", + "order": 0, + "used": true + }, + "model": "name.registrationtickettypename", + "pk": "student" + }, + { + "fields": { + "desc": "", + "name": "Unknown", + "order": 0, + "used": true + }, + "model": "name.registrationtickettypename", + "pk": "unknown" + }, + { + "fields": { + "desc": "", + "name": "Week Pass", + "order": 0, + "used": true + }, + "model": "name.registrationtickettypename", + "pk": "week_pass" + }, { "fields": { "desc": "The reviewer has accepted the assignment", diff --git a/ietf/name/migrations/0016_attendancetypename_registrationtickettypename.py b/ietf/name/migrations/0016_attendancetypename_registrationtickettypename.py new file mode 100644 index 0000000000..9376d3a4c6 --- /dev/null +++ b/ietf/name/migrations/0016_attendancetypename_registrationtickettypename.py @@ -0,0 +1,47 @@ +# Generated by Django 4.2.17 on 2025-01-02 18:21 + +from django.db import migrations, 
models + + +class Migration(migrations.Migration): + + dependencies = [ + ("name", "0015_last_call_name"), + ] + + operations = [ + migrations.CreateModel( + name="AttendanceTypeName", + fields=[ + ( + "slug", + models.CharField(max_length=32, primary_key=True, serialize=False), + ), + ("name", models.CharField(max_length=255)), + ("desc", models.TextField(blank=True)), + ("used", models.BooleanField(default=True)), + ("order", models.IntegerField(default=0)), + ], + options={ + "ordering": ["order", "name"], + "abstract": False, + }, + ), + migrations.CreateModel( + name="RegistrationTicketTypeName", + fields=[ + ( + "slug", + models.CharField(max_length=32, primary_key=True, serialize=False), + ), + ("name", models.CharField(max_length=255)), + ("desc", models.TextField(blank=True)), + ("used", models.BooleanField(default=True)), + ("order", models.IntegerField(default=0)), + ], + options={ + "ordering": ["order", "name"], + "abstract": False, + }, + ), + ] diff --git a/ietf/name/migrations/0017_populate_new_reg_names.py b/ietf/name/migrations/0017_populate_new_reg_names.py new file mode 100644 index 0000000000..51954885c0 --- /dev/null +++ b/ietf/name/migrations/0017_populate_new_reg_names.py @@ -0,0 +1,39 @@ +# Generated by Django 4.2.17 on 2025-01-02 18:26 + +from django.db import migrations + +def forward(apps, schema_editor): + AttendanceTypeName = apps.get_model('name', 'AttendanceTypeName') + RegistrationTicketTypeName = apps.get_model('name', 'RegistrationTicketTypeName') + AttendanceTypeName.objects.create(slug='onsite', name='Onsite') + AttendanceTypeName.objects.create(slug='remote', name='Remote') + AttendanceTypeName.objects.create(slug='hackathon_onsite', name='Hackathon Onsite') + AttendanceTypeName.objects.create(slug='hackathon_remote', name='Hackathon Remote') + AttendanceTypeName.objects.create(slug='anrw_onsite', name='ANRW Onsite') + AttendanceTypeName.objects.create(slug='unknown', name='Unknown') + 
RegistrationTicketTypeName.objects.create(slug='week_pass', name='Week Pass') + RegistrationTicketTypeName.objects.create(slug='one_day', name='One Day') + RegistrationTicketTypeName.objects.create(slug='student', name='Student') + RegistrationTicketTypeName.objects.create(slug='hackathon_only', name='Hackathon Only') + RegistrationTicketTypeName.objects.create(slug='hackathon_combo', name='Hackathon Combo') + RegistrationTicketTypeName.objects.create(slug='anrw_only', name='ANRW Only') + RegistrationTicketTypeName.objects.create(slug='anrw_combo', name='ANRW Combo') + RegistrationTicketTypeName.objects.create(slug='unknown', name='Unknown') + + +def reverse(apps, schema_editor): + AttendanceTypeName = apps.get_model('name', 'AttendanceTypeName') + RegistrationTicketTypeName = apps.get_model('name', 'RegistrationTicketTypeName') + AttendanceTypeName.objects.delete() + RegistrationTicketTypeName.objects.delete() + + +class Migration(migrations.Migration): + + dependencies = [ + ("name", "0016_attendancetypename_registrationtickettypename"), + ] + + operations = [ + migrations.RunPython(forward, reverse), + ] diff --git a/ietf/name/models.py b/ietf/name/models.py index 8c2520a489..0e87d43548 100644 --- a/ietf/name/models.py +++ b/ietf/name/models.py @@ -151,6 +151,9 @@ class SlideSubmissionStatusName(NameModel): "Pending, Accepted, Rejected" class TelechatAgendaSectionName(NameModel): """roll_call, minutes, action_items""" - class AppealArtifactTypeName(NameModel): pass +class AttendanceTypeName(NameModel): + """onsite, remote, hackathon_onsite, hackathon_remote""" +class RegistrationTicketTypeName(NameModel): + """week, one_day, student""" diff --git a/ietf/name/resources.py b/ietf/name/resources.py index dffa7669db..0cb0e41e0b 100644 --- a/ietf/name/resources.py +++ b/ietf/name/resources.py @@ -19,7 +19,7 @@ RoleName, RoomResourceName, SessionStatusName, StdLevelName, StreamName, TimeSlotTypeName, TopicAudienceName, ReviewerQueuePolicyName, TimerangeName, 
ExtResourceTypeName, ExtResourceName, SlideSubmissionStatusName, ProceedingsMaterialTypeName, SessionPurposeName, TelechatAgendaSectionName, - AppealArtifactTypeName ) + AppealArtifactTypeName, AttendanceTypeName, RegistrationTicketTypeName ) class TimeSlotTypeNameResource(ModelResource): class Meta: @@ -752,3 +752,33 @@ class Meta: "order": ALL, } api.name.register(AppealArtifactTypeNameResource()) + + +class AttendanceTypeNameResource(ModelResource): + class Meta: + cache = SimpleCache() + queryset = AttendanceTypeName.objects.all() + serializer = api.Serializer() + filtering = { + "slug": ALL, + "name": ALL, + "desc": ALL, + "used": ALL, + "order": ALL, + } +api.name.register(AttendanceTypeNameResource()) + + +class RegistrationTicketTypeNameResource(ModelResource): + class Meta: + cache = SimpleCache() + queryset = RegistrationTicketTypeName.objects.all() + serializer = api.Serializer() + filtering = { + "slug": ALL, + "name": ALL, + "desc": ALL, + "used": ALL, + "order": ALL, + } +api.name.register(RegistrationTicketTypeNameResource()) From d5e3840d4dbeec37415fae89ab578b1b71046b6a Mon Sep 17 00:00:00 2001 From: Jennifer Richards Date: Tue, 15 Apr 2025 11:17:33 -0400 Subject: [PATCH 063/405] fix: 500->400 for bad /api/v1/ requests (#8802) * fix: 400->500 for bad /api/v1/ requests * chore: remove unused import --- ietf/api/views.py | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/ietf/api/views.py b/ietf/api/views.py index 4cc3560627..e5b1f96aed 100644 --- a/ietf/api/views.py +++ b/ietf/api/views.py @@ -18,7 +18,7 @@ from django.contrib.auth.models import User from django.core.exceptions import ValidationError from django.core.validators import validate_email -from django.http import HttpResponse, Http404, JsonResponse +from django.http import HttpResponse, Http404, JsonResponse, HttpResponseBadRequest from django.shortcuts import render, get_object_or_404 from django.urls import reverse from django.utils.decorators import 
method_decorator @@ -68,7 +68,10 @@ def top_level(request): } serializer = Serializer() - desired_format = determine_format(request, serializer) + try: + desired_format = determine_format(request, serializer) + except BadRequest as err: + return HttpResponseBadRequest(str(err)) options = {} @@ -76,10 +79,12 @@ def top_level(request): callback = request.GET.get('callback', 'callback') if not is_valid_jsonp_callback_value(callback): - raise BadRequest('JSONP callback name is invalid.') + return HttpResponseBadRequest("JSONP callback name is invalid") options['callback'] = callback + # This might raise UnsupportedFormat, but that indicates a real server misconfiguration + # so let it bubble up unhandled and trigger a 500 / email to admins. serialized = serializer.serialize(available_resources, desired_format, options) return HttpResponse(content=serialized, content_type=build_content_type(desired_format)) From 86988eb6072b1331c393f5f7999350f45d653f04 Mon Sep 17 00:00:00 2001 From: Nicolas Giard Date: Tue, 15 Apr 2025 11:45:48 -0400 Subject: [PATCH 064/405] fix: remove credentials omit from agenda fetch call (#8779) --- client/agenda/store.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/client/agenda/store.js b/client/agenda/store.js index 359c5fbf05..b5498303a6 100644 --- a/client/agenda/store.js +++ b/client/agenda/store.js @@ -141,7 +141,7 @@ export const useAgendaStore = defineStore('agenda', { meetingNumber = meetingData.meetingNumber } - const resp = await fetch(`/api/meeting/${meetingNumber}/agenda-data`, { credentials: 'omit' }) + const resp = await fetch(`/api/meeting/${meetingNumber}/agenda-data`) if (!resp.ok) { throw new Error(resp.statusText) } From be06d7c4d34a6fc18bc30d4a064dc3ffcf18c2cf Mon Sep 17 00:00:00 2001 From: Robert Sparks Date: Wed, 16 Apr 2025 12:00:04 -0500 Subject: [PATCH 065/405] chore: reorder migrations (#8804) * chore: reorder migrations * chore: copyright --- ...ationticket.py => 
0012_registration_registrationticket.py} | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) rename ietf/meeting/migrations/{0011_registration_registrationticket.py => 0012_registration_registrationticket.py} (96%) diff --git a/ietf/meeting/migrations/0011_registration_registrationticket.py b/ietf/meeting/migrations/0012_registration_registrationticket.py similarity index 96% rename from ietf/meeting/migrations/0011_registration_registrationticket.py rename to ietf/meeting/migrations/0012_registration_registrationticket.py index fc161f303d..c555f52e8b 100644 --- a/ietf/meeting/migrations/0011_registration_registrationticket.py +++ b/ietf/meeting/migrations/0012_registration_registrationticket.py @@ -1,4 +1,4 @@ -# Generated by Django 4.2.20 on 2025-03-16 08:37 +# Copyright The IETF Trust 2025, All Rights Reserved from django.db import migrations, models import django.db.models.deletion @@ -10,7 +10,7 @@ class Migration(migrations.Migration): dependencies = [ ("name", "0017_populate_new_reg_names"), ("person", "0004_alter_person_photo_alter_person_photo_thumb"), - ("meeting", "0010_alter_floorplan_image_alter_meetinghost_logo"), + ("meeting", "0011_alter_slidesubmission_doc"), ] operations = [ From 3d69b2c03e24d78f28a824e04f2254ff33f651d7 Mon Sep 17 00:00:00 2001 From: Ryan Cross Date: Wed, 16 Apr 2025 12:46:27 -0700 Subject: [PATCH 066/405] feat: add API for related emails (#8671) * feat: add api for related emails. 
Fixes #8275 * fix: switch from querystring parameter to URL parameter * fix: exclude null character in URL regex --------- Co-authored-by: Robert Sparks --- ietf/api/tests.py | 33 +++++++++++++++++++++++++++++++++ ietf/api/urls.py | 2 ++ ietf/api/views.py | 25 +++++++++++++++++++++++++ 3 files changed, 60 insertions(+) diff --git a/ietf/api/tests.py b/ietf/api/tests.py index 23e9ff0103..809b45cc2b 100644 --- a/ietf/api/tests.py +++ b/ietf/api/tests.py @@ -1164,6 +1164,39 @@ def test_active_email_list(self): self.assertCountEqual(result.keys(), ["addresses"]) self.assertCountEqual(result["addresses"], Email.objects.filter(active=True).values_list("address", flat=True)) + @override_settings(APP_API_TOKENS={"ietf.api.views.related_email_list": ["valid-token"]}) + def test_related_email_list(self): + joe = EmailFactory(address='joe@work.com') + EmailFactory(address='joe@home.com', person=joe.person) + EmailFactory(address='jòe@spain.com', person=joe.person) + url = urlreverse("ietf.api.views.related_email_list", kwargs={'email': 'joe@home.com'}) + # no api key + r = self.client.get(url, headers={}) + self.assertEqual(r.status_code, 403) + # invalid api key + r = self.client.get(url, headers={"X-Api-Key": "not-the-valid-token"}) + self.assertEqual(r.status_code, 403) + # wrong method + r = self.client.post(url, headers={"X-Api-Key": "valid-token"}) + self.assertEqual(r.status_code, 405) + # valid + r = self.client.get(url, headers={"X-Api-Key": "valid-token"}) + self.assertEqual(r.status_code, 200) + self.assertEqual(r.headers["Content-Type"], "application/json") + result = json.loads(r.content) + self.assertCountEqual(result.keys(), ["addresses"]) + self.assertCountEqual(result["addresses"], joe.person.email_set.exclude(address='joe@home.com').values_list("address", flat=True)) + # non-ascii + non_ascii_url = urlreverse("ietf.api.views.related_email_list", kwargs={'email': 'jòe@spain.com'}) + r = self.client.get(non_ascii_url, headers={"X-Api-Key": "valid-token"}) + 
self.assertEqual(r.status_code, 200) + result = json.loads(r.content) + self.assertTrue('joe@home.com' in result["addresses"]) + # email not found + not_found_url = urlreverse("ietf.api.views.related_email_list", kwargs={'email': 'nobody@nowhere.com'}) + r = self.client.get(not_found_url, headers={"X-Api-Key": "valid-token"}) + self.assertEqual(r.status_code, 404) + @override_settings(APP_API_TOKENS={"ietf.api.views.role_holder_addresses": ["valid-token"]}) def test_role_holder_addresses(self): url = urlreverse("ietf.api.views.role_holder_addresses") diff --git a/ietf/api/urls.py b/ietf/api/urls.py index 2fcaba6094..bafd5c5b76 100644 --- a/ietf/api/urls.py +++ b/ietf/api/urls.py @@ -73,6 +73,8 @@ url(r'^openid/', include('oidc_provider.urls', namespace='oidc_provider')), # Email alias listing url(r'^person/email/$', api_views.active_email_list), + # Related Email listing + url(r'^person/email/(?P[^/\x00]+)/related/$', api_views.related_email_list), # Draft submission API url(r'^submit/?$', submit_views.api_submit_tombstone), # Draft upload API diff --git a/ietf/api/views.py b/ietf/api/views.py index e5b1f96aed..97b9793048 100644 --- a/ietf/api/views.py +++ b/ietf/api/views.py @@ -691,6 +691,31 @@ def active_email_list(request): return HttpResponse(status=405) +@requires_api_token +@csrf_exempt +def related_email_list(request, email): + """Given an email address, returns all other email addresses known + to Datatracker, via Person object + """ + def _http_err(code, text): + return HttpResponse(text, status=code, content_type="text/plain") + + if request.method == "GET": + try: + email_obj = Email.objects.get(address=email) + except Email.DoesNotExist: + return _http_err(404, "Email not found") + person = email_obj.person + if not person: + return JsonResponse({"addresses": []}) + return JsonResponse( + { + "addresses": list(person.email_set.exclude(address=email).values_list("address", flat=True)), + } + ) + return HttpResponse(status=405) + + @requires_api_token 
def role_holder_addresses(request): if request.method == "GET": From 4a5716e2e89044981905e9af08775ba2f5386fa7 Mon Sep 17 00:00:00 2001 From: Nicolas Giard Date: Fri, 18 Apr 2025 19:39:32 -0400 Subject: [PATCH 067/405] ci: Update build workflow to handle AWS breaking things for no reason --- .github/workflows/build.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 36439aa6cf..e5c627c950 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -235,7 +235,7 @@ jobs: docker run --rm --name collectstatics -v $(pwd):/workspace ghcr.io/ietf-tools/datatracker-app-base:${{ env.TARGET_BASE }} sh dev/build/collectstatics.sh echo "Pushing statics..." cd static - aws s3 sync . s3://static/dt/$PKG_VERSION --only-show-errors + aws s3 sync . s3://static/dt/$PKG_VERSION --only-show-errors --checksum-algorithm CRC32 - name: Augment dockerignore for docker image build env: From 24042631348135dd8b345715526815f245f9ad51 Mon Sep 17 00:00:00 2001 From: Jennifer Richards Date: Mon, 21 Apr 2025 13:00:32 -0300 Subject: [PATCH 068/405] refactor: smtpd->aiosmtpd (#8805) * refactor: smtpd -> aiosmtpd * test: set mock return value for EmailOnFailureCommandTests The test has been working, but in a broken way, for as long as it has existed. The smtpd-based test_smtpserver was masking an exception that did not interfere with the test's effectiveness. 
* test: increase SMTP.line_length_limit --- ietf/utils/management/tests.py | 2 +- ietf/utils/test_runner.py | 2 +- ietf/utils/test_smtpserver.py | 116 ++++++++++++--------------------- requirements.txt | 1 + 4 files changed, 43 insertions(+), 78 deletions(-) diff --git a/ietf/utils/management/tests.py b/ietf/utils/management/tests.py index e94c39354f..d704999cd1 100644 --- a/ietf/utils/management/tests.py +++ b/ietf/utils/management/tests.py @@ -12,7 +12,7 @@ from ietf.utils.test_utils import TestCase -@mock.patch.object(EmailOnFailureCommand, 'handle') +@mock.patch.object(EmailOnFailureCommand, 'handle', return_value=None) class EmailOnFailureCommandTests(TestCase): def test_calls_handle(self, handle_method): call_command(EmailOnFailureCommand()) diff --git a/ietf/utils/test_runner.py b/ietf/utils/test_runner.py index a77377ffb5..c06e7876db 100644 --- a/ietf/utils/test_runner.py +++ b/ietf/utils/test_runner.py @@ -863,7 +863,7 @@ def setup_test_environment(self, **kwargs): try: # remember the value so ietf.utils.mail.send_smtp() will use the same ietf.utils.mail.SMTP_ADDR['port'] = base + offset - self.smtpd_driver = SMTPTestServerDriver((ietf.utils.mail.SMTP_ADDR['ip4'],ietf.utils.mail.SMTP_ADDR['port']),None) + self.smtpd_driver = SMTPTestServerDriver(ietf.utils.mail.SMTP_ADDR['ip4'],ietf.utils.mail.SMTP_ADDR['port'], None) self.smtpd_driver.start() print((" Running an SMTP test server on %(ip4)s:%(port)s to catch outgoing email." 
% ietf.utils.mail.SMTP_ADDR)) break diff --git a/ietf/utils/test_smtpserver.py b/ietf/utils/test_smtpserver.py index 66675aa0b1..40da758d66 100644 --- a/ietf/utils/test_smtpserver.py +++ b/ietf/utils/test_smtpserver.py @@ -1,92 +1,56 @@ -# Copyright The IETF Trust 2014-2020, All Rights Reserved +# Copyright The IETF Trust 2014-2025, All Rights Reserved # -*- coding: utf-8 -*- +from aiosmtpd.controller import Controller +from aiosmtpd.smtp import SMTP +from email.utils import parseaddr +from typing import Optional -import smtpd -import threading -import asyncore -import debug # pyflakes:ignore +class SMTPTestHandler: -class AsyncCoreLoopThread(object): + def __init__(self, inbox: list): + self.inbox = inbox - def wrap_loop(self, exit_condition, timeout=1.0, use_poll=False, map=None): - if map is None: - map = asyncore.socket_map - while map and not exit_condition: - asyncore.loop(timeout=1.0, use_poll=False, map=map, count=1) + async def handle_DATA(self, server, session, envelope): + """Handle the DATA command and 'deliver' the message""" - def start(self): - """Start the listening service""" - self.exit_condition = [] - kwargs={'exit_condition':self.exit_condition,'timeout':1.0} - self.thread = threading.Thread(target=self.wrap_loop, kwargs=kwargs) - self.thread.daemon = True - self.thread.daemon = True - self.thread.start() - - def stop(self): - """Stop the listening service""" - self.exit_condition.append(True) - self.thread.join() - - -class SMTPTestChannel(smtpd.SMTPChannel): + self.inbox.append(envelope.content) + # Per RFC2033: https://datatracker.ietf.org/doc/html/rfc2033.html#section-4.2 + # ...after the final ".", the server returns one reply + # for each previously successful RCPT command in the mail transaction, + # in the order that the RCPT commands were issued. Even if there were + # multiple successful RCPT commands giving the same forward-path, there + # must be one reply for each successful RCPT command. 
+ return "\n".join("250 OK" for _ in envelope.rcpt_tos) -# mail_options = ['BODY=8BITMIME', 'SMTPUTF8'] - - def smtp_RCPT(self, arg): - if not self.mailfrom: - self.push(str('503 Error: need MAIL command')) - return - arg = self._strip_command_keyword('TO:', arg) - address, __ = self._getaddr(arg) - if not address: - self.push(str('501 Syntax: RCPT TO:
    ')) - return + async def handle_RCPT(self, server, session, envelope, address, rcpt_options): + """Handle an RCPT command and add the address to the envelope if it is acceptable""" + _, address = parseaddr(address) + if address == "": + return "501 Syntax: RCPT TO:
    " if "poison" in address: - self.push(str('550 Error: Not touching that')) - return - self.rcpt_options = [] - self.rcpttos.append(address) - self.push(str('250 Ok')) - -class SMTPTestServer(smtpd.SMTPServer): - - def __init__(self,localaddr,remoteaddr,inbox): - if inbox is not None: - self.inbox=inbox - else: - self.inbox = [] - smtpd.SMTPServer.__init__(self,localaddr,remoteaddr) + return "550 Error: Not touching that" + # At this point the address is acceptable + envelope.rcpt_tos.append(address) + return "250 OK" - def handle_accept(self): - pair = self.accept() - if pair is not None: - conn, addr = pair - #channel = SMTPTestChannel(self, conn, addr) - SMTPTestChannel(self, conn, addr) - def process_message(self, peer, mailfrom, rcpttos, data, mail_options=None, rcpt_options=None): - self.inbox.append(data) +class SMTPTestServerDriver: - -class SMTPTestServerDriver(object): - def __init__(self, localaddr, remoteaddr, inbox=None): - self.localaddr=localaddr - self.remoteaddr=remoteaddr - if inbox is not None: - self.inbox = inbox - else: - self.inbox = [] - self.thread_driver = None + def __init__(self, address: str, port: int, inbox: Optional[list] = None): + # Allow longer lines than the 1001 that RFC 5321 requires. As of 2025-04-16 the + # datatracker emits some non-compliant messages. 
+ # See https://aiosmtpd.aio-libs.org/en/latest/smtp.html + SMTP.line_length_limit = 4000 # tests start failing between 3000 and 4000 + self.controller = Controller( + hostname=address, + port=port, + handler=SMTPTestHandler(inbox=[] if inbox is None else inbox), + ) def start(self): - self.smtpserver = SMTPTestServer(self.localaddr,self.remoteaddr,self.inbox) - self.thread_driver = AsyncCoreLoopThread() - self.thread_driver.start() + self.controller.start() def stop(self): - if self.thread_driver: - self.thread_driver.stop() - + self.controller.stop() diff --git a/requirements.txt b/requirements.txt index cd93f448e2..b00a21ab49 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,6 +1,7 @@ # -*- conf-mode -*- setuptools>=51.1.0 # Require this first, to prevent later errors # +aiosmtpd>=1.4.6 argon2-cffi>=21.3.0 # For the Argon2 password hasher option beautifulsoup4>=4.11.1 # Only used in tests bibtexparser>=1.2.0 # Only used in tests From 2652c96c8db968a021f6f95953d8ab2337aa167e Mon Sep 17 00:00:00 2001 From: rjsparks <10996692+rjsparks@users.noreply.github.com> Date: Mon, 21 Apr 2025 16:13:32 +0000 Subject: [PATCH 069/405] ci: update base image target version to 20250421T1600 --- dev/build/Dockerfile | 2 +- dev/build/TARGET_BASE | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/dev/build/Dockerfile b/dev/build/Dockerfile index cd0a70667c..bdf3cff0e4 100644 --- a/dev/build/Dockerfile +++ b/dev/build/Dockerfile @@ -1,4 +1,4 @@ -FROM ghcr.io/ietf-tools/datatracker-app-base:20250402T1611 +FROM ghcr.io/ietf-tools/datatracker-app-base:20250421T1600 LABEL maintainer="IETF Tools Team " ENV DEBIAN_FRONTEND=noninteractive diff --git a/dev/build/TARGET_BASE b/dev/build/TARGET_BASE index 1195fc9a0b..bdfdb2eed1 100644 --- a/dev/build/TARGET_BASE +++ b/dev/build/TARGET_BASE @@ -1 +1 @@ -20250402T1611 +20250421T1600 From 35db33961d91ada14c5744af3cf8c160e02c5614 Mon Sep 17 00:00:00 2001 From: Robert Sparks Date: Tue, 22 Apr 2025 12:31:07 -0500 Subject: 
[PATCH 070/405] fix: lengthen liaison response_contact field (#8823) --- ...lter_liaisonstatement_response_contacts.py | 20 +++++++++++++++++++ ietf/liaisons/models.py | 2 +- 2 files changed, 21 insertions(+), 1 deletion(-) create mode 100644 ietf/liaisons/migrations/0002_alter_liaisonstatement_response_contacts.py diff --git a/ietf/liaisons/migrations/0002_alter_liaisonstatement_response_contacts.py b/ietf/liaisons/migrations/0002_alter_liaisonstatement_response_contacts.py new file mode 100644 index 0000000000..ac0a11101b --- /dev/null +++ b/ietf/liaisons/migrations/0002_alter_liaisonstatement_response_contacts.py @@ -0,0 +1,20 @@ +# Copyright The IETF Trust 2025, All Rights Reserved + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("liaisons", "0001_initial"), + ] + + operations = [ + migrations.AlterField( + model_name="liaisonstatement", + name="response_contacts", + field=models.TextField( + blank=True, help_text="Where to send a response", max_length=1024 + ), + ), + ] diff --git a/ietf/liaisons/models.py b/ietf/liaisons/models.py index f357d6cf8c..2ad502102c 100644 --- a/ietf/liaisons/models.py +++ b/ietf/liaisons/models.py @@ -33,7 +33,7 @@ class LiaisonStatement(models.Model): to_groups = models.ManyToManyField(Group, blank=True, related_name='liaisonstatement_to_set') to_contacts = models.CharField(max_length=2000, help_text="Contacts at recipient group") - response_contacts = models.CharField(blank=True, max_length=255, help_text="Where to send a response") # RFC4053 + response_contacts = models.TextField(blank=True, max_length=1024, help_text="Where to send a response") # RFC4053 technical_contacts = models.CharField(blank=True, max_length=255, help_text="Who to contact for clarification") # RFC4053 action_holder_contacts = models.CharField(blank=True, max_length=255, help_text="Who makes sure action is completed") # incoming only? 
cc_contacts = models.TextField(blank=True) From 7be503cf1c6c02ed7715f6b72417cbc760369175 Mon Sep 17 00:00:00 2001 From: Ryan Cross Date: Wed, 23 Apr 2025 08:33:13 -0700 Subject: [PATCH 071/405] feat: use new mail archive search API for review app lookups (#8788) * feat: use new mail archive search API for review app lookups * fix: fix patch call * fix: remove url from query_data * fix: force utc timezone on datetime from archive API * fix: remove query_data['url'] from test --- ietf/doc/tests_review.py | 220 ++++++++++-------- ietf/doc/views_review.py | 22 +- ietf/review/mailarch.py | 126 ++++------ ietf/settings.py | 2 + .../templates/doc/review/complete_review.html | 4 +- 5 files changed, 176 insertions(+), 198 deletions(-) diff --git a/ietf/doc/tests_review.py b/ietf/doc/tests_review.py index 13ddbc22ba..9850beca75 100644 --- a/ietf/doc/tests_review.py +++ b/ietf/doc/tests_review.py @@ -3,12 +3,12 @@ from pathlib import Path -import datetime, os, shutil +import datetime import io -import tarfile, tempfile, mailbox -import email.mime.multipart, email.mime.text, email.utils +import os +import shutil -from mock import patch +from mock import patch, Mock from requests import Response from django.apps import apps @@ -650,112 +650,132 @@ def test_accept_reviewer_assignment_after_reject(self): assignment = reload_db_objects(assignment) self.assertEqual(assignment.state_id, "accepted") - def make_test_mbox_tarball(self, review_req): - mbox_path = os.path.join(self.review_dir, "testmbox.tar.gz") - with tarfile.open(mbox_path, "w:gz") as tar: - with tempfile.NamedTemporaryFile(dir=self.review_dir, suffix=".mbox") as tmp: - mbox = mailbox.mbox(tmp.name) + @patch('ietf.review.mailarch.requests.post') + def test_retrieve_messages(self, mock_post): + mock_data = { + "results": [ + { + "from": "Alice ", + "subject": "Hello", + "content": "Hi\n This is a really good document.\n", + "message_id": "abc123", + "url": "https://example.com/message", + "date": "2025-04-07T12:00:00", 
+ } + ] + } + mock_post.return_value.json.return_value = mock_data - # plain text - msg = email.mime.text.MIMEText("Hello,\n\nI have reviewed the document and did not find any problems.\n\nJohn Doe") - msg["From"] = "John Doe " - msg["To"] = review_req.team.list_email - msg["Subject"] = "Review of {}-01".format(review_req.doc.name) - msg["Message-ID"] = email.utils.make_msgid() - msg["Archived-At"] = "" - msg["Date"] = email.utils.formatdate() - - mbox.add(msg) - - # plain text + HTML - msg = email.mime.multipart.MIMEMultipart('alternative') - msg["From"] = "John Doe II " - msg["To"] = review_req.team.list_email - msg["Subject"] = "Review of {}".format(review_req.doc.name) - msg["Message-ID"] = email.utils.make_msgid() - msg["Archived-At"] = "" - - msg.attach(email.mime.text.MIMEText("Hi!,\r\nLooks OK!\r\n-John", "plain")) - msg.attach(email.mime.text.MIMEText("

    Hi!,

    Looks OK!

    -John

    ", "html")) - mbox.add(msg) - - tmp.flush() - - tar.add(os.path.relpath(tmp.name)) + doc = WgDraftFactory(group__acronym='mars', rev='01') + review_team = ReviewTeamFactory(acronym="reviewteam", name="Review Team", type_id="review", list_email="reviewteam@ietf.org", parent=Group.objects.get(acronym="farfut")) + rev_role = RoleFactory(group=review_team, person__user__username='reviewer', person__user__email='reviewer@example.com', name_id='reviewer') + RoleFactory(group=review_team, person__user__username='reviewsecretary', person__user__email='reviewsecretary@example.com', name_id='secr') + review_req = ReviewRequestFactory(doc=doc, team=review_team, type_id='early', state_id='assigned', requested_by=rev_role.person, deadline=timezone.now() + datetime.timedelta(days=20)) + ReviewAssignmentFactory(review_request=review_req, reviewer=rev_role.person.email_set.first(), state_id='accepted') - mbox.close() + query_data = ietf.review.mailarch.construct_query_data(doc, review_team, query=None) + response = ietf.review.mailarch.retrieve_messages(query_data) - return mbox_path + self.assertEqual(len(response), 1) + self.assertEqual(response[0]['from'], 'Alice ') + self.assertEqual(response[0]['splitfrom'], ('Alice', 'alice@example.com')) + self.assertEqual(response[0]['subject'], 'Hello') + self.assertEqual(response[0]['content'], 'Hi\n This is a really good document.') + self.assertEqual(response[0]['message_id'], 'abc123') + self.assertEqual(response[0]['url'], 'https://example.com/message') + self.assertEqual(response[0]['utcdate'], ('2025-04-07', '12:00:00')) - def test_search_mail_archive(self): - doc = WgDraftFactory(group__acronym='mars',rev='01') + def test_construct_query_data(self): + doc = WgDraftFactory(group__acronym='mars', rev='01') review_team = ReviewTeamFactory(acronym="reviewteam", name="Review Team", type_id="review", list_email="reviewteam@ietf.org", parent=Group.objects.get(acronym="farfut")) - rev_role = 
RoleFactory(group=review_team,person__user__username='reviewer',person__user__email='reviewer@example.com',name_id='reviewer') - RoleFactory(group=review_team,person__user__username='reviewsecretary',person__user__email='reviewsecretary@example.com',name_id='secr') - review_req = ReviewRequestFactory(doc=doc,team=review_team,type_id='early',state_id='assigned',requested_by=rev_role.person,deadline=timezone.now()+datetime.timedelta(days=20)) + data = ietf.review.mailarch.construct_query_data(doc, review_team, query=None) + self.assertEqual(data['start_date'], (date_today() - datetime.timedelta(days=180)).isoformat()) + self.assertEqual(data['email_list'], 'reviewteam') + self.assertEqual(data['query_value'], doc.name) + self.assertEqual(data['query'], f'subject:({doc.name})') + self.assertEqual(data['limit'], '30') + + @patch('ietf.doc.views_review.requests.post') + def test_search_mail_archive(self, mock_post): + doc = WgDraftFactory(group__acronym='mars', rev='01') + review_team = ReviewTeamFactory(acronym="reviewteam", name="Review Team", type_id="review", list_email="reviewteam@ietf.org", parent=Group.objects.get(acronym="farfut")) + rev_role = RoleFactory(group=review_team, person__user__username='reviewer', person__user__email='reviewer@example.com', name_id='reviewer') + RoleFactory(group=review_team, person__user__username='reviewsecretary', person__user__email='reviewsecretary@example.com', name_id='secr') + review_req = ReviewRequestFactory(doc=doc, team=review_team, type_id='early', state_id='assigned', requested_by=rev_role.person, deadline=timezone.now() + datetime.timedelta(days=20)) assignment = ReviewAssignmentFactory(review_request=review_req, reviewer=rev_role.person.email_set.first(), state_id='accepted') + mock_data = { + "results": [ + { + "from": "Alice ", + "subject": "Review of {}-01".format(review_req.doc.name), + "content": "Hi\n This is a really good document.\n", + "message_id": "abc123", + "url": "https://example.com/message", + "date": 
"2025-04-07T12:00:00", + }, + { + "from": "Joe ", + "subject": "Review of {}".format(review_req.doc.name), + "content": "Hi\n I believe this is the best document.\n", + "message_id": "abc456", + "url": "https://example.com/message", + "date": "2025-04-07T12:00:00", + } + ] + } + response1 = Mock() + response1.json.return_value = mock_data + + response2 = Mock() + response2.json.return_value = mock_data + + response3 = Mock() + response3.json.return_value = {"results": []} + + mock_post.side_effect = [response1, response2, response3] + # test URL construction - query_urls = ietf.review.mailarch.construct_query_urls(doc, review_team) - self.assertTrue(review_req.doc.name in query_urls["query_data_url"]) + query_data = ietf.review.mailarch.construct_query_data(doc, review_team) + self.assertTrue(review_req.doc.name in query_data["query_value"]) - # test parsing - mbox_path = self.make_test_mbox_tarball(review_req) + url = urlreverse('ietf.doc.views_review.search_mail_archive', kwargs={"name": doc.name, "assignment_id": assignment.pk}) + url2 = urlreverse('ietf.doc.views_review.search_mail_archive', kwargs={"name": doc.name, "acronym": review_team.acronym}) + login_testing_unauthorized(self, "reviewsecretary", url) - try: - # mock URL generator and point it to local file - for this - # to work, the module (and not the function) must be - # imported in the view - real_fn = ietf.review.mailarch.construct_query_urls - ietf.review.mailarch.construct_query_urls = lambda doc, team, query=None: { "query_data_url": "file://" + os.path.abspath(mbox_path) } - url = urlreverse('ietf.doc.views_review.search_mail_archive', kwargs={ "name": doc.name, "assignment_id": assignment.pk }) - url2 = urlreverse('ietf.doc.views_review.search_mail_archive', kwargs={ "name": doc.name, "acronym": review_team.acronym }) - login_testing_unauthorized(self, "reviewsecretary", url) - - r = self.client.get(url) - self.assertEqual(r.status_code, 200) - messages = r.json()["messages"] - 
self.assertEqual(len(messages), 2) - - r = self.client.get(url2) - self.assertEqual(r.status_code, 200) - messages = r.json()["messages"] - self.assertEqual(len(messages), 2) - - today = date_today(datetime.timezone.utc) - - self.assertEqual(messages[0]["url"], "https://www.example.com/testmessage") - self.assertTrue("John Doe" in messages[0]["content"]) - self.assertEqual(messages[0]["subject"], "Review of {}-01".format(review_req.doc.name)) - self.assertEqual(messages[0]["revision_guess"], "01") - self.assertEqual(messages[0]["splitfrom"], ["John Doe", "johndoe@example.com"]) - self.assertEqual(messages[0]["utcdate"][0], today.isoformat()) - - self.assertEqual(messages[1]["url"], "https://www.example.com/testmessage2") - self.assertTrue("Looks OK" in messages[1]["content"]) - self.assertTrue("" not in messages[1]["content"]) - self.assertEqual(messages[1]["subject"], "Review of {}".format(review_req.doc.name)) - self.assertFalse('revision_guess' in messages[1]) - self.assertEqual(messages[1]["splitfrom"], ["John Doe II", "johndoe2@example.com"]) - self.assertEqual(messages[1]["utcdate"][0], "") - - - # Test failure to return mailarch results - no_result_path = os.path.join(self.review_dir, "mailarch_no_result.html") - with io.open(no_result_path, "w") as f: - f.write('Content-Type: text/html\n\n
    No results found
    ') - ietf.review.mailarch.construct_query_urls = lambda doc, team, query=None: { "query_data_url": "file://" + os.path.abspath(no_result_path) } - - url = urlreverse('ietf.doc.views_review.search_mail_archive', kwargs={ "name": doc.name, "assignment_id": assignment.pk }) - - r = self.client.get(url) - self.assertEqual(r.status_code, 200) - result = r.json() - self.assertNotIn('messages', result) - self.assertIn('No results found', result['error']) - - finally: - ietf.review.mailarch.construct_query_urls = real_fn + r = self.client.get(url) + self.assertEqual(r.status_code, 200) + messages = r.json()["messages"] + self.assertEqual(len(messages), 2) + + r = self.client.get(url2) + self.assertEqual(r.status_code, 200) + messages = r.json()["messages"] + self.assertEqual(len(messages), 2) + + self.assertEqual(messages[0]["url"], "https://example.com/message") + self.assertTrue("Hi" in messages[0]["content"]) + self.assertEqual(messages[0]["subject"], "Review of {}-01".format(review_req.doc.name)) + self.assertEqual(messages[0]["revision_guess"], "01") + self.assertEqual(messages[0]["splitfrom"], ["Alice", "alice@example.com"]) + self.assertEqual(messages[0]["utcdate"], ['2025-04-07', '12:00:00']) + + self.assertEqual(messages[1]["url"], "https://example.com/message") + self.assertTrue("Hi" in messages[1]["content"]) + self.assertTrue("" not in messages[1]["content"]) + self.assertEqual(messages[1]["subject"], "Review of {}".format(review_req.doc.name)) + self.assertFalse('revision_guess' in messages[1]) + self.assertEqual(messages[1]["splitfrom"], ["Joe", "joe@example.com"]) + self.assertEqual(messages[1]["utcdate"], ['2025-04-07', '12:00:00']) + + # Test failure to return mailarch results + url = urlreverse('ietf.doc.views_review.search_mail_archive', kwargs={"name": doc.name, "assignment_id": assignment.pk}) + + r = self.client.get(url) + self.assertEqual(r.status_code, 200) + result = r.json() + self.assertNotIn('messages', result) + self.assertIn('No results 
found', result['error']) def test_submit_unsolicited_review_choose_team(self): doc = WgDraftFactory(group__acronym='mars', rev='01') diff --git a/ietf/doc/views_review.py b/ietf/doc/views_review.py index 1f23c435fa..6ac0e17791 100644 --- a/ietf/doc/views_review.py +++ b/ietf/doc/views_review.py @@ -958,14 +958,14 @@ def complete_review(request, name, assignment_id=None, acronym=None): form = CompleteReviewForm(assignment, doc, team, is_reviewer, initial=initial) - mail_archive_query_urls = mailarch.construct_query_urls(doc, team) + mail_archive_query_data = mailarch.construct_query_data(doc, team) return render(request, 'doc/review/complete_review.html', { 'doc': doc, 'team': team, 'assignment': assignment, 'form': form, - 'mail_archive_query_urls': mail_archive_query_urls, + 'mail_archive_query_data': mail_archive_query_data, 'revising_review': revising_review, 'review_to': to, 'review_cc': cc, @@ -987,27 +987,25 @@ def search_mail_archive(request, name, acronym=None, assignment_id=None): if not (is_reviewer or can_manage_request): permission_denied(request, "You do not have permission to perform this action") - res = mailarch.construct_query_urls(doc, team, query=request.GET.get("query")) - if not res: - return JsonResponse({ "error": "Couldn't do lookup in mail archive - don't know where to look"}) - - MAX_RESULTS = 30 + query_data = mailarch.construct_query_data(doc, team, query=request.GET.get("query")) + if not query_data: + return JsonResponse({"error": "Couldn't do lookup in mail archive - don't know where to look"}) try: - res["messages"] = mailarch.retrieve_messages(res["query_data_url"])[:MAX_RESULTS] - for message in res["messages"]: + query_data["messages"] = mailarch.retrieve_messages(query_data) + for message in query_data["messages"]: try: revision_guess = message["subject"].split(name)[1].split('-')[1] message["revision_guess"] = revision_guess if revision_guess.isnumeric() else None except IndexError: pass except KeyError as e: - res["error"] = 
"No results found (%s)" % str(e) + query_data["error"] = "No results found (%s)" % str(e) except Exception as e: - res["error"] = "Retrieval from mail archive failed: %s" % str(e) + query_data["error"] = "Retrieval from mail archive failed: %s" % str(e) # raise # useful when debugging - return JsonResponse(res) + return JsonResponse(query_data) class EditReviewRequestCommentForm(forms.ModelForm): comment = forms.CharField(widget=forms.Textarea, strip=False) diff --git a/ietf/review/mailarch.py b/ietf/review/mailarch.py index c34a6079ce..61abc83aa5 100644 --- a/ietf/review/mailarch.py +++ b/ietf/review/mailarch.py @@ -6,25 +6,18 @@ # mailarchive.ietf.org import base64 -import contextlib import datetime import email.utils import hashlib -import mailbox -import tarfile -import tempfile - -from urllib.parse import urlencode -from urllib.request import urlopen +import requests import debug # pyflakes:ignore -from pyquery import PyQuery from django.conf import settings from django.utils.encoding import force_bytes, force_str -from ietf.utils.mail import get_payload_text +from ietf.utils.log import log from ietf.utils.timezone import date_today @@ -43,7 +36,7 @@ def hash_list_message_id(list_name, msgid): sha.update(force_bytes(list_name)) return force_str(base64.urlsafe_b64encode(sha.digest()).rstrip(b"=")) -def construct_query_urls(doc, team, query=None): +def construct_query_data(doc, team, query=None): list_name = list_name_from_email(team.list_email) if not list_name: return None @@ -51,83 +44,48 @@ def construct_query_urls(doc, team, query=None): if not query: query = doc.name - encoded_query = "?" 
+ urlencode({ - "qdr": "c", # custom time frame - "start_date": (date_today() - datetime.timedelta(days=180)).isoformat(), - "email_list": list_name, - "q": "subject:({})".format(query), - "as": "1", # this is an advanced search - }) - - return { - "query": query, - "query_url": settings.MAILING_LIST_ARCHIVE_URL + "/arch/search/" + encoded_query, - "query_data_url": settings.MAILING_LIST_ARCHIVE_URL + "/arch/export/mbox/" + encoded_query, + query_data = { + 'start_date': (date_today() - datetime.timedelta(days=180)).isoformat(), + 'email_list': list_name, + 'query_value': query, + 'query': f'subject:({query})', + 'limit': '30', } + return query_data def construct_message_url(list_name, msgid): return "{}/arch/msg/{}/{}".format(settings.MAILING_LIST_ARCHIVE_URL, list_name, hash_list_message_id(list_name, msgid)) -def retrieve_messages_from_mbox(mbox_fileobj): - """Return selected content in message from mbox from mailarch.""" - res = [] - with tempfile.NamedTemporaryFile(suffix=".mbox") as mbox_file: - # mailbox.mbox needs a path, so we need to put the contents - # into a file - mbox_data = mbox_fileobj.read() - mbox_file.write(mbox_data) - mbox_file.flush() - - mbox = mailbox.mbox(mbox_file.name, create=False) - for msg in mbox: - content = "" - - for part in msg.walk(): - if part.get_content_type() == "text/plain": - charset = part.get_content_charset() or "utf-8" - content += get_payload_text(part, default_charset=charset) - - # parse a couple of things for the front end - utcdate = None - d = email.utils.parsedate_tz(msg["Date"]) - if d: - utcdate = datetime.datetime.fromtimestamp(email.utils.mktime_tz(d), datetime.timezone.utc) - - res.append({ - "from": msg["From"], - "splitfrom": email.utils.parseaddr(msg["From"]), - "subject": msg["Subject"], - "content": content.replace("\r\n", "\n").replace("\r", "\n").strip("\n"), - "message_id": email.utils.unquote(msg["Message-ID"].strip()), - "url": email.utils.unquote(msg["Archived-At"].strip()), - "date": 
msg["Date"], - "utcdate": (utcdate.date().isoformat(), utcdate.time().isoformat()) if utcdate else ("", ""), - }) - mbox.close() - - return res - -def retrieve_messages(query_data_url): +def retrieve_messages(query_data): """Retrieve and return selected content from mailarch.""" - res = [] - - # This has not been rewritten to use requests.get() because get() does - # not handle file URLs out of the box, which we need for tesing - with contextlib.closing(urlopen(query_data_url, timeout=15)) as fileobj: - content_type = fileobj.info()["Content-type"] - if not content_type.startswith("application/x-tar"): - if content_type.startswith("text/html"): - r = fileobj.read(20000) - q = PyQuery(r) - div = q('div[class~="no-results"]') - if div: - raise KeyError("No results: %s -> %s" % (query_data_url, div.text(), )) - raise Exception("Export failed - this usually means no matches were found") - - with tarfile.open(fileobj=fileobj, mode='r|*') as tar: - for entry in tar: - if entry.isfile(): - mbox_fileobj = tar.extractfile(entry) - res.extend(retrieve_messages_from_mbox(mbox_fileobj)) - - return res + + headers = {'X-Api-Key': settings.MAILING_LIST_ARCHIVE_API_KEY} + try: + response = requests.post( + settings.MAILING_LIST_ARCHIVE_SEARCH_URL, + headers=headers, + json=query_data, + timeout=settings.DEFAULT_REQUESTS_TIMEOUT) + except requests.Timeout as exc: + log(f'POST request failed for [{query_data["url"]}]: {exc}') + raise RuntimeError(f'Timeout retrieving [{query_data["url"]}]') from exc + + results = [] + jresponse = response.json() + if 'results' not in jresponse or len(jresponse['results']) == 0: + raise KeyError(f'No results: {query_data["query"]}') + for msg in jresponse['results']: + # datetime is already UTC + dt = datetime.datetime.fromisoformat(msg['date']) + dt_utc = dt.replace(tzinfo=datetime.timezone.utc) + results.append({ + "from": msg["from"], + "splitfrom": email.utils.parseaddr(msg["from"]), + "subject": msg["subject"], + "content": 
msg["content"].replace("\r\n", "\n").replace("\r", "\n").strip("\n"), + "message_id": msg["message_id"], + "url": msg["url"], + "utcdate": (dt_utc.date().isoformat(), dt_utc.time().isoformat()), + }) + + return results diff --git a/ietf/settings.py b/ietf/settings.py index 33a2f976d9..8005aca8cc 100644 --- a/ietf/settings.py +++ b/ietf/settings.py @@ -808,6 +808,8 @@ def skip_unreadable_post(record): # Mailing list info URL for lists hosted on the IETF servers MAILING_LIST_INFO_URL = "https://mailman3.%(domain)s/mailman3/lists/%(list_addr)s.%(domain)s" MAILING_LIST_ARCHIVE_URL = "https://mailarchive.ietf.org" +MAILING_LIST_ARCHIVE_SEARCH_URL = "https://mailarchive.ietf.org/api/v1/message/search/" +MAILING_LIST_ARCHIVE_API_KEY = "changeme" # Liaison Statement Tool settings (one is used in DOC_HREFS below) LIAISON_UNIVERSAL_FROM = 'Liaison Statement Management Tool ' diff --git a/ietf/templates/doc/review/complete_review.html b/ietf/templates/doc/review/complete_review.html index 091f30532e..8aeb1486a4 100644 --- a/ietf/templates/doc/review/complete_review.html +++ b/ietf/templates/doc/review/complete_review.html @@ -93,7 +93,7 @@

    Back {% endif %}
    - {% if mail_archive_query_urls %} + {% if mail_archive_query_data %}
    - {% if person.photo %} + {% if person.photo or person.role_set.exists %}
    -
    {% include "person/photo.html" with person=person %}
    +
    + {% if person.photo %} + {% include "person/photo.html" with person=person %} + {% endif %} + {% if person.role_set.exists %} +

    Email support@ietf.org + to update your photo.

    + {% endif %}
    {% endif %}
    From e55162360c198765ae16f2baa8413ed1b502fa9c Mon Sep 17 00:00:00 2001 From: Ryan Cross Date: Thu, 24 Jul 2025 14:25:49 +0200 Subject: [PATCH 156/405] fix: Improvements to registration admin views (#9185) * fix: add tickets to Registration admin * fix: more improvements to registration admin views * fix: add ignore for mypy * fix: remove unused import * fix: add new admin templates to TEST_TEMPLATE_IGNORE * fix: use full path for TEST_TEMPLATE_IGNORE --- ietf/meeting/admin.py | 56 ++++++++++++++++--- ietf/settings.py | 2 + .../meeting/Registration/change_list.html | 10 ++++ .../RegistrationTicket/change_list.html | 10 ++++ 4 files changed, 70 insertions(+), 8 deletions(-) create mode 100644 ietf/templates/admin/meeting/Registration/change_list.html create mode 100644 ietf/templates/admin/meeting/RegistrationTicket/change_list.html diff --git a/ietf/meeting/admin.py b/ietf/meeting/admin.py index 1c5d5c67b5..d886a9a4b6 100644 --- a/ietf/meeting/admin.py +++ b/ietf/meeting/admin.py @@ -3,11 +3,13 @@ from django.contrib import admin +from django.db.models import Count from ietf.meeting.models import (Attended, Meeting, Room, Session, TimeSlot, Constraint, Schedule, SchedTimeSessAssignment, ResourceAssociation, FloorPlan, UrlResource, SessionPresentation, ImportantDate, SlideSubmission, SchedulingEvent, BusinessConstraint, - ProceedingsMaterial, MeetingHost, Registration, RegistrationTicket) + ProceedingsMaterial, MeetingHost, Registration, RegistrationTicket, + AttendanceTypeName) class UrlResourceAdmin(admin.ModelAdmin): @@ -219,8 +221,9 @@ class MeetingFilter(admin.SimpleListFilter): parameter_name = 'meeting_id' def lookups(self, request, model_admin): - # Your queryset to limit choices - choices = Meeting.objects.filter(type='ietf').values_list('id', 'number') + # only include meetings with registration records + meetings = Meeting.objects.filter(type='ietf').annotate(reg_count=Count('registration')).filter(reg_count__gt=0).order_by('-date') + choices = 
meetings.values_list('id', 'number') return choices def queryset(self, request, queryset): @@ -228,23 +231,60 @@ def queryset(self, request, queryset): return queryset.filter(meeting__id=self.value()) return queryset +class AttendanceFilter(admin.SimpleListFilter): + title = 'Attendance Type' + parameter_name = 'attendance_type' + + def lookups(self, request, model_admin): + choices = AttendanceTypeName.objects.all().values_list('slug', 'name') + return choices + + def queryset(self, request, queryset): + if self.value(): + return queryset.filter(tickets__attendance_type__slug=self.value()).distinct() + return queryset + class RegistrationTicketInline(admin.TabularInline): model = RegistrationTicket class RegistrationAdmin(admin.ModelAdmin): model = Registration - # list_filter = [('meeting', Meeting.objects.filter(type='ietf')), ] - list_filter = [MeetingFilter, ] - list_display = ['meeting', 'first_name', 'last_name', 'affiliation', 'country_code', 'person', 'email', ] - search_fields = ['meeting__number', 'first_name', 'last_name', 'affiliation', 'country_code', 'email', ] + list_filter = [AttendanceFilter, MeetingFilter] + list_display = ['meeting', 'first_name', 'last_name', 'display_attendance', 'affiliation', 'country_code', 'email', ] + search_fields = ['first_name', 'last_name', 'affiliation', 'country_code', 'email', ] raw_id_fields = ['person'] inlines = [RegistrationTicketInline, ] + ordering = ['-meeting__date', 'last_name'] + + def display_attendance(self, instance): + '''Only display the most significant ticket in the list. 
+ To see all the tickets inspect the individual instance + ''' + if instance.tickets.filter(attendance_type__slug='onsite').exists(): + return 'onsite' + elif instance.tickets.filter(attendance_type__slug='remote').exists(): + return 'remote' + elif instance.tickets.filter(attendance_type__slug='hackathon_onsite').exists(): + return 'hackathon onsite' + elif instance.tickets.filter(attendance_type__slug='hackathon_remote').exists(): + return 'hackathon remote' + display_attendance.short_description = "Attendance" # type: ignore # https://github.com/python/mypy/issues/2087 + admin.site.register(Registration, RegistrationAdmin) class RegistrationTicketAdmin(admin.ModelAdmin): model = RegistrationTicket list_filter = ['attendance_type', ] - list_display = ['registration', 'attendance_type', 'ticket_type'] + # not available until Django 5.2, the name of a related field, using the __ notation + # list_display = ['registration__meeting', 'registration', 'attendance_type', 'ticket_type', 'registration__email'] + # list_select_related = ('registration',) + list_display = ['registration', 'attendance_type', 'ticket_type', 'display_meeting'] search_fields = ['registration__first_name', 'registration__last_name', 'registration__email'] raw_id_fields = ['registration'] + ordering = ['-registration__meeting__date', 'registration__last_name'] + + def display_meeting(self, instance): + return instance.registration.meeting.number + display_meeting.short_description = "Meeting" # type: ignore # https://github.com/python/mypy/issues/2087 + admin.site.register(RegistrationTicket, RegistrationTicketAdmin) diff --git a/ietf/settings.py b/ietf/settings.py index 64679ca1d8..3af01d76e6 100644 --- a/ietf/settings.py +++ b/ietf/settings.py @@ -729,6 +729,8 @@ def skip_unreadable_post(record): "*~", # tilde temp-files "#*", # files beginning with a hashmark "500.html", # isn't loaded by regular loader, but checked by test_500_page() + 
"ietf/templates/admin/meeting/RegistrationTicket/change_list.html", + "ietf/templates/admin/meeting/Registration/change_list.html", ] TEST_COVERAGE_MAIN_FILE = os.path.join(BASE_DIR, "../release-coverage.json") diff --git a/ietf/templates/admin/meeting/Registration/change_list.html b/ietf/templates/admin/meeting/Registration/change_list.html new file mode 100644 index 0000000000..62784b2cb6 --- /dev/null +++ b/ietf/templates/admin/meeting/Registration/change_list.html @@ -0,0 +1,10 @@ +{% extends "admin/change_list.html" %} + +{% block search %} + {{ block.super }} {# This includes the original search form #} + {% if cl.search_fields %} {# Only show if search is enabled for the model #} +

    + Hint: Search by: {{ cl.search_fields|join:", "|capfirst }}. +

    + {% endif %} +{% endblock %} \ No newline at end of file diff --git a/ietf/templates/admin/meeting/RegistrationTicket/change_list.html b/ietf/templates/admin/meeting/RegistrationTicket/change_list.html new file mode 100644 index 0000000000..62784b2cb6 --- /dev/null +++ b/ietf/templates/admin/meeting/RegistrationTicket/change_list.html @@ -0,0 +1,10 @@ +{% extends "admin/change_list.html" %} + +{% block search %} + {{ block.super }} {# This includes the original search form #} + {% if cl.search_fields %} {# Only show if search is enabled for the model #} +

    + Hint: Search by: {{ cl.search_fields|join:", "|capfirst }}. +

    + {% endif %} +{% endblock %} \ No newline at end of file From ea8377db0d4ba8bc990c3f29386fba942f55c84f Mon Sep 17 00:00:00 2001 From: Tero Kivinen Date: Mon, 28 Jul 2025 16:18:13 +0300 Subject: [PATCH 157/405] fix: Add Reviews history page to review teams group page. (fixes #9190) (#9191) * Add Reviews history page to review teams group page. * fix: Added test cases. Fixed html to validate, moved sort text to data-text from comments. Added test cases. fixes #9190 --- ietf/group/tests_review.py | 128 ++++++++++++++++++ ietf/group/urls.py | 1 + ietf/group/utils.py | 1 + ietf/group/views.py | 50 +++++++ .../group/review_requests_history.html | 90 ++++++++++++ 5 files changed, 270 insertions(+) create mode 100644 ietf/templates/group/review_requests_history.html diff --git a/ietf/group/tests_review.py b/ietf/group/tests_review.py index a03b806f8f..d671228953 100644 --- a/ietf/group/tests_review.py +++ b/ietf/group/tests_review.py @@ -815,3 +815,131 @@ def test_reset_next_reviewer(self): self.assertEqual(NextReviewerInTeam.objects.get(team=group).next_reviewer, reviewers[target_index].person) self.client.logout() target_index += 2 + +class RequestsHistoryTests(TestCase): + def test_requests_history_overview_page(self): + # Make assigned assignment + review_req = ReviewRequestFactory(state_id='assigned') + assignment = ReviewAssignmentFactory(review_request=review_req, + state_id='assigned', + reviewer=EmailFactory(), + assigned_on = review_req.time) + group = review_req.team + + for url in [urlreverse(ietf.group.views.review_requests_history, + kwargs={ 'acronym': group.acronym }), + urlreverse(ietf.group.views.review_requests_history, + kwargs={ 'acronym': group.acronym , + 'group_type': group.type_id}), + urlreverse(ietf.group.views.review_requests_history, + kwargs={ 'acronym': group.acronym }) + + '?since=3m', + urlreverse(ietf.group.views.review_requests_history, + kwargs={ 'acronym': group.acronym , + 'group_type': group.type_id }) + + '?since=3m']: + r = 
self.client.get(url) + self.assertEqual(r.status_code, 200) + self.assertContains(r, review_req.doc.name) + self.assertContains(r, 'Assigned') + self.assertContains(r, escape(assignment.reviewer.person.name)) + + url = urlreverse(ietf.group.views.review_requests_history, + kwargs={ 'acronym': group.acronym }) + + assignment.state = ReviewAssignmentStateName.objects.get(slug="completed") + assignment.result = ReviewResultName.objects.get(slug="ready") + assignment.save() + + r = self.client.get(url) + self.assertEqual(r.status_code, 200) + self.assertContains(r, review_req.doc.name) + self.assertContains(r, 'Assigned') + self.assertContains(r, 'Completed') + + def test_requests_history_filter_page(self): + # First assignment as assigned + review_req = ReviewRequestFactory(state_id = 'assigned', + doc = DocumentFactory()) + assignment = ReviewAssignmentFactory(review_request = review_req, + state_id = 'assigned', + reviewer = EmailFactory(), + assigned_on = review_req.time) + group = review_req.team + + # Second assignment in same group as accepted + review_req2 = ReviewRequestFactory(state_id = 'assigned', + team = review_req.team, + doc = DocumentFactory()) + assignment2 = ReviewAssignmentFactory(review_request = review_req2, + state_id='accepted', + reviewer = EmailFactory(), + assigned_on = review_req2.time) + + # Modify the assignment to be completed, and mark it ready + assignment2.state = ReviewAssignmentStateName.objects.get(slug="completed") + assignment2.result = ReviewResultName.objects.get(slug="ready") + assignment2.save() + + # Check that we have all information when we do not filter + url = urlreverse(ietf.group.views.review_requests_history, + kwargs={ 'acronym': group.acronym }) + r = self.client.get(url) + self.assertEqual(r.status_code, 200) + self.assertContains(r, review_req.doc.name) + self.assertContains(r, review_req2.doc.name) + self.assertContains(r, 'Assigned') + self.assertContains(r, 'Accepted') + self.assertContains(r, 'Completed') + 
self.assertContains(r, 'Ready') + self.assertContains(r, escape(assignment.reviewer.person.name)) + self.assertContains(r, escape(assignment2.reviewer.person.name)) + + # Check first reviewer history + for url in [urlreverse(ietf.group.views.review_requests_history, + kwargs={ 'acronym': group.acronym }) + + '?reviewer_email=' + str(assignment.reviewer), + urlreverse(ietf.group.views.review_requests_history, + kwargs={ 'acronym': group.acronym , + 'group_type': group.type_id}) + + '?reviewer_email=' + str(assignment.reviewer)]: + r = self.client.get(url) + self.assertEqual(r.status_code, 200) + self.assertContains(r, review_req.doc.name) + self.assertNotContains(r, review_req2.doc.name) + self.assertContains(r, 'Assigned') + self.assertNotContains(r, 'Accepted') + self.assertNotContains(r, 'Completed') + self.assertNotContains(r, 'Ready') + self.assertContains(r, escape(assignment.reviewer.person.name)) + self.assertNotContains(r, escape(assignment2.reviewer.person.name)) + + # Check second reviewer history + for url in [urlreverse(ietf.group.views.review_requests_history, + kwargs={ 'acronym': group.acronym }) + + '?reviewer_email=' + str(assignment2.reviewer), + urlreverse(ietf.group.views.review_requests_history, + kwargs={ 'acronym': group.acronym , + 'group_type': group.type_id}) + + '?reviewer_email=' + str(assignment2.reviewer)]: + r = self.client.get(url) + self.assertEqual(r.status_code, 200) + self.assertNotContains(r, review_req.doc.name) + self.assertContains(r, review_req2.doc.name) + self.assertNotContains(r, 'Assigned') + self.assertContains(r, 'Accepted') + self.assertContains(r, 'Completed') + self.assertContains(r, 'Ready') + self.assertNotContains(r, escape(assignment.reviewer.person.name)) + self.assertContains(r, escape(assignment2.reviewer.person.name)) + + # Check for reviewer that does not have anything + url = urlreverse(ietf.group.views.review_requests_history, + kwargs={ 'acronym': group.acronym }) + 
'?reviewer_email=nobody@nowhere.example.org' + + r = self.client.get(url) + self.assertEqual(r.status_code, 200) + self.assertNotContains(r, review_req.doc.name) + self.assertNotContains(r, 'Assigned') + self.assertNotContains(r, 'Accepted') + self.assertNotContains(r, 'Completed') diff --git a/ietf/group/urls.py b/ietf/group/urls.py index 1824564c4d..8354aba063 100644 --- a/ietf/group/urls.py +++ b/ietf/group/urls.py @@ -24,6 +24,7 @@ url(r'^about/status/edit/$', views.group_about_status_edit), url(r'^about/status/meeting/(?P\d+)/$', views.group_about_status_meeting), url(r'^history/$',views.history), + url(r'^requestshistory/$',views.review_requests_history), url(r'^history/addcomment/$',views.add_comment), url(r'^email/$', views.email), url(r'^deps\.json$', views.dependencies), diff --git a/ietf/group/utils.py b/ietf/group/utils.py index dcf9d83e6f..29cfff2c2d 100644 --- a/ietf/group/utils.py +++ b/ietf/group/utils.py @@ -236,6 +236,7 @@ def construct_group_menu_context(request, group, selected, group_type, others): import ietf.group.views entries.append(("Review requests", urlreverse(ietf.group.views.review_requests, kwargs=kwargs))) entries.append(("Reviewers", urlreverse(ietf.group.views.reviewer_overview, kwargs=kwargs))) + entries.append(("Reviews History", urlreverse(ietf.group.views.review_requests_history, kwargs=kwargs))) if group.features.has_meetings: entries.append(("Meetings", urlreverse("ietf.group.views.meetings", kwargs=kwargs))) diff --git a/ietf/group/views.py b/ietf/group/views.py index 0c89302c6a..bc79599722 100644 --- a/ietf/group/views.py +++ b/ietf/group/views.py @@ -686,6 +686,56 @@ def history(request, acronym, group_type=None): "can_add_comment": can_add_comment, })) +def review_requests_history(request, acronym, group_type=None): + group = get_group_or_404(acronym, group_type) + if not group.features.has_reviews: + raise Http404 + + reviewer_email = request.GET.get("reviewer_email", None) + + if reviewer_email: + history = 
ReviewAssignment.history.model.objects.filter( + review_request__team__acronym=acronym, + reviewer=reviewer_email) + else: + history = ReviewAssignment.history.model.objects.filter( + review_request__team__acronym=acronym) + reviewer_email = '' + + since_choices = [ + (None, "1 month"), + ("3m", "3 months"), + ("6m", "6 months"), + ("1y", "1 year"), + ("2y", "2 years"), + ("all", "All"), + ] + since = request.GET.get("since", None) + + if since not in [key for key, label in since_choices]: + since = None + + if since != "all": + date_limit = { + None: datetime.timedelta(days=31), + "3m": datetime.timedelta(days=31 * 3), + "6m": datetime.timedelta(days=180), + "1y": datetime.timedelta(days=365), + "2y": datetime.timedelta(days=2 * 365), + }[since] + + history = history.filter(review_request__time__gte=datetime_today(DEADLINE_TZINFO) - date_limit) + + return render(request, 'group/review_requests_history.html', + construct_group_menu_context(request, group, "reviews history", group_type, { + "group": group, + "acronym": acronym, + "history": history, + "since_choices": since_choices, + "since": since, + "reviewer_email": reviewer_email + })) + def materials(request, acronym, group_type=None): group = get_group_or_404(acronym, group_type) if not group.features.has_nonsession_materials: diff --git a/ietf/templates/group/review_requests_history.html b/ietf/templates/group/review_requests_history.html new file mode 100644 index 0000000000..1b1fb4d263 --- /dev/null +++ b/ietf/templates/group/review_requests_history.html @@ -0,0 +1,90 @@ +{% extends "group/group_base.html" %} +{# Copyright The IETF Trust 2015, All Rights Reserved #} +{% load origin %} +{% load tz %} +{% load ietf_filters person_filters textfilters %} +{% load static %} +{% block pagehead %} + +{% endblock %} +{% block group_content %} + {% origin %} + {% if reviewer_email %} +

    Review requests history of {{ reviewer_email }}

    + {% else %} +

    Review requests history

    + {% endif %} +
    +
    + + + +
    +
    + Past: +
    + {% for key, label in since_choices %} + + {% endfor %} +
    +
    +
    + + + + + + + + + + + + + + {% if history %} + + {% for h in history %} + + + + + + + + + + {% endfor %} + + {% endif %} +
    Date (UTC)ByDocumentStateReviewerResultDescription
    {{ h.history_date|utc|date:"Y-m-d H:i:s" }}{% person_link h.history_user.person %}{% if h.reviewed_rev %} + + {{ h.review_request.doc.name }}-{{ h.reviewed_rev }} + + {% else %} + {{ h.review_request.doc.name }} + {% endif %} + + {{ h.state }} + + {% person_link h.reviewer.person %} + + (set as filter) + + + {% if h.review %} + {{ h.result }} + {% else %} + {{ h.result }} + {% endif %} + {{ h.history_change_reason }}
    +{% endblock %} +{% block js %} + +{% endblock %} From 4762e252552587bfc80c91244c9acb9cc59d820b Mon Sep 17 00:00:00 2001 From: Jennifer Richards Date: Mon, 4 Aug 2025 15:02:44 -0300 Subject: [PATCH 158/405] test: replace mock with unittest.mock (#9286) --- ietf/api/tests.py | 2 +- ietf/community/tests.py | 2 +- ietf/doc/tests.py | 2 +- ietf/doc/tests_ballot.py | 2 +- ietf/doc/tests_draft.py | 2 +- ietf/doc/tests_material.py | 2 +- ietf/doc/tests_review.py | 2 +- ietf/doc/tests_tasks.py | 2 +- ietf/group/tests.py | 2 +- ietf/group/tests_info.py | 2 +- ietf/idindex/tests.py | 2 +- ietf/ipr/management/tests.py | 2 +- ietf/ipr/tests.py | 2 +- ietf/meeting/tests_models.py | 2 +- ietf/meeting/tests_tasks.py | 2 +- ietf/meeting/tests_utils.py | 2 +- ietf/meeting/tests_views.py | 2 +- ietf/message/tests.py | 2 +- ietf/nomcom/management/tests.py | 2 +- ietf/nomcom/tests.py | 2 +- ietf/person/tests.py | 2 +- ietf/review/tests.py | 2 +- ietf/submit/tests.py | 2 +- ietf/sync/tests.py | 2 +- ietf/utils/management/tests.py | 2 +- ietf/utils/tests.py | 2 +- requirements.txt | 2 -- 27 files changed, 26 insertions(+), 28 deletions(-) diff --git a/ietf/api/tests.py b/ietf/api/tests.py index 93a2195467..93515dd0cb 100644 --- a/ietf/api/tests.py +++ b/ietf/api/tests.py @@ -5,7 +5,7 @@ import datetime import json import html -import mock +from unittest import mock import os import sys diff --git a/ietf/community/tests.py b/ietf/community/tests.py index 1255ba46eb..04f1433d61 100644 --- a/ietf/community/tests.py +++ b/ietf/community/tests.py @@ -1,7 +1,7 @@ # Copyright The IETF Trust 2016-2023, All Rights Reserved # -*- coding: utf-8 -*- -import mock +from unittest import mock from pyquery import PyQuery from django.test.utils import override_settings diff --git a/ietf/doc/tests.py b/ietf/doc/tests.py index 1229df46c5..d3fba03bcc 100644 --- a/ietf/doc/tests.py +++ b/ietf/doc/tests.py @@ -10,7 +10,7 @@ from django.http import HttpRequest import lxml import bibtexparser -import mock 
+from unittest import mock import json import copy import random diff --git a/ietf/doc/tests_ballot.py b/ietf/doc/tests_ballot.py index ec23f3d491..810ee598f6 100644 --- a/ietf/doc/tests_ballot.py +++ b/ietf/doc/tests_ballot.py @@ -3,7 +3,7 @@ import datetime -import mock +from unittest import mock from pyquery import PyQuery diff --git a/ietf/doc/tests_draft.py b/ietf/doc/tests_draft.py index 4753c4ff0c..576feb0582 100644 --- a/ietf/doc/tests_draft.py +++ b/ietf/doc/tests_draft.py @@ -5,7 +5,7 @@ import os import datetime import io -import mock +from unittest import mock from collections import Counter from pathlib import Path diff --git a/ietf/doc/tests_material.py b/ietf/doc/tests_material.py index c87341c95b..04779bdaf1 100644 --- a/ietf/doc/tests_material.py +++ b/ietf/doc/tests_material.py @@ -6,7 +6,7 @@ import shutil import io -from mock import call, patch +from unittest.mock import call, patch from pathlib import Path from pyquery import PyQuery diff --git a/ietf/doc/tests_review.py b/ietf/doc/tests_review.py index 9850beca75..8c1fc99ffe 100644 --- a/ietf/doc/tests_review.py +++ b/ietf/doc/tests_review.py @@ -8,7 +8,7 @@ import os import shutil -from mock import patch, Mock +from unittest.mock import patch, Mock from requests import Response from django.apps import apps diff --git a/ietf/doc/tests_tasks.py b/ietf/doc/tests_tasks.py index 8a6ffa8be1..29689cd596 100644 --- a/ietf/doc/tests_tasks.py +++ b/ietf/doc/tests_tasks.py @@ -2,7 +2,7 @@ import debug # pyflakes:ignore import datetime -import mock +from unittest import mock from pathlib import Path diff --git a/ietf/group/tests.py b/ietf/group/tests.py index 31f8cc45b5..229744388c 100644 --- a/ietf/group/tests.py +++ b/ietf/group/tests.py @@ -3,7 +3,7 @@ import datetime import json -import mock +from unittest import mock from django.urls import reverse as urlreverse from django.db.models import Q diff --git a/ietf/group/tests_info.py b/ietf/group/tests_info.py index aaf937ee43..eb85860ece 100644 --- 
a/ietf/group/tests_info.py +++ b/ietf/group/tests_info.py @@ -6,7 +6,7 @@ import datetime import io import bleach -import mock +from unittest import mock from unittest.mock import call, patch from pathlib import Path diff --git a/ietf/idindex/tests.py b/ietf/idindex/tests.py index 5cc7a7b3bb..ba6100550d 100644 --- a/ietf/idindex/tests.py +++ b/ietf/idindex/tests.py @@ -3,7 +3,7 @@ import datetime -import mock +from unittest import mock from pathlib import Path from tempfile import TemporaryDirectory diff --git a/ietf/ipr/management/tests.py b/ietf/ipr/management/tests.py index d84b0cfef4..d7acd65042 100644 --- a/ietf/ipr/management/tests.py +++ b/ietf/ipr/management/tests.py @@ -1,7 +1,7 @@ # Copyright The IETF Trust 2021, All Rights Reserved # -*- coding: utf-8 -*- """Tests of ipr management commands""" -import mock +from unittest import mock import sys from django.core.management import call_command diff --git a/ietf/ipr/tests.py b/ietf/ipr/tests.py index e0d00b5d1a..74fa540126 100644 --- a/ietf/ipr/tests.py +++ b/ietf/ipr/tests.py @@ -3,7 +3,7 @@ import datetime -import mock +from unittest import mock import re from pyquery import PyQuery diff --git a/ietf/meeting/tests_models.py b/ietf/meeting/tests_models.py index e333ddad9a..869d9ec814 100644 --- a/ietf/meeting/tests_models.py +++ b/ietf/meeting/tests_models.py @@ -3,7 +3,7 @@ """Tests of models in the Meeting application""" import datetime -from mock import patch +from unittest.mock import patch from django.conf import settings from django.test import override_settings diff --git a/ietf/meeting/tests_tasks.py b/ietf/meeting/tests_tasks.py index 66de212899..0c442c4bf7 100644 --- a/ietf/meeting/tests_tasks.py +++ b/ietf/meeting/tests_tasks.py @@ -1,7 +1,7 @@ # Copyright The IETF Trust 2025, All Rights Reserved import datetime -from mock import patch, call +from unittest.mock import patch, call from ietf.utils.test_utils import TestCase from ietf.utils.timezone import date_today from .factories import 
MeetingFactory diff --git a/ietf/meeting/tests_utils.py b/ietf/meeting/tests_utils.py index 391e017d68..7dd8f435e1 100644 --- a/ietf/meeting/tests_utils.py +++ b/ietf/meeting/tests_utils.py @@ -7,7 +7,7 @@ import json import jsonschema from json import JSONDecodeError -from mock import patch, Mock +from unittest.mock import patch, Mock from django.http import HttpResponse, JsonResponse from ietf.meeting.factories import MeetingFactory, RegistrationFactory, RegistrationTicketFactory diff --git a/ietf/meeting/tests_views.py b/ietf/meeting/tests_views.py index 1aac2a6523..96a29c2297 100644 --- a/ietf/meeting/tests_views.py +++ b/ietf/meeting/tests_views.py @@ -12,7 +12,7 @@ import requests_mock from unittest import skipIf -from mock import call, patch, PropertyMock +from unittest.mock import call, patch, PropertyMock from pyquery import PyQuery from lxml.etree import tostring from io import StringIO, BytesIO diff --git a/ietf/message/tests.py b/ietf/message/tests.py index a677d5477e..e1bad9a1e6 100644 --- a/ietf/message/tests.py +++ b/ietf/message/tests.py @@ -1,7 +1,7 @@ # Copyright The IETF Trust 2013-2020, All Rights Reserved # -*- coding: utf-8 -*- import datetime -import mock +from unittest import mock from smtplib import SMTPException diff --git a/ietf/nomcom/management/tests.py b/ietf/nomcom/management/tests.py index 7bda2b5aa5..08c0e1fe32 100644 --- a/ietf/nomcom/management/tests.py +++ b/ietf/nomcom/management/tests.py @@ -1,7 +1,7 @@ # Copyright The IETF Trust 2021, All Rights Reserved # -*- coding: utf-8 -*- """Tests of nomcom management commands""" -import mock +from unittest import mock import sys from collections import namedtuple diff --git a/ietf/nomcom/tests.py b/ietf/nomcom/tests.py index ea17da6707..cc2e0826d3 100644 --- a/ietf/nomcom/tests.py +++ b/ietf/nomcom/tests.py @@ -4,7 +4,7 @@ import datetime import io -import mock +from unittest import mock import random import shutil diff --git a/ietf/person/tests.py b/ietf/person/tests.py index 
61d9b0ed70..6326362fd8 100644 --- a/ietf/person/tests.py +++ b/ietf/person/tests.py @@ -4,7 +4,7 @@ import datetime import json -import mock +from unittest import mock from io import StringIO, BytesIO from PIL import Image diff --git a/ietf/review/tests.py b/ietf/review/tests.py index e9ddbd47af..5dc8f11e8e 100644 --- a/ietf/review/tests.py +++ b/ietf/review/tests.py @@ -1,7 +1,7 @@ # Copyright The IETF Trust 2019-2020, All Rights Reserved # -*- coding: utf-8 -*- import datetime -import mock +from unittest import mock import debug # pyflakes:ignore from pyquery import PyQuery diff --git a/ietf/submit/tests.py b/ietf/submit/tests.py index 7e70c55965..6b9002502b 100644 --- a/ietf/submit/tests.py +++ b/ietf/submit/tests.py @@ -5,7 +5,7 @@ import datetime import email import io -import mock +from unittest import mock import os import re import sys diff --git a/ietf/sync/tests.py b/ietf/sync/tests.py index 14d65de0b2..182b6e24c4 100644 --- a/ietf/sync/tests.py +++ b/ietf/sync/tests.py @@ -6,7 +6,7 @@ import io import json import datetime -import mock +from unittest import mock import quopri import requests diff --git a/ietf/utils/management/tests.py b/ietf/utils/management/tests.py index d704999cd1..38be464c7f 100644 --- a/ietf/utils/management/tests.py +++ b/ietf/utils/management/tests.py @@ -1,7 +1,7 @@ # Copyright The IETF Trust 2013-2020, All Rights Reserved # -*- coding: utf-8 -*- -import mock +from unittest import mock from django.core.management import call_command, CommandError from django.test import override_settings diff --git a/ietf/utils/tests.py b/ietf/utils/tests.py index ce1842236d..01433888fe 100644 --- a/ietf/utils/tests.py +++ b/ietf/utils/tests.py @@ -11,7 +11,7 @@ import shutil import types -from mock import call, patch +from unittest.mock import call, patch from pyquery import PyQuery from typing import Dict, List # pyflakes:ignore diff --git a/requirements.txt b/requirements.txt index 4eb573ce36..8ed354192a 100644 --- a/requirements.txt +++ 
b/requirements.txt @@ -51,8 +51,6 @@ logging_tree>=1.9 # Used only by the showloggers management command lxml>=5.3.0 markdown>=3.3.6 types-markdown>=3.3.6 -mock>=4.0.3 # Used only by tests, of course -types-mock>=4.0.3 mypy~=1.7.0 # Version requirements determined by django-stubs. oic>=1.3 # Used only by tests Pillow>=9.1.0 From e0546b1543565c0a293d198db9b15f1dd5121600 Mon Sep 17 00:00:00 2001 From: Jennifer Richards Date: Mon, 4 Aug 2025 15:04:09 -0300 Subject: [PATCH 159/405] fix: blank=True for xml_version (#9285) --- .../0002_alter_submission_xml_version.py | 18 ++++++++++++++++++ ietf/submit/models.py | 2 +- 2 files changed, 19 insertions(+), 1 deletion(-) create mode 100644 ietf/submit/migrations/0002_alter_submission_xml_version.py diff --git a/ietf/submit/migrations/0002_alter_submission_xml_version.py b/ietf/submit/migrations/0002_alter_submission_xml_version.py new file mode 100644 index 0000000000..275e6efd95 --- /dev/null +++ b/ietf/submit/migrations/0002_alter_submission_xml_version.py @@ -0,0 +1,18 @@ +# Generated by Django 4.2.23 on 2025-08-01 19:33 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("submit", "0001_initial"), + ] + + operations = [ + migrations.AlterField( + model_name="submission", + name="xml_version", + field=models.CharField(blank=True, default=None, max_length=4, null=True), + ), + ] diff --git a/ietf/submit/models.py b/ietf/submit/models.py index 51f7541e31..1145f761b4 100644 --- a/ietf/submit/models.py +++ b/ietf/submit/models.py @@ -55,7 +55,7 @@ class Submission(models.Model): file_size = models.IntegerField(null=True, blank=True) document_date = models.DateField(null=True, blank=True) submission_date = models.DateField(default=date_today) - xml_version = models.CharField(null=True, max_length=4, default=None) + xml_version = models.CharField(null=True, blank=True, max_length=4, default=None) submitter = models.CharField(max_length=255, blank=True, help_text="Name 
and email of submitter, e.g. \"John Doe <john@example.org>\".") From 827f4e74a1b9a8e872634f31e9484a3dc8cd0842 Mon Sep 17 00:00:00 2001 From: Jennifer Richards Date: Mon, 4 Aug 2025 15:09:15 -0300 Subject: [PATCH 160/405] fix: escape nulls in XML api responses (#9283) * fix: escape nulls in XML api responses * refactor: use \u2400 instead of \0 Less likely to lead to null injection down the road * test: modern naming/python * test: test null char handling * test: remove unused vars --- ietf/api/__init__.py | 21 +++++++++++++++++++++ ietf/api/tests.py | 20 ++++++++++++++++++-- 2 files changed, 39 insertions(+), 2 deletions(-) diff --git a/ietf/api/__init__.py b/ietf/api/__init__.py index 9fadab8e6f..d70866083e 100644 --- a/ietf/api/__init__.py +++ b/ietf/api/__init__.py @@ -145,5 +145,26 @@ def dehydrate(self, bundle, for_list=True): class Serializer(tastypie.serializers.Serializer): + OPTION_ESCAPE_NULLS = "datatracker-escape-nulls" + def format_datetime(self, data): return data.astimezone(datetime.timezone.utc).replace(tzinfo=None).isoformat(timespec="seconds") + "Z" + + def to_simple(self, data, options): + options = options or {} + simple_data = super().to_simple(data, options) + if ( + options.get(self.OPTION_ESCAPE_NULLS, False) + and isinstance(simple_data, str) + ): + # replace nulls with unicode "symbol for null character", \u2400 + simple_data = simple_data.replace("\x00", "\u2400") + return simple_data + + def to_etree(self, data, options=None, name=None, depth=0): + # lxml does not escape nulls on its own, so ask to_simple() to do it. + # This is mostly (only?) an issue when generating errors responses for + # fuzzers. 
+ options = options or {} + options[self.OPTION_ESCAPE_NULLS] = True + return super().to_etree(data, options, name, depth) diff --git a/ietf/api/tests.py b/ietf/api/tests.py index 93515dd0cb..865f877bfb 100644 --- a/ietf/api/tests.py +++ b/ietf/api/tests.py @@ -41,6 +41,7 @@ from ietf.utils.models import DumpInfo from ietf.utils.test_utils import TestCase, login_testing_unauthorized, reload_db_objects +from . import Serializer from .ietf_utils import is_valid_token, requires_api_token from .views import EmailIngestionError @@ -1496,7 +1497,7 @@ def test_good_password(self): data = self.response_data(r) self.assertEqual(data["result"], "success") -class TastypieApiTestCase(ResourceTestCaseMixin, TestCase): +class TastypieApiTests(ResourceTestCaseMixin, TestCase): def __init__(self, *args, **kwargs): self.apps = {} for app_name in settings.INSTALLED_APPS: @@ -1506,7 +1507,7 @@ def __init__(self, *args, **kwargs): models_path = os.path.join(os.path.dirname(app.__file__), "models.py") if os.path.exists(models_path): self.apps[name] = app_name - super(TastypieApiTestCase, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) def test_api_top_level(self): client = Client(Accept='application/json') @@ -1541,6 +1542,21 @@ def test_all_model_resources_exist(self): self.assertIn(model._meta.model_name, list(app_resources.keys()), "There doesn't seem to be any API resource for model %s.models.%s"%(app.__name__,model.__name__,)) + def test_serializer_to_etree_handles_nulls(self): + """Serializer to_etree() should handle a null character""" + serializer = Serializer() + try: + serializer.to_etree("string with no nulls in it") + except ValueError: + self.fail("serializer.to_etree raised ValueError on an ordinary string") + try: + serializer.to_etree("string with a \x00 in it") + except ValueError: + self.fail( + "serializer.to_etree raised ValueError on a string " + "containing a null character" + ) + class RfcdiffSupportTests(TestCase): From 
8c4bff875398fb7fc7de624c8155c8377276cec0 Mon Sep 17 00:00:00 2001 From: Jennifer Richards Date: Mon, 4 Aug 2025 15:11:12 -0300 Subject: [PATCH 161/405] test: guard against empty path (#9282) --- ietf/utils/test_runner.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/ietf/utils/test_runner.py b/ietf/utils/test_runner.py index d0a5496283..a9b2e5d572 100644 --- a/ietf/utils/test_runner.py +++ b/ietf/utils/test_runner.py @@ -432,7 +432,11 @@ def get_template_paths(apps=None) -> list[str]: relative_path = pathlib.Path( dirpath.removeprefix(templatepath).lstrip("/") ) - if apps and relative_path.parts[0] not in apps: + if ( + apps + and len(relative_path.parts) > 0 + and relative_path.parts[0] not in apps + ): continue # skip uninteresting apps for filename in files: file_path = project_path / filename From 649de73cab6c3aed3e909280e80e02272bc362a8 Mon Sep 17 00:00:00 2001 From: Jennifer Richards Date: Mon, 4 Aug 2025 15:13:46 -0300 Subject: [PATCH 162/405] fix: validate review_requests_history params (#9281) * test: test null chars in GET params * fix: validate GET params --- ietf/group/tests_review.py | 39 +++++++++++++++++++++++++++++++++ ietf/group/views.py | 45 +++++++++++++++++++++++--------------- 2 files changed, 66 insertions(+), 18 deletions(-) diff --git a/ietf/group/tests_review.py b/ietf/group/tests_review.py index d671228953..89c755bb26 100644 --- a/ietf/group/tests_review.py +++ b/ietf/group/tests_review.py @@ -943,3 +943,42 @@ def test_requests_history_filter_page(self): self.assertNotContains(r, 'Assigned') self.assertNotContains(r, 'Accepted') self.assertNotContains(r, 'Completed') + + def test_requests_history_invalid_filter_parameters(self): + # First assignment as assigned + review_req = ReviewRequestFactory(state_id="assigned", doc=DocumentFactory()) + group = review_req.team + url = urlreverse( + "ietf.group.views.review_requests_history", + kwargs={"acronym": group.acronym}, + ) + invalid_reviewer_emails = [ + 
"%00null@example.com", # urlencoded null character + "null@exa%00mple.com", # urlencoded null character + "\x00null@example.com", # literal null character + "null@ex\x00ample.com", # literal null character + ] + for invalid_email in invalid_reviewer_emails: + r = self.client.get( + url + f"?reviewer_email={invalid_email}" + ) + self.assertEqual( + r.status_code, + 400, + f"should return a 400 response for reviewer_email={repr(invalid_email)}" + ) + + invalid_since_choices = [ + "forever", # not an option + "all\x00", # literal null character + "a%00ll", # urlencoded null character + ] + for invalid_since in invalid_since_choices: + r = self.client.get( + url + f"?since={invalid_since}" + ) + self.assertEqual( + r.status_code, + 400, + f"should return a 400 response for since={repr(invalid_since)}" + ) diff --git a/ietf/group/views.py b/ietf/group/views.py index bc79599722..3529b31f68 100644 --- a/ietf/group/views.py +++ b/ietf/group/views.py @@ -51,7 +51,13 @@ from django.contrib.auth.decorators import login_required from django.db.models import Count, F, OuterRef, Prefetch, Q, Subquery, TextField, Value from django.db.models.functions import Coalesce -from django.http import HttpResponse, HttpResponseRedirect, Http404, JsonResponse +from django.http import ( + HttpResponse, + HttpResponseRedirect, + Http404, + JsonResponse, + HttpResponseBadRequest, +) from django.shortcuts import render, redirect, get_object_or_404 from django.template.loader import render_to_string from django.urls import reverse as urlreverse @@ -96,11 +102,9 @@ from ietf.review.policies import get_reviewer_queue_policy from ietf.review.utils import (can_manage_review_requests_for_team, can_access_review_stats_for_team, - extract_revision_ordered_review_requests_for_documents_and_replaced, assign_review_request_to_reviewer, close_review_request, - suggested_review_requests_for_team, unavailable_periods_to_list, current_unavailable_periods_for_reviewers, @@ -686,13 +690,30 @@ def 
history(request, acronym, group_type=None): "can_add_comment": can_add_comment, })) + +class RequestsHistoryParamsForm(forms.Form): + SINCE_CHOICES = ( + (None, "1 month"), + ("3m", "3 months"), + ("6m", "6 months"), + ("1y", "1 year"), + ("2y", "2 years"), + ("all", "All"), + ) + + reviewer_email = forms.EmailField(required=False) + since = forms.ChoiceField(choices=SINCE_CHOICES, required=False) + def review_requests_history(request, acronym, group_type=None): group = get_group_or_404(acronym, group_type) if not group.features.has_reviews: raise Http404 - reviewer_email = request.GET.get("reviewer_email", None) + params = RequestsHistoryParamsForm(request.GET) + if not params.is_valid(): + return HttpResponseBadRequest("Invalid parameters") + reviewer_email = params.cleaned_data["reviewer_email"] or None if reviewer_email: history = ReviewAssignment.history.model.objects.filter( review_request__team__acronym=acronym, @@ -702,19 +723,7 @@ def review_requests_history(request, acronym, group_type=None): review_request__team__acronym=acronym) reviewer_email = '' - since_choices = [ - (None, "1 month"), - ("3m", "3 months"), - ("6m", "6 months"), - ("1y", "1 year"), - ("2y", "2 years"), - ("all", "All"), - ] - since = request.GET.get("since", None) - - if since not in [key for key, label in since_choices]: - since = None - + since = params.cleaned_data["since"] or None if since != "all": date_limit = { None: datetime.timedelta(days=31), @@ -731,7 +740,7 @@ def review_requests_history(request, acronym, group_type=None): "group": group, "acronym": acronym, "history": history, - "since_choices": since_choices, + "since_choices": params.SINCE_CHOICES, "since": since, "reviewer_email": reviewer_email })) From 39165a0b5d079459da601ea82f87c9981f6508b7 Mon Sep 17 00:00:00 2001 From: Jennifer Richards Date: Mon, 4 Aug 2025 15:25:54 -0300 Subject: [PATCH 163/405] fix: serve materials w/mixed-case exts (#9273) * fix: serve materials w/ mixed-case exts * fix: another 
endpoint+reorder regex --- ietf/meeting/urls.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/ietf/meeting/urls.py b/ietf/meeting/urls.py index 5da24ddb6f..18b123b4d8 100644 --- a/ietf/meeting/urls.py +++ b/ietf/meeting/urls.py @@ -64,7 +64,7 @@ def get_redirect_url(self, *args, **kwargs): type_interim_patterns = [ url(r'^agenda/(?P[A-Za-z0-9-]+)-drafts.pdf$', views.session_draft_pdf), url(r'^agenda/(?P[A-Za-z0-9-]+)-drafts.tgz$', views.session_draft_tarfile), - url(r'^materials/%(document)s(?P\.[a-z0-9]+)$' % settings.URL_REGEXPS, views.materials_document), + url(r'^materials/%(document)s(?P\.[A-Za-z0-9]+)$' % settings.URL_REGEXPS, views.materials_document), url(r'^materials/%(document)s/?$' % settings.URL_REGEXPS, views.materials_document), url(r'^agenda.json$', views.agenda_json) ] @@ -85,7 +85,7 @@ def get_redirect_url(self, *args, **kwargs): url(r'^week-view(?:.html)?/?$', AgendaRedirectView.as_view(pattern_name='agenda', permanent=True)), url(r'^materials(?:.html)?/?$', views.materials), url(r'^request_minutes/?$', views.request_minutes), - url(r'^materials/%(document)s(?P\.[a-z0-9]+)?/?$' % settings.URL_REGEXPS, views.materials_document), + url(r'^materials/%(document)s(?P\.[A-Za-z0-9]+)?/?$' % settings.URL_REGEXPS, views.materials_document), url(r'^session/?$', views.materials_editable_groups), url(r'^proceedings(?:.html)?/?$', views.proceedings), url(r'^proceedings(?:.html)?/finalize/?$', views.finalize_proceedings), From afb0d2d245a11384d73e8f0cc0d31150dd91f80a Mon Sep 17 00:00:00 2001 From: Jennifer Richards Date: Mon, 4 Aug 2025 17:16:47 -0300 Subject: [PATCH 164/405] chore(deps): pin jsonfield version (#9267) At least nominally, 3.2.0 requires py3.10. Package is deprecated. 
--- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 8ed354192a..1b00cf81a2 100644 --- a/requirements.txt +++ b/requirements.txt @@ -44,7 +44,7 @@ hashids>=1.3.1 html2text>=2020.1.16 # Used only to clean comment field of secr/sreq html5lib>=1.1 # Only used in tests inflect>= 6.0.2 -jsonfield>=3.1.0 # for SubmissionCheck. This is https://github.com/bradjasper/django-jsonfield/. +jsonfield>=3.1.0,<3.2.0 # 3.2.0 needs py3.10; deprecated-replace with Django JSONField jsonschema[format]>=4.2.1 jwcrypto>=1.2 # for signed notifications - this is aspirational, and is not really used. logging_tree>=1.9 # Used only by the showloggers management command From 6494ce880631ce798424a3a57db88f50f6ebf370 Mon Sep 17 00:00:00 2001 From: rjsparks <10996692+rjsparks@users.noreply.github.com> Date: Mon, 4 Aug 2025 20:29:21 +0000 Subject: [PATCH 165/405] ci: update base image target version to 20250804T2017 --- dev/build/Dockerfile | 2 +- dev/build/TARGET_BASE | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/dev/build/Dockerfile b/dev/build/Dockerfile index 8317195446..662aee950f 100644 --- a/dev/build/Dockerfile +++ b/dev/build/Dockerfile @@ -1,4 +1,4 @@ -FROM ghcr.io/ietf-tools/datatracker-app-base:20250624T1543 +FROM ghcr.io/ietf-tools/datatracker-app-base:20250804T2017 LABEL maintainer="IETF Tools Team " ENV DEBIAN_FRONTEND=noninteractive diff --git a/dev/build/TARGET_BASE b/dev/build/TARGET_BASE index 1f2e39a0a2..d2c3dd6fc9 100644 --- a/dev/build/TARGET_BASE +++ b/dev/build/TARGET_BASE @@ -1 +1 @@ -20250624T1543 +20250804T2017 From e220bc89b464aab54e11c6698f074ed51982c715 Mon Sep 17 00:00:00 2001 From: Eric Vyncke Date: Mon, 4 Aug 2025 22:43:44 +0200 Subject: [PATCH 166/405] Add link to reviewers's reviews (#9272) --- ietf/templates/person/profile.html | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/ietf/templates/person/profile.html 
b/ietf/templates/person/profile.html index 1424f037a1..a78a90412f 100644 --- a/ietf/templates/person/profile.html +++ b/ietf/templates/person/profile.html @@ -50,7 +50,11 @@

    Roles

    {{ role.name.name }}{{ role.name.name }} + {% if role.name.name == 'Reviewer' %} + (See reviews) + {% endif %} + {{ role.group.name }} ({{ role.group.acronym }}) From e3b87d9459c597731d828482bd13041931fe0a2d Mon Sep 17 00:00:00 2001 From: Jennifer Richards Date: Mon, 4 Aug 2025 22:28:31 -0300 Subject: [PATCH 167/405] chore: different celery path for sandboxes (#9300) * chore: different celery path for sandboxes * chore: typo --- docker/scripts/app-init-celery.sh | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/docker/scripts/app-init-celery.sh b/docker/scripts/app-init-celery.sh index 5788b943da..17925633d2 100755 --- a/docker/scripts/app-init-celery.sh +++ b/docker/scripts/app-init-celery.sh @@ -92,7 +92,14 @@ fi USER_BIN_PATH="/home/dev/.local/bin" WATCHMEDO="$USER_BIN_PATH/watchmedo" -CELERY="$USER_BIN_PATH/celery" +# Find a celery that works +if [[ -x "$USER_BIN_PATH/celery" ]]; then + # This branch is used for dev + CELERY="$USER_BIN_PATH/celery" +else + # This branch is used for sandbox instances + CELERY="/usr/local/bin/celery" +fi trap 'trap "" TERM; cleanup' TERM # start celery in the background so we can trap the TERM signal if [[ -n "${DEV_MODE}" && -x "${WATCHMEDO}" ]]; then From b8e135b928f9d67c83e6ef6fda6c273fdb106748 Mon Sep 17 00:00:00 2001 From: Jennifer Richards Date: Tue, 5 Aug 2025 14:38:30 -0300 Subject: [PATCH 168/405] chore: remove find_github_backup_info.py (#9307) --- .../commands/find_github_backup_info.py | 74 ------------------- requirements.txt | 1 - 2 files changed, 75 deletions(-) delete mode 100644 ietf/doc/management/commands/find_github_backup_info.py diff --git a/ietf/doc/management/commands/find_github_backup_info.py b/ietf/doc/management/commands/find_github_backup_info.py deleted file mode 100644 index f1f71452df..0000000000 --- a/ietf/doc/management/commands/find_github_backup_info.py +++ /dev/null @@ -1,74 +0,0 @@ -# Copyright The IETF Trust 2020, All Rights Reserved - - -import github3 - -from 
collections import Counter -from urllib.parse import urlparse - -from django.conf import settings -from django.core.management.base import BaseCommand, CommandError - -from ietf.doc.models import DocExtResource -from ietf.group.models import GroupExtResource -from ietf.person.models import PersonExtResource - -# TODO: Think more about submodules. This currently will only take top level repos, with the assumption that the clone will include arguments to grab all the submodules. -# As a consequence, we might end up pulling more than we need (or that the org or user expected) -# Make sure this is what we want. - -class Command(BaseCommand): - help = ('Locate information about github repositories to backup') - - def add_arguments(self, parser): - parser.add_argument('--verbose', dest='verbose', action='store_true', help='Show counts of types of repositories') - - def handle(self, *args, **options): - - if not (hasattr(settings,'GITHUB_BACKUP_API_KEY') and settings.GITHUB_BACKUP_API_KEY): - raise CommandError("ERROR: can't find GITHUB_BACKUP_API_KEY") # TODO: at >= py3.1, use returncode - - github = github3.login(token = settings.GITHUB_BACKUP_API_KEY) - owners = dict() - repos = set() - - for cls in (DocExtResource, GroupExtResource, PersonExtResource): - for res in cls.objects.filter(name_id__in=('github_repo','github_org')): - path_parts = urlparse(res.value).path.strip('/').split('/') - if not path_parts or not path_parts[0]: - continue - - owner = path_parts[0] - - if owner not in owners: - try: - gh_owner = github.user(username=owner) - owners[owner] = gh_owner - except github3.exceptions.NotFoundError: - continue - - if gh_owner.type in ('User', 'Organization'): - if len(path_parts) > 1: - repo = path_parts[1] - if (owner, repo) not in repos: - try: - github.repository(owner,repo) - repos.add( (owner, repo) ) - except github3.exceptions.NotFoundError: - continue - else: - for repo in github.repositories_by(owner): - repos.add( (owner, repo.name) ) - - owner_types 
= Counter([owners[owner].type for owner in owners]) - if options['verbose']: - self.stdout.write("Owners:") - for key in owner_types: - self.stdout.write(" %s: %s"%(key,owner_types[key])) - self.stdout.write("Repositories: %d" % len(repos)) - for repo in sorted(repos): - self.stdout.write(" https://github.com/%s/%s" % repo ) - else: - for repo in sorted(repos): - self.stdout.write("%s/%s" % repo ) - diff --git a/requirements.txt b/requirements.txt index 1b00cf81a2..f82bfc4101 100644 --- a/requirements.txt +++ b/requirements.txt @@ -38,7 +38,6 @@ drf-spectacular>=0.27 drf-standardized-errors[openapi] >= 0.14 types-docutils>=0.18.1 factory-boy>=3.3 -github3.py>=3.2.0 gunicorn>=20.1.0 hashids>=1.3.1 html2text>=2020.1.16 # Used only to clean comment field of secr/sreq From beb873efc8a98cc5fe144304ebc050faeb814371 Mon Sep 17 00:00:00 2001 From: rjsparks <10996692+rjsparks@users.noreply.github.com> Date: Tue, 5 Aug 2025 17:51:24 +0000 Subject: [PATCH 169/405] ci: update base image target version to 20250805T1738 --- dev/build/Dockerfile | 2 +- dev/build/TARGET_BASE | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/dev/build/Dockerfile b/dev/build/Dockerfile index 662aee950f..3d5520babe 100644 --- a/dev/build/Dockerfile +++ b/dev/build/Dockerfile @@ -1,4 +1,4 @@ -FROM ghcr.io/ietf-tools/datatracker-app-base:20250804T2017 +FROM ghcr.io/ietf-tools/datatracker-app-base:20250805T1738 LABEL maintainer="IETF Tools Team " ENV DEBIAN_FRONTEND=noninteractive diff --git a/dev/build/TARGET_BASE b/dev/build/TARGET_BASE index d2c3dd6fc9..90d83abf03 100644 --- a/dev/build/TARGET_BASE +++ b/dev/build/TARGET_BASE @@ -1 +1 @@ -20250804T2017 +20250805T1738 From ebe6fbf046590c9b6f08560075b760f2164f1f2a Mon Sep 17 00:00:00 2001 From: Jennifer Richards Date: Thu, 7 Aug 2025 12:13:41 -0300 Subject: [PATCH 170/405] feat: personless User deletion via admin (#9312) * feat: admin to allow user deletion * fix: permissions + drop dangerous action * chore: minor style lint * fix: 
avoid limit on a queryset delete * feat: User age filter * feat: show useful fields on User admin * chore: fix lint * fix: reverse direction of age filter --- ietf/ietfauth/admin.py | 136 +++++++++++++++++++++++++++++++++++++++++ 1 file changed, 136 insertions(+) create mode 100644 ietf/ietfauth/admin.py diff --git a/ietf/ietfauth/admin.py b/ietf/ietfauth/admin.py new file mode 100644 index 0000000000..c2914f9efa --- /dev/null +++ b/ietf/ietfauth/admin.py @@ -0,0 +1,136 @@ +# Copyright The IETF Trust 2025, All Rights Reserved +import datetime + +from django.conf import settings +from django.contrib import admin, messages +from django.contrib.admin import action +from django.contrib.admin.actions import delete_selected as default_delete_selected +from django.contrib.auth.admin import UserAdmin +from django.contrib.auth.models import User +from django.utils import timezone + + +# Replace default UserAdmin with our custom one +admin.site.unregister(User) + + +class AgeListFilter(admin.SimpleListFilter): + title = "account age" + parameter_name = "age" + + def lookups(self, request, model_admin): + return [ + ("1day", "> 1 day"), + ("3days", "> 3 days"), + ("1week", "> 1 week"), + ("1month", "> 1 month"), + ("1year", "> 1 year"), + ] + + def queryset(self, request, queryset): + deltas = { + "1day": datetime.timedelta(days=1), + "3days": datetime.timedelta(days=3), + "1week": datetime.timedelta(weeks=1), + "1month": datetime.timedelta(days=30), + "1year": datetime.timedelta(days=365), + } + if self.value(): + return queryset.filter(date_joined__lt=timezone.now()-deltas[self.value()]) + return queryset + + +@admin.register(User) +class CustomUserAdmin(UserAdmin): + list_display = ( + "username", + "person", + "date_joined", + "last_login", + "is_staff", + ) + list_filter = list(UserAdmin.list_filter) + [ + AgeListFilter, + ("person", admin.EmptyFieldListFilter), + ] + actions = ["delete_selected"] + + @action( + permissions=["delete"], description="Delete personless 
%(verbose_name_plural)s" + ) + def delete_selected(self, request, queryset): + """Delete selected action restricted to Users with a null Person field + + This displaces the default delete_selected action with a safer one that will + only delete personless Users. It is done this way instead of by introducing + a new action so that we can simply hand off to the default action (imported + as default_delete_selected()) without having to adjust its template (and maybe + other things) to make it work with a different action name. + """ + already_confirmed = bool(request.POST.get("post")) + personless_queryset = queryset.filter(person__isnull=True) + original_count = queryset.count() + personless_count = personless_queryset.count() + if personless_count > original_count: + # Refuse to act if the count increased! + self.message_user( + request, + ( + "Limiting the selection to Users without a Person INCREASED the " + "count from {} to {}. This should not happen and probably means a " + "concurrent change to the database affected this request. Please " + "try again.".format(original_count, personless_count) + ), + level=messages.ERROR, + ) + return None # return to changelist + + # Display warning/info if this is showing the confirmation page + if not already_confirmed: + if personless_count < original_count: + self.message_user( + request, + ( + "Limiting the selection to Users without a Person reduced the " + "count from {} to {}. Only {} will be deleted.".format( + original_count, personless_count, personless_count + ) + ), + level=messages.WARNING, + ) + else: + self.message_user( + request, + "Confirmed that all selected Users had no Persons.", + ) + + # Django limits the number of fields in a request. The delete form itself + # includes a few metadata fields, so give it a little padding. The default + # limit is 1000 and everything will break if it's a small number, so not + # bothering to check that it's > 10. 
+ max_count = settings.DATA_UPLOAD_MAX_NUMBER_FIELDS - 10 + if personless_count > max_count: + self.message_user( + request, + ( + f"Only {max_count} Users can be deleted at once. Will only delete " + f"the first {max_count} selected Personless Users." + ), + level=messages.WARNING, + ) + # delete() doesn't like a queryset limited via [:max_count], so do an + # equivalent filter. + last_to_delete = personless_queryset.order_by("pk")[max_count] + personless_queryset = personless_queryset.filter(pk__lt=last_to_delete.pk) + + if already_confirmed and personless_count != original_count: + # After confirmation, none of the above filtering should change anything. + # Refuse to delete if the DB moved underneath us. + self.message_user( + request, + "Queryset count changed, nothing deleted. Please try again.", + level=messages.ERROR, + ) + return None + + return default_delete_selected(self, request, personless_queryset) From 86bce86731048f2dde04ace47af6425b775e23e9 Mon Sep 17 00:00:00 2001 From: Rudi Matz Date: Thu, 7 Aug 2025 11:14:20 -0400 Subject: [PATCH 171/405] feat: use icalendar instead manual template (#9187) * feat: use icalendar instead manual template * avoid code duplication * code cleanup * ruff ruff * remove comments * add custom field with meeting's local Time zone * more code cleanup * remove unused template for ical * pyflakes: remove unused imports and vars * improve tests and code coverage * remove commented line * change URL in ical to use session material page --- ietf/meeting/tests_views.py | 53 +++++++---- ietf/meeting/views.py | 148 +++++++++++++++++++++++++++--- ietf/templates/meeting/agenda.ics | 32 ------- requirements.txt | 1 + 4 files changed, 168 insertions(+), 66 deletions(-) delete mode 100644 ietf/templates/meeting/agenda.ics diff --git a/ietf/meeting/tests_views.py b/ietf/meeting/tests_views.py index 96a29c2297..f382772485 100644 --- a/ietf/meeting/tests_views.py +++ b/ietf/meeting/tests_views.py @@ -15,6 +15,7 @@ from unittest.mock 
import call, patch, PropertyMock from pyquery import PyQuery from lxml.etree import tostring +from icalendar import Calendar from io import StringIO, BytesIO from bs4 import BeautifulSoup from urllib.parse import urlparse, urlsplit @@ -384,9 +385,6 @@ def test_meeting_agenda(self): r = self.client.get(ical_url) assert_ical_response_is_valid(self, r) - self.assertContains(r, "BEGIN:VTIMEZONE") - self.assertContains(r, "END:VTIMEZONE") - self.assertContains(r, meeting.time_zone, msg_prefix="time_zone should appear in its original case") self.assertNotEqual( meeting.time_zone, meeting.time_zone.lower(), @@ -405,21 +403,32 @@ def test_meeting_agenda(self): assert_ical_response_is_valid(self, r) self.assertContains(r, session.group.acronym) self.assertContains(r, session.group.name) - self.assertContains(r, session.remote_instructions) - self.assertContains(r, slot.location.name) - self.assertContains(r, 'https://onsite.example.com') - self.assertContains(r, 'https://meetecho.example.com') - self.assertContains(r, "BEGIN:VTIMEZONE") - self.assertContains(r, "END:VTIMEZONE") - self.assertContains(r, session.agenda().get_href()) - self.assertContains( - r, + cal = Calendar.from_ical(r.content) + events = [component for component in cal.walk() if component.name == "VEVENT"] + + self.assertEqual(len(events), 2) + self.assertIn(session.remote_instructions, events[0].get('description')) + self.assertIn("Onsite tool: https://onsite.example.com", events[0].get('description')) + self.assertIn("Meetecho: https://meetecho.example.com", events[0].get('description')) + self.assertIn(f"Agenda {session.agenda().get_href()}", events[0].get('description')) + session_materials_url = settings.IDTRACKER_BASE_URL + urlreverse( + 'ietf.meeting.views.session_details', + kwargs=dict(num=meeting.number, acronym=session.group.acronym) + ) + self.assertIn(f"Session materials: {session_materials_url}", events[0].get('description')) + self.assertIn( urlreverse( 'ietf.meeting.views.session_details', 
kwargs=dict(num=meeting.number, acronym=session.group.acronym)), - msg_prefix='ical should contain link to meeting materials page for session') + events[0].get('description')) + self.assertEqual( + session_materials_url, + events[0].get('url') + ) + self.assertContains(r, f"LOCATION:{slot.location.name}") + # Floor Plan r = self.client.get(urlreverse('floor-plan', kwargs=dict(num=meeting.number))) self.assertEqual(r.status_code, 200) @@ -1049,32 +1058,36 @@ def test_group_ical(self): s1 = Session.objects.filter(meeting=meeting, group__acronym="mars").first() a1 = s1.official_timeslotassignment() t1 = a1.timeslot + # Create an extra session t2 = TimeSlotFactory.create( meeting=meeting, - time=meeting.tz().localize( + time=pytz.utc.localize( datetime.datetime.combine(meeting.date, datetime.time(11, 30)) ) ) + s2 = SessionFactory.create(meeting=meeting, group=s1.group, add_to_schedule=False) SchedTimeSessAssignment.objects.create(timeslot=t2, session=s2, schedule=meeting.schedule) - # + url = urlreverse('ietf.meeting.views.agenda_ical', kwargs={'num':meeting.number, 'acronym':s1.group.acronym, }) r = self.client.get(url) assert_ical_response_is_valid(self, r, expected_event_summaries=['mars - Martian Special Interest Group'], expected_event_count=2) - self.assertContains(r, t1.local_start_time().strftime('%Y%m%dT%H%M%S')) - self.assertContains(r, t2.local_start_time().strftime('%Y%m%dT%H%M%S')) - # + self.assertContains(r, f"DTSTART:{t1.time.strftime('%Y%m%dT%H%M%SZ')}") + self.assertContains(r, f"DTEND:{(t1.time + t1.duration).strftime('%Y%m%dT%H%M%SZ')}") + self.assertContains(r, f"DTSTART:{t2.time.strftime('%Y%m%dT%H%M%SZ')}") + self.assertContains(r, f"DTEND:{(t2.time + t2.duration).strftime('%Y%m%dT%H%M%SZ')}") + url = urlreverse('ietf.meeting.views.agenda_ical', kwargs={'num':meeting.number, 'session_id':s1.id, }) r = self.client.get(url) assert_ical_response_is_valid(self, r, expected_event_summaries=['mars - Martian Special Interest Group'], 
expected_event_count=1) - self.assertContains(r, t1.local_start_time().strftime('%Y%m%dT%H%M%S')) - self.assertNotContains(r, t2.local_start_time().strftime('%Y%m%dT%H%M%S')) + self.assertContains(r, f"DTSTART:{t1.time.strftime('%Y%m%dT%H%M%SZ')}") + self.assertNotContains(r, f"DTSTART:{t2.time.strftime('%Y%m%dT%H%M%SZ')}") def test_parse_agenda_filter_params(self): def _r(show=(), hide=(), showtypes=(), hidetypes=()): diff --git a/ietf/meeting/views.py b/ietf/meeting/views.py index 85eda5a8f4..7fa3d21259 100644 --- a/ietf/meeting/views.py +++ b/ietf/meeting/views.py @@ -118,6 +118,9 @@ UploadAgendaForm, UploadBlueSheetForm, UploadMinutesForm, UploadSlidesForm, UploadNarrativeMinutesForm) +from icalendar import Calendar, Event +from ietf.doc.templatetags.ietf_filters import absurl + request_summary_exclude_group_types = ['team'] @@ -137,6 +140,10 @@ def send_interim_change_notice(request, meeting): message.related_groups.add(group) send_mail_message(request, message) +def parse_ical_line_endings(ical): + """Parse icalendar line endings to ensure they are RFC 5545 compliant""" + return re.sub(r'\r(?!\n)|(?=20.1.0 hashids>=1.3.1 html2text>=2020.1.16 # Used only to clean comment field of secr/sreq html5lib>=1.1 # Only used in tests +icalendar>=5.0.0 inflect>= 6.0.2 jsonfield>=3.1.0,<3.2.0 # 3.2.0 needs py3.10; deprecated-replace with Django JSONField jsonschema[format]>=4.2.1 From 666e9c53b45bc2eeda80ccbea66abeaa01df9830 Mon Sep 17 00:00:00 2001 From: rjsparks <10996692+rjsparks@users.noreply.github.com> Date: Thu, 7 Aug 2025 15:27:02 +0000 Subject: [PATCH 172/405] ci: update base image target version to 20250807T1514 --- dev/build/Dockerfile | 2 +- dev/build/TARGET_BASE | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/dev/build/Dockerfile b/dev/build/Dockerfile index 3d5520babe..0ecf9566ef 100644 --- a/dev/build/Dockerfile +++ b/dev/build/Dockerfile @@ -1,4 +1,4 @@ -FROM ghcr.io/ietf-tools/datatracker-app-base:20250805T1738 +FROM 
ghcr.io/ietf-tools/datatracker-app-base:20250807T1514 LABEL maintainer="IETF Tools Team " ENV DEBIAN_FRONTEND=noninteractive diff --git a/dev/build/TARGET_BASE b/dev/build/TARGET_BASE index 90d83abf03..327fb48da4 100644 --- a/dev/build/TARGET_BASE +++ b/dev/build/TARGET_BASE @@ -1 +1 @@ -20250805T1738 +20250807T1514 From a1a19e5b6cef8ddc8b6443b2e2844ee20be988fa Mon Sep 17 00:00:00 2001 From: Jennifer Richards Date: Mon, 11 Aug 2025 14:00:51 -0300 Subject: [PATCH 173/405] fix: tz-aware tastypie datetimes (#9330) * fix: tz-aware tastypie datetimes * chore: comment * chore: clarify comment --- ietf/api/__init__.py | 33 +++++++++++++++++++++++++++++++++ 1 file changed, 33 insertions(+) diff --git a/ietf/api/__init__.py b/ietf/api/__init__.py index d70866083e..e236347975 100644 --- a/ietf/api/__init__.py +++ b/ietf/api/__init__.py @@ -4,6 +4,7 @@ import datetime import re +import sys from urllib.parse import urlencode @@ -25,6 +26,9 @@ OMITTED_APPS_APIS = ["ietf.status"] +# Pre-py3.11, fromisoformat() does not handle Z or +HH tz offsets +HAVE_BROKEN_FROMISOFORMAT = sys.version_info < (3, 11, 0, "", 0) + def populate_api_list(): _module_dict = globals() for app_config in django_apps.get_app_configs(): @@ -58,6 +62,35 @@ def generate_cache_key(self, *args, **kwargs): # Use a list plus a ``.join()`` because it's faster than concatenation. 
return "%s:%s:%s:%s" % (self._meta.api_name, self._meta.resource_name, ':'.join(args), smooshed) + def _z_aware_fromisoformat(self, value): + """datetime.datetie.fromisoformat replacement that works with python < 3.11""" + if HAVE_BROKEN_FROMISOFORMAT: + if value.upper().endswith("Z"): + value = value[:-1] + "+00:00" # Z -> UTC + elif re.match(r"[+-][0-9][0-9]$", value[-3:]): + value = value + ":00" # -04 -> -04:00 + return value + + def filter_value_to_python( + self, value, field_name, filters, filter_expr, filter_type + ): + py_value = super().filter_value_to_python( + value, field_name, filters, filter_expr, filter_type + ) + if isinstance( + self.fields[field_name], tastypie.fields.DateTimeField + ) and isinstance(py_value, str): + # Ensure datetime values are TZ-aware, using UTC by default + try: + dt = self._z_aware_fromisoformat(py_value) + except ValueError: + pass # let tastypie deal with the original value + else: + if dt.tzinfo is None: + dt = dt.replace(tzinfo=datetime.timezone.utc) + py_value = dt.isoformat() + return py_value + TIMEDELTA_REGEX = re.compile(r'^(?P\d+d)?\s?(?P\d+h)?\s?(?P\d+m)?\s?(?P\d+s?)$') From 369e38f0b2ffe564340db6b6bcd64124d3190aa8 Mon Sep 17 00:00:00 2001 From: Eric Vyncke Date: Mon, 11 Aug 2025 19:05:59 +0200 Subject: [PATCH 174/405] Less shaded dark theme colors (#9335) --- ietf/static/css/ietf.scss | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/ietf/static/css/ietf.scss b/ietf/static/css/ietf.scss index 014213c3e5..df973863d5 100644 --- a/ietf/static/css/ietf.scss +++ b/ietf/static/css/ietf.scss @@ -510,23 +510,23 @@ td.position-empty { tr.position-notready-row, tr.position-discuss-row, tr.position-block-row { - background-color: shade-color($color-discuss, 85%); + background-color: shade-color($color-discuss, 65%); } tr.position-yes-row { - background-color: shade-color($color-yes, 75%); + background-color: shade-color($color-yes, 65%); } tr.position-noobj-row { - background-color: 
shade-color($color-noobj, 75%); + background-color: shade-color($color-noobj, 65%); } tr.position-abstain-row { - background-color: shade-color($color-abstain, 85%); + background-color: shade-color($color-abstain, 65%); } tr.position-recuse-row { - background-color: shade-color($color-recuse, 85%); + background-color: shade-color($color-recuse, 65%); } } From 37e3ffc8c41ad5e15464af4d242b2d6ee09c3775 Mon Sep 17 00:00:00 2001 From: Robert Sparks Date: Mon, 11 Aug 2025 18:37:43 -0500 Subject: [PATCH 175/405] fix: allow RFC Editor to add comments to RFC history (#9344) --- ietf/doc/tests.py | 34 ++++++++++++++++++++++++---------- ietf/doc/views_doc.py | 2 +- 2 files changed, 25 insertions(+), 11 deletions(-) diff --git a/ietf/doc/tests.py b/ietf/doc/tests.py index d3fba03bcc..fa8c7fa4fc 100644 --- a/ietf/doc/tests.py +++ b/ietf/doc/tests.py @@ -66,7 +66,7 @@ from ietf.name.models import SessionStatusName, BallotPositionName, DocTypeName, RoleName from ietf.person.models import Person from ietf.person.factories import PersonFactory, EmailFactory -from ietf.utils.mail import outbox, empty_outbox +from ietf.utils.mail import get_payload_text, outbox, empty_outbox from ietf.utils.test_utils import login_testing_unauthorized, unicontent from ietf.utils.test_utils import TestCase from ietf.utils.text import normalize_text @@ -2172,20 +2172,19 @@ def test_trailing_hypen_digit_name_bibxml(self): class AddCommentTestCase(TestCase): def test_add_comment(self): - draft = WgDraftFactory(name='draft-ietf-mars-test',group__acronym='mars') - url = urlreverse('ietf.doc.views_doc.add_comment', kwargs=dict(name=draft.name)) + draft = WgDraftFactory(name="draft-ietf-mars-test", group__acronym="mars") + url = urlreverse("ietf.doc.views_doc.add_comment", kwargs=dict(name=draft.name)) login_testing_unauthorized(self, "secretary", url) # normal get r = self.client.get(url) self.assertEqual(r.status_code, 200) q = PyQuery(unicontent(r)) - self.assertEqual(len(q('form 
textarea[name=comment]')), 1) + self.assertEqual(len(q("form textarea[name=comment]")), 1) - # request resurrect events_before = draft.docevent_set.count() mailbox_before = len(outbox) - + r = self.client.post(url, dict(comment="This is a test.")) self.assertEqual(r.status_code, 302) @@ -2193,9 +2192,9 @@ def test_add_comment(self): self.assertEqual("This is a test.", draft.latest_event().desc) self.assertEqual("added_comment", draft.latest_event().type) self.assertEqual(len(outbox), mailbox_before + 1) - self.assertIn("Comment added", outbox[-1]['Subject']) - self.assertIn(draft.name, outbox[-1]['Subject']) - self.assertIn('draft-ietf-mars-test@', outbox[-1]['To']) + self.assertIn("Comment added", outbox[-1]["Subject"]) + self.assertIn(draft.name, outbox[-1]["Subject"]) + self.assertIn("draft-ietf-mars-test@", outbox[-1]["To"]) # Make sure we can also do it as IANA self.client.login(username="iana", password="iana+password") @@ -2204,7 +2203,22 @@ def test_add_comment(self): r = self.client.get(url) self.assertEqual(r.status_code, 200) q = PyQuery(unicontent(r)) - self.assertEqual(len(q('form textarea[name=comment]')), 1) + self.assertEqual(len(q("form textarea[name=comment]")), 1) + + empty_outbox() + rfc = WgRfcFactory() + self.client.login(username="rfc", password="rfc+password") + url = urlreverse("ietf.doc.views_doc.add_comment", kwargs=dict(name=rfc.name)) + r = self.client.post( + url, dict(comment="This is an RFC Editor comment on an RFC.") + ) + self.assertEqual(r.status_code, 302) + + self.assertEqual( + "This is an RFC Editor comment on an RFC.", rfc.latest_event().desc + ) + self.assertEqual(len(outbox), 1) + self.assertIn("This is an RFC Editor comment on an RFC.", get_payload_text(outbox[0])) class TemplateTagTest(TestCase): diff --git a/ietf/doc/views_doc.py b/ietf/doc/views_doc.py index 591a72d907..4a20db3c89 100644 --- a/ietf/doc/views_doc.py +++ b/ietf/doc/views_doc.py @@ -1687,7 +1687,7 @@ def add_comment(request, name): 
group__acronym=doc.group.acronym, person__user=request.user))) else: - can_add_comment = has_role(request.user, ("Area Director", "Secretariat", "IRTF Chair")) + can_add_comment = has_role(request.user, ("Area Director", "Secretariat", "IRTF Chair", "RFC Editor")) if not can_add_comment: # The user is a chair or secretary, but not for this WG or RG permission_denied(request, "You need to be a chair or secretary of this group to add a comment.") From 00264a4cb0f43df07e39beda8f74e1aa36814e71 Mon Sep 17 00:00:00 2001 From: Jennifer Richards Date: Thu, 14 Aug 2025 11:18:56 -0300 Subject: [PATCH 176/405] ci: ensure blobdbs exist for sandboxen --- dev/deploy-to-container/start.sh | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/dev/deploy-to-container/start.sh b/dev/deploy-to-container/start.sh index 2c83d6970c..5d976f80ea 100644 --- a/dev/deploy-to-container/start.sh +++ b/dev/deploy-to-container/start.sh @@ -35,6 +35,18 @@ echo "Running Datatracker checks..." # Migrate, adjusting to what the current state of the underlying database might be: +# On production, the blobdb tables are in a separate database. Manipulate migration +# history to ensure that they're created for the sandbox environment that runs it +# all from a single database. +echo "Ensuring blobdb relations exist..." +/usr/local/bin/python ./ietf/manage.py migrate --settings=settings_local --fake blobdb zero +if ! /usr/local/bin/python ./ietf/manage.py migrate --settings=settings_local blobdb; then + # If we are restarting a sandbox, the migration may already have run and re-running + # it will fail. Assume that happened and fake it. + /usr/local/bin/python ./ietf/manage.py migrate --settings=settings_local --fake blobdb +fi + +# Now run the migrations for real echo "Running Datatracker migrations..." 
/usr/local/bin/python ./ietf/manage.py migrate --settings=settings_local From f13a1a55774c9605b79e2db6f41dbe0ceb0d400a Mon Sep 17 00:00:00 2001 From: Nicolas Giard Date: Fri, 15 Aug 2025 15:55:08 -0400 Subject: [PATCH 177/405] ci: Update db.Dockerfile to postgres 17 --- docker/db.Dockerfile | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docker/db.Dockerfile b/docker/db.Dockerfile index 0b57a80b70..48ab298780 100644 --- a/docker/db.Dockerfile +++ b/docker/db.Dockerfile @@ -1,7 +1,7 @@ # ===================== # --- Builder Stage --- # ===================== -FROM postgres:16 AS builder +FROM postgres:17 AS builder ENV POSTGRES_PASSWORD=hk2j22sfiv ENV POSTGRES_USER=django @@ -19,7 +19,7 @@ RUN ["/usr/local/bin/docker-entrypoint.sh", "postgres"] # =================== # --- Final Image --- # =================== -FROM postgres:16 +FROM postgres:17 LABEL maintainer="IETF Tools Team " COPY --from=builder /data $PGDATA From 0694863763509369228849b876b99666aaa70092 Mon Sep 17 00:00:00 2001 From: Jennifer Richards Date: Fri, 15 Aug 2025 17:05:40 -0300 Subject: [PATCH 178/405] refactor: jsonfield pkg->Django JSONField (#9284) * refactor: drop jsonfield pkg in code It still exists in migrations * refactor: callable JSONField defaults * chore: migrations * chore: unused import * chore: fix nulls in authors; renumber migrations * fix: rename IETFJSONField to fix migration * chore: update names.json JSONField representation has changed * fix: missed renaming; hide dead code from coverage --- .../0008_alter_group_used_roles_and_more.py | 107 ++ ietf/group/models.py | 55 +- ietf/name/fixtures/names.json | 1202 +++++++++++++---- ...alter_sessionpurposename_timeslot_types.py | 27 + ietf/name/models.py | 6 +- ...icalperson_pronouns_selectable_and_more.py | 34 + ietf/person/models.py | 3 +- ...ion_authors_alter_submissioncheck_items.py | 46 + ietf/submit/models.py | 7 +- ietf/utils/db.py | 26 +- ietf/utils/fields.py | 19 +- 11 files changed, 1259 insertions(+), 273 
deletions(-) create mode 100644 ietf/group/migrations/0008_alter_group_used_roles_and_more.py create mode 100644 ietf/name/migrations/0019_alter_sessionpurposename_timeslot_types.py create mode 100644 ietf/person/migrations/0005_alter_historicalperson_pronouns_selectable_and_more.py create mode 100644 ietf/submit/migrations/0003_alter_submission_authors_alter_submissioncheck_items.py diff --git a/ietf/group/migrations/0008_alter_group_used_roles_and_more.py b/ietf/group/migrations/0008_alter_group_used_roles_and_more.py new file mode 100644 index 0000000000..28f345df00 --- /dev/null +++ b/ietf/group/migrations/0008_alter_group_used_roles_and_more.py @@ -0,0 +1,107 @@ +# Generated by Django 4.2.23 on 2025-08-15 16:46 + +from django.db import migrations, models +import ietf.group.models +import ietf.name.models +import ietf.utils.db +import ietf.utils.validators + + +class Migration(migrations.Migration): + + dependencies = [ + ("group", "0007_used_roles"), + ] + + operations = [ + migrations.AlterField( + model_name="group", + name="used_roles", + field=models.JSONField( + blank=True, + default=list, + help_text="Leave an empty list to get the group_type's default used roles", + max_length=256, + ), + ), + migrations.AlterField( + model_name="groupfeatures", + name="admin_roles", + field=ietf.utils.db.EmptyAwareJSONField( + default=ietf.group.models.default_admin_roles, max_length=64 + ), + ), + migrations.AlterField( + model_name="groupfeatures", + name="default_used_roles", + field=ietf.utils.db.EmptyAwareJSONField(default=list, max_length=256), + ), + migrations.AlterField( + model_name="groupfeatures", + name="docman_roles", + field=ietf.utils.db.EmptyAwareJSONField( + default=ietf.group.models.default_docman_roles, max_length=128 + ), + ), + migrations.AlterField( + model_name="groupfeatures", + name="groupman_authroles", + field=ietf.utils.db.EmptyAwareJSONField( + default=ietf.group.models.default_groupman_authroles, max_length=128 + ), + ), + 
migrations.AlterField( + model_name="groupfeatures", + name="groupman_roles", + field=ietf.utils.db.EmptyAwareJSONField( + default=ietf.group.models.default_groupman_roles, max_length=128 + ), + ), + migrations.AlterField( + model_name="groupfeatures", + name="material_types", + field=ietf.utils.db.EmptyAwareJSONField( + default=ietf.group.models.default_material_types, max_length=64 + ), + ), + migrations.AlterField( + model_name="groupfeatures", + name="matman_roles", + field=ietf.utils.db.EmptyAwareJSONField( + default=ietf.group.models.default_matman_roles, max_length=128 + ), + ), + migrations.AlterField( + model_name="groupfeatures", + name="role_order", + field=ietf.utils.db.EmptyAwareJSONField( + default=ietf.group.models.default_role_order, + help_text="The order in which roles are shown, for instance on photo pages. Enter valid JSON.", + max_length=128, + ), + ), + migrations.AlterField( + model_name="groupfeatures", + name="session_purposes", + field=ietf.utils.db.EmptyAwareJSONField( + default=list, + help_text="Allowed session purposes for this group type", + max_length=256, + validators=[ + ietf.utils.validators.JSONForeignKeyListValidator( + ietf.name.models.SessionPurposeName + ) + ], + ), + ), + migrations.AlterField( + model_name="grouphistory", + name="used_roles", + field=models.JSONField( + blank=True, + default=list, + help_text="Leave an empty list to get the group_type's default used roles", + max_length=256, + ), + ), + ] diff --git a/ietf/group/models.py b/ietf/group/models.py index 52549e8cc1..608dcc86b9 100644 --- a/ietf/group/models.py +++ b/ietf/group/models.py @@ -3,7 +3,6 @@ import email.utils -import jsonfield import os import re @@ -21,7 +20,7 @@ AgendaTypeName, AgendaFilterTypeName, ExtResourceName, SessionPurposeName, AppealArtifactTypeName ) from ietf.person.models import Email, Person -from ietf.utils.db import IETFJSONField +from ietf.utils.db import EmptyAwareJSONField from ietf.utils.mail import formataddr, send_mail_text 
from ietf.utils import log from ietf.utils.models import ForeignKey, OneToOneField @@ -46,7 +45,7 @@ class GroupInfo(models.Model): unused_states = models.ManyToManyField('doc.State', help_text="Document states that have been disabled for the group.", blank=True) unused_tags = models.ManyToManyField(DocTagName, help_text="Document tags that have been disabled for the group.", blank=True) - used_roles = jsonfield.JSONField(max_length=256, blank=True, default=[], help_text="Leave an empty list to get the group_type's default used roles") + used_roles = models.JSONField(max_length=256, blank=True, default=list, help_text="Leave an empty list to get the group_type's default used roles") uses_milestone_dates = models.BooleanField(default=True) @@ -235,6 +234,36 @@ def chat_archive_url(self): ) +# JSONFields need callable defaults that work with migrations to avoid sharing +# data structures between instances. These helpers provide that. +def default_material_types(): + return ["slides"] + + +def default_admin_roles(): + return ["chair"] + + +def default_docman_roles(): + return ["ad", "chair", "delegate", "secr"] + + +def default_groupman_roles(): + return ["ad", "chair"] + + +def default_groupman_authroles(): + return ["Secretariat"] + + +def default_matman_roles(): + return ["ad", "chair", "delegate", "secr"] + + +def default_role_order(): + return ["chair", "secr", "member"] + + class GroupFeatures(models.Model): type = OneToOneField(GroupTypeName, primary_key=True, null=False, related_name='features') #history = HistoricalRecords() @@ -268,16 +297,16 @@ class GroupFeatures(models.Model): agenda_type = models.ForeignKey(AgendaTypeName, null=True, default="ietf", on_delete=CASCADE) about_page = models.CharField(max_length=64, blank=False, default="ietf.group.views.group_about" ) default_tab = models.CharField(max_length=64, blank=False, default="ietf.group.views.group_about" ) - material_types = IETFJSONField(max_length=64, accepted_empty_values=[[], {}], blank=False, 
default=["slides"]) - default_used_roles = IETFJSONField(max_length=256, accepted_empty_values=[[], {}], blank=False, default=[]) - admin_roles = IETFJSONField(max_length=64, accepted_empty_values=[[], {}], blank=False, default=["chair"]) # Trac Admin - docman_roles = IETFJSONField(max_length=128, accepted_empty_values=[[], {}], blank=False, default=["ad","chair","delegate","secr"]) - groupman_roles = IETFJSONField(max_length=128, accepted_empty_values=[[], {}], blank=False, default=["ad","chair",]) - groupman_authroles = IETFJSONField(max_length=128, accepted_empty_values=[[], {}], blank=False, default=["Secretariat",]) - matman_roles = IETFJSONField(max_length=128, accepted_empty_values=[[], {}], blank=False, default=["ad","chair","delegate","secr"]) - role_order = IETFJSONField(max_length=128, accepted_empty_values=[[], {}], blank=False, default=["chair","secr","member"], - help_text="The order in which roles are shown, for instance on photo pages. Enter valid JSON.") - session_purposes = IETFJSONField(max_length=256, accepted_empty_values=[[], {}], blank=False, default=[], + material_types = EmptyAwareJSONField(max_length=64, accepted_empty_values=[[], {}], blank=False, default=default_material_types) + default_used_roles = EmptyAwareJSONField(max_length=256, accepted_empty_values=[[], {}], blank=False, default=list) + admin_roles = EmptyAwareJSONField(max_length=64, accepted_empty_values=[[], {}], blank=False, default=default_admin_roles) # Trac Admin + docman_roles = EmptyAwareJSONField(max_length=128, accepted_empty_values=[[], {}], blank=False, default=default_docman_roles) + groupman_roles = EmptyAwareJSONField(max_length=128, accepted_empty_values=[[], {}], blank=False, default=default_groupman_roles) + groupman_authroles = EmptyAwareJSONField(max_length=128, accepted_empty_values=[[], {}], blank=False, default=default_groupman_authroles) + matman_roles = EmptyAwareJSONField(max_length=128, accepted_empty_values=[[], {}], blank=False, 
default=default_matman_roles) + role_order = EmptyAwareJSONField(max_length=128, accepted_empty_values=[[], {}], blank=False, default=default_role_order, + help_text="The order in which roles are shown, for instance on photo pages. Enter valid JSON.") + session_purposes = EmptyAwareJSONField(max_length=256, accepted_empty_values=[[], {}], blank=False, default=list, help_text="Allowed session purposes for this group type", validators=[JSONForeignKeyListValidator(SessionPurposeName)]) diff --git a/ietf/name/fixtures/names.json b/ietf/name/fixtures/names.json index ebdda1a1fa..0724cbb4b5 100644 --- a/ietf/name/fixtures/names.json +++ b/ietf/name/fixtures/names.json @@ -2630,6 +2630,19 @@ "model": "doc.state", "pk": 181 }, + { + "fields": { + "desc": "The statement has been marked historic", + "name": "Historic", + "next_states": [], + "order": 0, + "slug": "historic", + "type": "statement", + "used": true + }, + "model": "doc.state", + "pk": 182 + }, { "fields": { "label": "State" @@ -2872,7 +2885,9 @@ "fields": { "about_page": "ietf.group.views.group_about", "acts_like_wg": false, - "admin_roles": "[\n \"chair\"\n]", + "admin_roles": [ + "chair" + ], "agenda_filter_type": "special", "agenda_type": "ietf", "create_wiki": true, @@ -2880,10 +2895,24 @@ "customize_workflow": false, "default_parent": "", "default_tab": "ietf.group.views.group_about", - "default_used_roles": "[\n \"matman\",\n \"ad\",\n \"chair\",\n \"lead\",\n \"delegate\"\n]", - "docman_roles": "[\n \"chair\"\n]", - "groupman_authroles": "[\n \"Secretariat\"\n]", - "groupman_roles": "[\n \"chair\",\n \"lead\",\n \"delegate\"\n]", + "default_used_roles": [ + "matman", + "ad", + "chair", + "lead", + "delegate" + ], + "docman_roles": [ + "chair" + ], + "groupman_authroles": [ + "Secretariat" + ], + "groupman_roles": [ + "chair", + "lead", + "delegate" + ], "has_chartering_process": false, "has_default_chat": true, "has_documents": false, @@ -2893,15 +2922,29 @@ "has_reviews": false, "has_session_materials": 
true, "is_schedulable": true, - "material_types": "[\n \"slides\"\n]", - "matman_roles": "[\n \"chair\",\n \"lead\",\n \"delegate\",\n \"matman\"\n]", + "material_types": [ + "slides" + ], + "matman_roles": [ + "chair", + "lead", + "delegate", + "matman" + ], "need_parent": false, "parent_types": [ "ietf" ], "req_subm_approval": true, - "role_order": "[\n \"chair\",\n \"lead\",\n \"delegate\",\n \"matman\"\n]", - "session_purposes": "[\n \"presentation\"\n]", + "role_order": [ + "chair", + "lead", + "delegate", + "matman" + ], + "session_purposes": [ + "presentation" + ], "show_on_agenda": true }, "model": "group.groupfeatures", @@ -2911,7 +2954,9 @@ "fields": { "about_page": "ietf.group.views.group_about", "acts_like_wg": false, - "admin_roles": "[\n \"chair\"\n]", + "admin_roles": [ + "chair" + ], "agenda_filter_type": "heading", "agenda_type": "ietf", "create_wiki": false, @@ -2919,10 +2964,19 @@ "customize_workflow": false, "default_parent": "", "default_tab": "ietf.group.views.group_about", - "default_used_roles": "[\n \"member\",\n \"chair\"\n]", - "docman_roles": "[\n \"chair\"\n]", - "groupman_authroles": "[\n \"Secretariat\"\n]", - "groupman_roles": "[\n \"chair\"\n]", + "default_used_roles": [ + "member", + "chair" + ], + "docman_roles": [ + "chair" + ], + "groupman_authroles": [ + "Secretariat" + ], + "groupman_roles": [ + "chair" + ], "has_chartering_process": false, "has_default_chat": false, "has_documents": false, @@ -2932,13 +2986,22 @@ "has_reviews": false, "has_session_materials": false, "is_schedulable": true, - "material_types": "[\n \"slides\"\n]", - "matman_roles": "[\n \"chair\"\n]", + "material_types": [ + "slides" + ], + "matman_roles": [ + "chair" + ], "need_parent": false, "parent_types": [], "req_subm_approval": false, - "role_order": "[\n \"chair\"\n]", - "session_purposes": "[\n \"closed_meeting\",\n \"officehours\"\n]", + "role_order": [ + "chair" + ], + "session_purposes": [ + "closed_meeting", + "officehours" + ], "show_on_agenda": 
true }, "model": "group.groupfeatures", @@ -2948,7 +3011,9 @@ "fields": { "about_page": "ietf.group.views.group_about", "acts_like_wg": true, - "admin_roles": "[\n \"chair\"\n]", + "admin_roles": [ + "chair" + ], "agenda_filter_type": "normal", "agenda_type": "ietf", "create_wiki": true, @@ -2956,10 +3021,26 @@ "customize_workflow": false, "default_parent": "", "default_tab": "ietf.group.views.group_about", - "default_used_roles": "[\n \"ad\",\n \"chair\",\n \"secr\",\n \"delegate\"\n]", - "docman_roles": "[\n \"chair\",\n \"delegate\",\n \"secr\"\n]", - "groupman_authroles": "[\n \"Secretariat\",\n \"Area Director\"\n]", - "groupman_roles": "[\n \"ad\",\n \"chair\",\n \"delegate\"\n]", + "default_used_roles": [ + "ad", + "chair", + "secr", + "delegate" + ], + "docman_roles": [ + "chair", + "delegate", + "secr" + ], + "groupman_authroles": [ + "Secretariat", + "Area Director" + ], + "groupman_roles": [ + "ad", + "chair", + "delegate" + ], "has_chartering_process": false, "has_default_chat": false, "has_documents": true, @@ -2969,16 +3050,28 @@ "has_reviews": false, "has_session_materials": true, "is_schedulable": true, - "material_types": "[\n \"slides\"\n]", - "matman_roles": "[\n \"ad\",\n \"chair\",\n \"delegate\",\n \"secr\"\n]", + "material_types": [ + "slides" + ], + "matman_roles": [ + "ad", + "chair", + "delegate", + "secr" + ], "need_parent": false, "parent_types": [ "area", "ietf" ], "req_subm_approval": true, - "role_order": "[\n \"chair\",\n \"secr\"\n]", - "session_purposes": "[\n \"regular\"\n]", + "role_order": [ + "chair", + "secr" + ], + "session_purposes": [ + "regular" + ], "show_on_agenda": true }, "model": "group.groupfeatures", @@ -2988,7 +3081,9 @@ "fields": { "about_page": "ietf.group.views.group_about", "acts_like_wg": false, - "admin_roles": "[\n \"ad\"\n]", + "admin_roles": [ + "ad" + ], "agenda_filter_type": "heading", "agenda_type": "ietf", "create_wiki": true, @@ -2996,10 +3091,22 @@ "customize_workflow": false, "default_parent": "", 
"default_tab": "ietf.group.views.group_about", - "default_used_roles": "[\n \"ad\",\n \"liaison_contact\",\n \"liaison_cc_contact\"\n]", - "docman_roles": "[\n \"ad\",\n \"delegate\",\n \"secr\"\n]", - "groupman_authroles": "[\n \"Secretariat\"\n]", - "groupman_roles": "[\n \"ad\"\n]", + "default_used_roles": [ + "ad", + "liaison_contact", + "liaison_cc_contact" + ], + "docman_roles": [ + "ad", + "delegate", + "secr" + ], + "groupman_authroles": [ + "Secretariat" + ], + "groupman_roles": [ + "ad" + ], "has_chartering_process": false, "has_default_chat": false, "has_documents": false, @@ -3009,15 +3116,27 @@ "has_reviews": false, "has_session_materials": false, "is_schedulable": false, - "material_types": "[\n \"slides\"\n]", - "matman_roles": "[\n \"ad\",\n \"chair\",\n \"delegate\",\n \"secr\"\n]", + "material_types": [ + "slides" + ], + "matman_roles": [ + "ad", + "chair", + "delegate", + "secr" + ], "need_parent": true, "parent_types": [ "ietf" ], "req_subm_approval": true, - "role_order": "[\n \"chair\",\n \"secr\"\n]", - "session_purposes": "[\n \"regular\"\n]", + "role_order": [ + "chair", + "secr" + ], + "session_purposes": [ + "regular" + ], "show_on_agenda": false }, "model": "group.groupfeatures", @@ -3027,7 +3146,10 @@ "fields": { "about_page": "ietf.group.views.group_about", "acts_like_wg": false, - "admin_roles": "[\n \"chair\",\n \"secr\"\n]", + "admin_roles": [ + "chair", + "secr" + ], "agenda_filter_type": "special", "agenda_type": "ad", "create_wiki": true, @@ -3035,10 +3157,25 @@ "customize_workflow": false, "default_parent": "", "default_tab": "ietf.group.views.group_about", - "default_used_roles": "[\n \"ad\",\n \"chair\",\n \"reviewer\",\n \"secr\",\n \"delegate\"\n]", - "docman_roles": "[\n \"chair\"\n]", - "groupman_authroles": "[\n \"Secretariat\"\n]", - "groupman_roles": "[\n \"ad\",\n \"secr\",\n \"delegate\",\n \"chair\"\n]", + "default_used_roles": [ + "ad", + "chair", + "reviewer", + "secr", + "delegate" + ], + "docman_roles": [ + 
"chair" + ], + "groupman_authroles": [ + "Secretariat" + ], + "groupman_roles": [ + "ad", + "secr", + "delegate", + "chair" + ], "has_chartering_process": false, "has_default_chat": false, "has_documents": false, @@ -3048,15 +3185,31 @@ "has_reviews": false, "has_session_materials": true, "is_schedulable": false, - "material_types": "[\n \"slides\"\n]", - "matman_roles": "[\n \"ad\",\n \"chair\",\n \"delegate\",\n \"secr\"\n]", + "material_types": [ + "slides" + ], + "matman_roles": [ + "ad", + "chair", + "delegate", + "secr" + ], "need_parent": true, "parent_types": [ "area" ], "req_subm_approval": true, - "role_order": "[\n \"chair\",\n \"secr\"\n]", - "session_purposes": "[\n \"open_meeting\",\n \"presentation\",\n \"regular\",\n \"social\",\n \"tutorial\"\n]", + "role_order": [ + "chair", + "secr" + ], + "session_purposes": [ + "open_meeting", + "presentation", + "regular", + "social", + "tutorial" + ], "show_on_agenda": false }, "model": "group.groupfeatures", @@ -3066,7 +3219,9 @@ "fields": { "about_page": "ietf.group.views.group_about", "acts_like_wg": false, - "admin_roles": "[\n \"chair\"\n]", + "admin_roles": [ + "chair" + ], "agenda_filter_type": "normal", "agenda_type": "ietf", "create_wiki": false, @@ -3074,10 +3229,19 @@ "customize_workflow": false, "default_parent": "", "default_tab": "ietf.group.views.group_about", - "default_used_roles": "[\n \"chair\",\n \"member\"\n]", - "docman_roles": "[\n \"chair\"\n]", - "groupman_authroles": "[\n \"Secretariat\"\n]", - "groupman_roles": "[\n \"chair\"\n]", + "default_used_roles": [ + "chair", + "member" + ], + "docman_roles": [ + "chair" + ], + "groupman_authroles": [ + "Secretariat" + ], + "groupman_roles": [ + "chair" + ], "has_chartering_process": false, "has_default_chat": true, "has_documents": false, @@ -3087,13 +3251,23 @@ "has_reviews": false, "has_session_materials": true, "is_schedulable": true, - "material_types": "[\n \"slides\"\n]", - "matman_roles": "[\n \"chair\"\n]", + "material_types": [ + 
"slides" + ], + "matman_roles": [ + "chair" + ], "need_parent": false, "parent_types": [], "req_subm_approval": false, - "role_order": "[\n \"chair\",\n \"member\"\n]", - "session_purposes": "[\n \"officehours\",\n \"regular\"\n]", + "role_order": [ + "chair", + "member" + ], + "session_purposes": [ + "officehours", + "regular" + ], "show_on_agenda": true }, "model": "group.groupfeatures", @@ -3103,7 +3277,9 @@ "fields": { "about_page": "ietf.group.views.group_about", "acts_like_wg": true, - "admin_roles": "[\n \"chair\"\n]", + "admin_roles": [ + "chair" + ], "agenda_filter_type": "normal", "agenda_type": "ietf", "create_wiki": false, @@ -3111,10 +3287,18 @@ "customize_workflow": true, "default_parent": "", "default_tab": "ietf.group.views.group_documents", - "default_used_roles": "[\n \"chair\"\n]", - "docman_roles": "[\n \"chair\"\n]", - "groupman_authroles": "[\n \"Secretariat\"\n]", - "groupman_roles": "[\n \"chair\"\n]", + "default_used_roles": [ + "chair" + ], + "docman_roles": [ + "chair" + ], + "groupman_authroles": [ + "Secretariat" + ], + "groupman_roles": [ + "chair" + ], "has_chartering_process": false, "has_default_chat": true, "has_documents": true, @@ -3124,15 +3308,23 @@ "has_reviews": false, "has_session_materials": true, "is_schedulable": true, - "material_types": "[\n \"slides\"\n]", - "matman_roles": "[\n \"chair\"\n]", + "material_types": [ + "slides" + ], + "matman_roles": [ + "chair" + ], "need_parent": false, "parent_types": [ "rfcedtyp" ], "req_subm_approval": true, - "role_order": "[\n \"chair\"\n]", - "session_purposes": "[\n \"regular\"\n]", + "role_order": [ + "chair" + ], + "session_purposes": [ + "regular" + ], "show_on_agenda": true }, "model": "group.groupfeatures", @@ -3142,7 +3334,9 @@ "fields": { "about_page": "ietf.group.views.group_about", "acts_like_wg": false, - "admin_roles": "[\n \"chair\"\n]", + "admin_roles": [ + "chair" + ], "agenda_filter_type": "normal", "agenda_type": "ietf", "create_wiki": false, @@ -3150,10 +3344,16 
@@ "customize_workflow": false, "default_parent": "", "default_tab": "ietf.group.views.group_about", - "default_used_roles": "[\n \"chair\"\n]", - "docman_roles": "[\n \"chair\"\n]", - "groupman_authroles": "[\n \"Secretariat\"\n]", - "groupman_roles": "[]", + "default_used_roles": [ + "chair" + ], + "docman_roles": [ + "chair" + ], + "groupman_authroles": [ + "Secretariat" + ], + "groupman_roles": [], "has_chartering_process": false, "has_default_chat": false, "has_documents": false, @@ -3163,15 +3363,26 @@ "has_reviews": false, "has_session_materials": false, "is_schedulable": false, - "material_types": "[\n \"slides\"\n]", - "matman_roles": "[\n \"chair\",\n \"delegate\"\n]", + "material_types": [ + "slides" + ], + "matman_roles": [ + "chair", + "delegate" + ], "need_parent": false, "parent_types": [ "ietf" ], "req_subm_approval": true, - "role_order": "[\n \"chair\",\n \"secr\"\n]", - "session_purposes": "[\n \"closed_meeting\",\n \"regular\"\n]", + "role_order": [ + "chair", + "secr" + ], + "session_purposes": [ + "closed_meeting", + "regular" + ], "show_on_agenda": true }, "model": "group.groupfeatures", @@ -3181,7 +3392,9 @@ "fields": { "about_page": "ietf.group.views.group_about", "acts_like_wg": false, - "admin_roles": "[\n \"lead\"\n]", + "admin_roles": [ + "lead" + ], "agenda_filter_type": "none", "agenda_type": "ad", "create_wiki": false, @@ -3189,10 +3402,27 @@ "customize_workflow": false, "default_parent": "", "default_tab": "ietf.group.views.group_about", - "default_used_roles": "[\n \"member\",\n \"chair\",\n \"lead\",\n \"delegate\"\n]", - "docman_roles": "[\n \"lead\",\n \"chair\",\n \"secr\"\n]", - "groupman_authroles": "[\n \"Secretariat\",\n \"IAB\"\n]", - "groupman_roles": "[\n \"lead\",\n \"chair\",\n \"secr\",\n \"delegate\"\n]", + "default_used_roles": [ + "member", + "chair", + "lead", + "delegate" + ], + "docman_roles": [ + "lead", + "chair", + "secr" + ], + "groupman_authroles": [ + "Secretariat", + "IAB" + ], + "groupman_roles": [ + 
"lead", + "chair", + "secr", + "delegate" + ], "has_chartering_process": false, "has_default_chat": false, "has_documents": true, @@ -3202,15 +3432,29 @@ "has_reviews": false, "has_session_materials": false, "is_schedulable": false, - "material_types": "[\n \"slides\"\n]", - "matman_roles": "[\n \"lead\",\n \"chair\",\n \"secr\"\n]", + "material_types": [ + "slides" + ], + "matman_roles": [ + "lead", + "chair", + "secr" + ], "need_parent": false, "parent_types": [ "ietf" ], "req_subm_approval": false, - "role_order": "[\n \"lead\",\n \"chair\",\n \"secr\"\n]", - "session_purposes": "[\n \"closed_meeting\",\n \"officehours\",\n \"open_meeting\"\n]", + "role_order": [ + "lead", + "chair", + "secr" + ], + "session_purposes": [ + "closed_meeting", + "officehours", + "open_meeting" + ], "show_on_agenda": false }, "model": "group.groupfeatures", @@ -3220,7 +3464,9 @@ "fields": { "about_page": "ietf.group.views.group_about", "acts_like_wg": false, - "admin_roles": "[\n \"chair\"\n]", + "admin_roles": [ + "chair" + ], "agenda_filter_type": "none", "agenda_type": "ietf", "create_wiki": false, @@ -3228,10 +3474,20 @@ "customize_workflow": false, "default_parent": "iab", "default_tab": "ietf.group.views.group_about", - "default_used_roles": "[]", - "docman_roles": "[\n \"ad\",\n \"chair\",\n \"delegate\",\n \"secr\"\n]", - "groupman_authroles": "[\n \"Secretariat\"\n]", - "groupman_roles": "[\n \"ad\",\n \"chair\"\n]", + "default_used_roles": [], + "docman_roles": [ + "ad", + "chair", + "delegate", + "secr" + ], + "groupman_authroles": [ + "Secretariat" + ], + "groupman_roles": [ + "ad", + "chair" + ], "has_chartering_process": false, "has_default_chat": true, "has_documents": true, @@ -3241,15 +3497,26 @@ "has_reviews": false, "has_session_materials": true, "is_schedulable": false, - "material_types": "[\n \"slides\"\n]", - "matman_roles": "[\n \"ad\",\n \"chair\",\n \"delegate\",\n \"secr\"\n]", + "material_types": [ + "slides" + ], + "matman_roles": [ + "ad", + "chair", + 
"delegate", + "secr" + ], "need_parent": true, "parent_types": [ "ietf" ], "req_subm_approval": false, - "role_order": "[\n \"chair\",\n \"secr\",\n \"member\"\n]", - "session_purposes": "\"[\\\"regular\\\"]\"", + "role_order": [ + "chair", + "secr", + "member" + ], + "session_purposes": "[\"regular\"]", "show_on_agenda": false }, "model": "group.groupfeatures", @@ -3259,7 +3526,9 @@ "fields": { "about_page": "ietf.group.views.group_about", "acts_like_wg": false, - "admin_roles": "[\n \"chair\"\n]", + "admin_roles": [ + "chair" + ], "agenda_filter_type": "none", "agenda_type": "ietf", "create_wiki": false, @@ -3267,10 +3536,18 @@ "customize_workflow": false, "default_parent": "", "default_tab": "ietf.group.views.group_about", - "default_used_roles": "[\n \"auth\"\n]", - "docman_roles": "[\n \"chair\"\n]", - "groupman_authroles": "[\n \"Secretariat\"\n]", - "groupman_roles": "[\n \"chair\"\n]", + "default_used_roles": [ + "auth" + ], + "docman_roles": [ + "chair" + ], + "groupman_authroles": [ + "Secretariat" + ], + "groupman_roles": [ + "chair" + ], "has_chartering_process": false, "has_default_chat": false, "has_documents": false, @@ -3280,13 +3557,21 @@ "has_reviews": false, "has_session_materials": false, "is_schedulable": false, - "material_types": "[\n \"slides\"\n]", - "matman_roles": "[\n \"chair\"\n]", + "material_types": [ + "slides" + ], + "matman_roles": [ + "chair" + ], "need_parent": false, "parent_types": [], "req_subm_approval": false, - "role_order": "[\n \"chair\"\n]", - "session_purposes": "[\n \"officehours\"\n]", + "role_order": [ + "chair" + ], + "session_purposes": [ + "officehours" + ], "show_on_agenda": false }, "model": "group.groupfeatures", @@ -3296,7 +3581,9 @@ "fields": { "about_page": "ietf.group.views.group_about", "acts_like_wg": false, - "admin_roles": "[\n \"chair\"\n]", + "admin_roles": [ + "chair" + ], "agenda_filter_type": "none", "agenda_type": "ad", "create_wiki": false, @@ -3304,10 +3591,19 @@ "customize_workflow": false, 
"default_parent": "", "default_tab": "ietf.group.views.group_about", - "default_used_roles": "[\n \"delegate\"\n]", - "docman_roles": "[\n \"chair\"\n]", - "groupman_authroles": "[\n \"Secretariat\"\n]", - "groupman_roles": "[\n \"chair\",\n \"delegate\"\n]", + "default_used_roles": [ + "delegate" + ], + "docman_roles": [ + "chair" + ], + "groupman_authroles": [ + "Secretariat" + ], + "groupman_roles": [ + "chair", + "delegate" + ], "has_chartering_process": false, "has_default_chat": false, "has_documents": false, @@ -3317,13 +3613,24 @@ "has_reviews": false, "has_session_materials": false, "is_schedulable": false, - "material_types": "\"[]\"", - "matman_roles": "[\n \"chair\",\n \"delegate\",\n \"member\"\n]", + "material_types": "[]", + "matman_roles": [ + "chair", + "delegate", + "member" + ], "need_parent": false, "parent_types": [], "req_subm_approval": true, - "role_order": "[\n \"chair\",\n \"delegate\",\n \"member\"\n]", - "session_purposes": "[\n \"closed_meeting\",\n \"open_meeting\"\n]", + "role_order": [ + "chair", + "delegate", + "member" + ], + "session_purposes": [ + "closed_meeting", + "open_meeting" + ], "show_on_agenda": false }, "model": "group.groupfeatures", @@ -3333,7 +3640,10 @@ "fields": { "about_page": "ietf.group.views.group_about", "acts_like_wg": false, - "admin_roles": "[\n \"chair\",\n \"lead\"\n]", + "admin_roles": [ + "chair", + "lead" + ], "agenda_filter_type": "heading", "agenda_type": "ietf", "create_wiki": false, @@ -3341,10 +3651,26 @@ "customize_workflow": false, "default_parent": "", "default_tab": "ietf.group.views.group_about", - "default_used_roles": "[\n \"ad\",\n \"member\",\n \"comdir\",\n \"delegate\",\n \"execdir\",\n \"recman\",\n \"secr\",\n \"chair\"\n]", - "docman_roles": "[\n \"chair\"\n]", - "groupman_authroles": "[\n \"Secretariat\"\n]", - "groupman_roles": "[\n \"chair\",\n \"delegate\"\n]", + "default_used_roles": [ + "ad", + "member", + "comdir", + "delegate", + "execdir", + "recman", + "secr", + "chair" + 
], + "docman_roles": [ + "chair" + ], + "groupman_authroles": [ + "Secretariat" + ], + "groupman_roles": [ + "chair", + "delegate" + ], "has_chartering_process": false, "has_default_chat": false, "has_documents": false, @@ -3354,15 +3680,29 @@ "has_reviews": false, "has_session_materials": true, "is_schedulable": false, - "material_types": "[\n \"slides\"\n]", - "matman_roles": "[\n \"chair\",\n \"delegate\"\n]", + "material_types": [ + "slides" + ], + "matman_roles": [ + "chair", + "delegate" + ], "need_parent": false, "parent_types": [ "ietf" ], "req_subm_approval": true, - "role_order": "[\n \"chair\",\n \"secr\"\n]", - "session_purposes": "[\n \"admin\",\n \"plenary\",\n \"presentation\",\n \"social\",\n \"officehours\"\n]", + "role_order": [ + "chair", + "secr" + ], + "session_purposes": [ + "admin", + "plenary", + "presentation", + "social", + "officehours" + ], "show_on_agenda": false }, "model": "group.groupfeatures", @@ -3372,7 +3712,9 @@ "fields": { "about_page": "ietf.group.views.group_about", "acts_like_wg": false, - "admin_roles": "[\n \"chair\"\n]", + "admin_roles": [ + "chair" + ], "agenda_filter_type": "none", "agenda_type": "ad", "create_wiki": false, @@ -3380,10 +3722,16 @@ "customize_workflow": false, "default_parent": "", "default_tab": "ietf.group.views.group_about", - "default_used_roles": "[\n \"ad\"\n]", - "docman_roles": "[\n \"auth\"\n]", - "groupman_authroles": "[\n \"Secretariat\"\n]", - "groupman_roles": "[]", + "default_used_roles": [ + "ad" + ], + "docman_roles": [ + "auth" + ], + "groupman_authroles": [ + "Secretariat" + ], + "groupman_roles": [], "has_chartering_process": false, "has_default_chat": false, "has_documents": false, @@ -3393,15 +3741,20 @@ "has_reviews": false, "has_session_materials": false, "is_schedulable": false, - "material_types": "[\n \"slides\"\n]", - "matman_roles": "[]", + "material_types": [ + "slides" + ], + "matman_roles": [], "need_parent": true, "parent_types": [ "area" ], "req_subm_approval": false, - 
"role_order": "[\n \"chair\",\n \"secr\"\n]", - "session_purposes": "[]", + "role_order": [ + "chair", + "secr" + ], + "session_purposes": [], "show_on_agenda": false }, "model": "group.groupfeatures", @@ -3411,7 +3764,9 @@ "fields": { "about_page": "ietf.group.views.group_about", "acts_like_wg": false, - "admin_roles": "[\n \"chair\"\n]", + "admin_roles": [ + "chair" + ], "agenda_filter_type": "heading", "agenda_type": "ietf", "create_wiki": false, @@ -3419,10 +3774,20 @@ "customize_workflow": false, "default_parent": "", "default_tab": "ietf.group.views.group_about", - "default_used_roles": "[\n \"member\",\n \"atlarge\",\n \"chair\",\n \"delegate\"\n]", - "docman_roles": "[]", - "groupman_authroles": "[\n \"Secretariat\"\n]", - "groupman_roles": "[\n \"chair\",\n \"delegate\"\n]", + "default_used_roles": [ + "member", + "atlarge", + "chair", + "delegate" + ], + "docman_roles": [], + "groupman_authroles": [ + "Secretariat" + ], + "groupman_roles": [ + "chair", + "delegate" + ], "has_chartering_process": false, "has_default_chat": false, "has_documents": false, @@ -3432,15 +3797,24 @@ "has_reviews": false, "has_session_materials": false, "is_schedulable": false, - "material_types": "[\n \"slides\"\n]", - "matman_roles": "[\n \"chair\",\n \"delegate\",\n \"secr\"\n]", + "material_types": [ + "slides" + ], + "matman_roles": [ + "chair", + "delegate", + "secr" + ], "need_parent": false, "parent_types": [ "irtf" ], "req_subm_approval": true, - "role_order": "[\n \"chair\",\n \"secr\"\n]", - "session_purposes": "[]", + "role_order": [ + "chair", + "secr" + ], + "session_purposes": [], "show_on_agenda": false }, "model": "group.groupfeatures", @@ -3450,7 +3824,10 @@ "fields": { "about_page": "ietf.group.views.group_about", "acts_like_wg": false, - "admin_roles": "[\n \"chair\",\n \"lead\"\n]", + "admin_roles": [ + "chair", + "lead" + ], "agenda_filter_type": "heading", "agenda_type": "ietf", "create_wiki": false, @@ -3458,10 +3835,20 @@ "customize_workflow": false, 
"default_parent": "", "default_tab": "ietf.group.views.group_about", - "default_used_roles": "[\n \"chair\",\n \"delegate\"\n]", - "docman_roles": "[\n \"chair\"\n]", - "groupman_authroles": "[\n \"Secretariat\"\n]", - "groupman_roles": "[\n \"chair\",\n \"delegate\"\n]", + "default_used_roles": [ + "chair", + "delegate" + ], + "docman_roles": [ + "chair" + ], + "groupman_authroles": [ + "Secretariat" + ], + "groupman_roles": [ + "chair", + "delegate" + ], "has_chartering_process": false, "has_default_chat": false, "has_documents": true, @@ -3471,13 +3858,24 @@ "has_reviews": false, "has_session_materials": false, "is_schedulable": true, - "material_types": "[\n \"slides\"\n]", - "matman_roles": "[\n \"chair\",\n \"delegate\"\n]", + "material_types": [ + "slides" + ], + "matman_roles": [ + "chair", + "delegate" + ], "need_parent": false, "parent_types": [], "req_subm_approval": true, - "role_order": "[\n \"chair\",\n \"delegate\"\n]", - "session_purposes": "[\n \"officehours\",\n \"regular\"\n]", + "role_order": [ + "chair", + "delegate" + ], + "session_purposes": [ + "officehours", + "regular" + ], "show_on_agenda": true }, "model": "group.groupfeatures", @@ -3487,7 +3885,9 @@ "fields": { "about_page": "ietf.group.views.group_about", "acts_like_wg": false, - "admin_roles": "[\n \"chair\"\n]", + "admin_roles": [ + "chair" + ], "agenda_filter_type": "none", "agenda_type": null, "create_wiki": false, @@ -3495,10 +3895,17 @@ "customize_workflow": false, "default_parent": "", "default_tab": "ietf.group.views.group_about", - "default_used_roles": "[\n \"chair\",\n \"ceo\"\n]", - "docman_roles": "[]", - "groupman_authroles": "[\n \"Secretariat\"\n]", - "groupman_roles": "[\n \"chair\"\n]", + "default_used_roles": [ + "chair", + "ceo" + ], + "docman_roles": [], + "groupman_authroles": [ + "Secretariat" + ], + "groupman_roles": [ + "chair" + ], "has_chartering_process": false, "has_default_chat": false, "has_documents": false, @@ -3508,15 +3915,27 @@ "has_reviews": false, 
"has_session_materials": false, "is_schedulable": false, - "material_types": "[\n \"slides\"\n]", - "matman_roles": "[\n \"chair\",\n \"secr\"\n]", + "material_types": [ + "slides" + ], + "matman_roles": [ + "chair", + "secr" + ], "need_parent": false, "parent_types": [ "isoc" ], "req_subm_approval": true, - "role_order": "[\n \"chair\",\n \"secr\"\n]", - "session_purposes": "[\n \"officehours\",\n \"open_meeting\",\n \"presentation\"\n]", + "role_order": [ + "chair", + "secr" + ], + "session_purposes": [ + "officehours", + "open_meeting", + "presentation" + ], "show_on_agenda": false }, "model": "group.groupfeatures", @@ -3526,7 +3945,10 @@ "fields": { "about_page": "ietf.group.views.group_about", "acts_like_wg": false, - "admin_roles": "[\n \"chair\",\n \"advisor\"\n]", + "admin_roles": [ + "chair", + "advisor" + ], "agenda_filter_type": "none", "agenda_type": "side", "create_wiki": true, @@ -3534,10 +3956,23 @@ "customize_workflow": false, "default_parent": "", "default_tab": "ietf.group.views.group_about", - "default_used_roles": "[\n \"member\",\n \"advisor\",\n \"liaison\",\n \"chair\",\n \"techadv\"\n]", - "docman_roles": "[\n \"chair\"\n]", - "groupman_authroles": "[\n \"Secretariat\"\n]", - "groupman_roles": "[\n \"chair\",\n \"advisor\"\n]", + "default_used_roles": [ + "member", + "advisor", + "liaison", + "chair", + "techadv" + ], + "docman_roles": [ + "chair" + ], + "groupman_authroles": [ + "Secretariat" + ], + "groupman_roles": [ + "chair", + "advisor" + ], "has_chartering_process": false, "has_default_chat": false, "has_documents": false, @@ -3547,15 +3982,26 @@ "has_reviews": false, "has_session_materials": false, "is_schedulable": false, - "material_types": "[\n \"slides\"\n]", - "matman_roles": "[\n \"chair\"\n]", + "material_types": [ + "slides" + ], + "matman_roles": [ + "chair" + ], "need_parent": false, "parent_types": [ "area" ], "req_subm_approval": true, - "role_order": "[\n \"chair\",\n \"member\",\n \"advisor\"\n]", - "session_purposes": 
"[\n \"closed_meeting\",\n \"officehours\"\n]", + "role_order": [ + "chair", + "member", + "advisor" + ], + "session_purposes": [ + "closed_meeting", + "officehours" + ], "show_on_agenda": false }, "model": "group.groupfeatures", @@ -3565,7 +4011,9 @@ "fields": { "about_page": "ietf.group.views.group_about", "acts_like_wg": false, - "admin_roles": "[\n \"lead\"\n]", + "admin_roles": [ + "lead" + ], "agenda_filter_type": "normal", "agenda_type": "ad", "create_wiki": false, @@ -3573,10 +4021,27 @@ "customize_workflow": false, "default_parent": "", "default_tab": "ietf.group.views.group_about", - "default_used_roles": "[\n \"member\",\n \"chair\",\n \"lead\",\n \"delegate\"\n]", - "docman_roles": "[\n \"lead\",\n \"chair\",\n \"secr\"\n]", - "groupman_authroles": "[\n \"Secretariat\",\n \"IAB\"\n]", - "groupman_roles": "[\n \"lead\",\n \"chair\",\n \"secr\",\n \"delegate\"\n]", + "default_used_roles": [ + "member", + "chair", + "lead", + "delegate" + ], + "docman_roles": [ + "lead", + "chair", + "secr" + ], + "groupman_authroles": [ + "Secretariat", + "IAB" + ], + "groupman_roles": [ + "lead", + "chair", + "secr", + "delegate" + ], "has_chartering_process": false, "has_default_chat": false, "has_documents": true, @@ -3586,15 +4051,28 @@ "has_reviews": false, "has_session_materials": true, "is_schedulable": false, - "material_types": "[\n \"slides\"\n]", - "matman_roles": "[\n \"lead\",\n \"chair\",\n \"secr\"\n]", + "material_types": [ + "slides" + ], + "matman_roles": [ + "lead", + "chair", + "secr" + ], "need_parent": false, "parent_types": [ "ietf" ], "req_subm_approval": false, - "role_order": "[\n \"lead\",\n \"chair\",\n \"secr\"\n]", - "session_purposes": "[\n \"regular\",\n \"tutorial\"\n]", + "role_order": [ + "lead", + "chair", + "secr" + ], + "session_purposes": [ + "regular", + "tutorial" + ], "show_on_agenda": false }, "model": "group.groupfeatures", @@ -3604,7 +4082,9 @@ "fields": { "about_page": "ietf.group.views.group_about", "acts_like_wg": true, - 
"admin_roles": "[\n \"chair\"\n]", + "admin_roles": [ + "chair" + ], "agenda_filter_type": "normal", "agenda_type": "ietf", "create_wiki": true, @@ -3612,10 +4092,24 @@ "customize_workflow": false, "default_parent": "", "default_tab": "ietf.group.views.group_about", - "default_used_roles": "[\n \"chair\",\n \"secr\",\n \"delegate\"\n]", - "docman_roles": "[\n \"chair\",\n \"delegate\",\n \"secr\"\n]", - "groupman_authroles": "[\n \"Secretariat\",\n \"IRTF Chair\"\n]", - "groupman_roles": "[\n \"chair\",\n \"delegate\"\n]", + "default_used_roles": [ + "chair", + "secr", + "delegate" + ], + "docman_roles": [ + "chair", + "delegate", + "secr" + ], + "groupman_authroles": [ + "Secretariat", + "IRTF Chair" + ], + "groupman_roles": [ + "chair", + "delegate" + ], "has_chartering_process": false, "has_default_chat": false, "has_documents": true, @@ -3625,15 +4119,26 @@ "has_reviews": false, "has_session_materials": true, "is_schedulable": true, - "material_types": "[\n \"slides\"\n]", - "matman_roles": "[\n \"chair\",\n \"delegate\",\n \"secr\"\n]", + "material_types": [ + "slides" + ], + "matman_roles": [ + "chair", + "delegate", + "secr" + ], "need_parent": false, "parent_types": [ "irtf" ], "req_subm_approval": true, - "role_order": "[\n \"chair\",\n \"secr\"\n]", - "session_purposes": "[\n \"regular\"\n]", + "role_order": [ + "chair", + "secr" + ], + "session_purposes": [ + "regular" + ], "show_on_agenda": true }, "model": "group.groupfeatures", @@ -3643,7 +4148,10 @@ "fields": { "about_page": "ietf.group.views.group_about", "acts_like_wg": false, - "admin_roles": "[\n \"chair\",\n \"secr\"\n]", + "admin_roles": [ + "chair", + "secr" + ], "agenda_filter_type": "normal", "agenda_type": "ietf", "create_wiki": true, @@ -3651,10 +4159,24 @@ "customize_workflow": false, "default_parent": "", "default_tab": "ietf.group.views.review_requests", - "default_used_roles": "[\n \"ad\",\n \"chair\",\n \"reviewer\",\n \"secr\",\n \"delegate\"\n]", - "docman_roles": "[\n \"secr\"\n]", 
- "groupman_authroles": "[\n \"Secretariat\"\n]", - "groupman_roles": "[\n \"ad\",\n \"secr\",\n \"delegate\"\n]", + "default_used_roles": [ + "ad", + "chair", + "reviewer", + "secr", + "delegate" + ], + "docman_roles": [ + "secr" + ], + "groupman_authroles": [ + "Secretariat" + ], + "groupman_roles": [ + "ad", + "secr", + "delegate" + ], "has_chartering_process": false, "has_default_chat": false, "has_documents": false, @@ -3664,15 +4186,26 @@ "has_reviews": true, "has_session_materials": true, "is_schedulable": false, - "material_types": "[\n \"slides\"\n]", - "matman_roles": "[\n \"ad\",\n \"secr\"\n]", + "material_types": [ + "slides" + ], + "matman_roles": [ + "ad", + "secr" + ], "need_parent": true, "parent_types": [ "area" ], "req_subm_approval": true, - "role_order": "[\n \"chair\",\n \"secr\"\n]", - "session_purposes": "[\n \"open_meeting\",\n \"social\"\n]", + "role_order": [ + "chair", + "secr" + ], + "session_purposes": [ + "open_meeting", + "social" + ], "show_on_agenda": false }, "model": "group.groupfeatures", @@ -3682,7 +4215,9 @@ "fields": { "about_page": "ietf.group.views.group_about", "acts_like_wg": false, - "admin_roles": "[\n \"chair\"\n]", + "admin_roles": [ + "chair" + ], "agenda_filter_type": "normal", "agenda_type": "ietf", "create_wiki": false, @@ -3690,10 +4225,19 @@ "customize_workflow": false, "default_parent": "", "default_tab": "ietf.group.views.group_about", - "default_used_roles": "[\n \"auth\",\n \"chair\"\n]", - "docman_roles": "[\n \"chair\"\n]", - "groupman_authroles": "[\n \"Secretariat\"\n]", - "groupman_roles": "[\n \"chair\"\n]", + "default_used_roles": [ + "auth", + "chair" + ], + "docman_roles": [ + "chair" + ], + "groupman_authroles": [ + "Secretariat" + ], + "groupman_roles": [ + "chair" + ], "has_chartering_process": false, "has_default_chat": false, "has_documents": false, @@ -3703,13 +4247,23 @@ "has_reviews": false, "has_session_materials": false, "is_schedulable": false, - "material_types": "[\n \"slides\"\n]", - 
"matman_roles": "[\n \"chair\"\n]", + "material_types": [ + "slides" + ], + "matman_roles": [ + "chair" + ], "need_parent": false, "parent_types": [], "req_subm_approval": true, - "role_order": "[\n \"chair\",\n \"secr\"\n]", - "session_purposes": "[\n \"officehours\",\n \"regular\"\n]", + "role_order": [ + "chair", + "secr" + ], + "session_purposes": [ + "officehours", + "regular" + ], "show_on_agenda": false }, "model": "group.groupfeatures", @@ -3719,7 +4273,9 @@ "fields": { "about_page": "ietf.group.views.group_about", "acts_like_wg": true, - "admin_roles": "[\n \"chair\"\n]", + "admin_roles": [ + "chair" + ], "agenda_filter_type": "normal", "agenda_type": "ietf", "create_wiki": true, @@ -3727,10 +4283,25 @@ "customize_workflow": true, "default_parent": "irtf", "default_tab": "ietf.group.views.group_documents", - "default_used_roles": "[\n \"chair\",\n \"techadv\",\n \"secr\",\n \"delegate\"\n]", - "docman_roles": "[\n \"chair\",\n \"delegate\",\n \"secr\"\n]", - "groupman_authroles": "[\n \"Secretariat\",\n \"IRTF Chair\"\n]", - "groupman_roles": "[\n \"chair\",\n \"delegate\"\n]", + "default_used_roles": [ + "chair", + "techadv", + "secr", + "delegate" + ], + "docman_roles": [ + "chair", + "delegate", + "secr" + ], + "groupman_authroles": [ + "Secretariat", + "IRTF Chair" + ], + "groupman_roles": [ + "chair", + "delegate" + ], "has_chartering_process": true, "has_default_chat": true, "has_documents": true, @@ -3740,15 +4311,27 @@ "has_reviews": false, "has_session_materials": true, "is_schedulable": true, - "material_types": "[\n \"slides\"\n]", - "matman_roles": "[\n \"chair\",\n \"delegate\",\n \"secr\"\n]", + "material_types": [ + "slides" + ], + "matman_roles": [ + "chair", + "delegate", + "secr" + ], "need_parent": true, "parent_types": [ "irtf" ], "req_subm_approval": true, - "role_order": "[\n \"chair\",\n \"delegate\",\n \"secr\"\n]", - "session_purposes": "[\n \"regular\"\n]", + "role_order": [ + "chair", + "delegate", + "secr" + ], + 
"session_purposes": [ + "regular" + ], "show_on_agenda": true }, "model": "group.groupfeatures", @@ -3758,7 +4341,9 @@ "fields": { "about_page": "ietf.group.views.group_about", "acts_like_wg": false, - "admin_roles": "[\n \"chair\"\n]", + "admin_roles": [ + "chair" + ], "agenda_filter_type": "none", "agenda_type": null, "create_wiki": false, @@ -3766,10 +4351,23 @@ "customize_workflow": false, "default_parent": "", "default_tab": "ietf.group.views.group_about", - "default_used_roles": "[\n \"liaiman\",\n \"ceo\",\n \"coord\",\n \"auth\",\n \"chair\",\n \"liaison_contact\",\n \"liaison_cc_contact\"\n]", - "docman_roles": "[\n \"liaiman\",\n \"matman\"\n]", - "groupman_authroles": "[\n \"Secretariat\"\n]", - "groupman_roles": "[]", + "default_used_roles": [ + "liaiman", + "ceo", + "coord", + "auth", + "chair", + "liaison_contact", + "liaison_cc_contact" + ], + "docman_roles": [ + "liaiman", + "matman" + ], + "groupman_authroles": [ + "Secretariat" + ], + "groupman_roles": [], "has_chartering_process": false, "has_default_chat": false, "has_documents": false, @@ -3779,16 +4377,20 @@ "has_reviews": false, "has_session_materials": false, "is_schedulable": false, - "material_types": "[\n \"slides\"\n]", - "matman_roles": "[]", + "material_types": [ + "slides" + ], + "matman_roles": [], "need_parent": false, "parent_types": [ "area", "sdo" ], "req_subm_approval": true, - "role_order": "[\n \"liaiman\"\n]", - "session_purposes": "[]", + "role_order": [ + "liaiman" + ], + "session_purposes": [], "show_on_agenda": false }, "model": "group.groupfeatures", @@ -3798,7 +4400,9 @@ "fields": { "about_page": "ietf.group.views.group_about", "acts_like_wg": false, - "admin_roles": "[\n \"chair\"\n]", + "admin_roles": [ + "chair" + ], "agenda_filter_type": "special", "agenda_type": "ietf", "create_wiki": true, @@ -3806,10 +4410,28 @@ "customize_workflow": false, "default_parent": "", "default_tab": "ietf.group.views.group_about", - "default_used_roles": "[\n \"ad\",\n \"member\",\n 
\"delegate\",\n \"secr\",\n \"liaison\",\n \"atlarge\",\n \"chair\",\n \"matman\",\n \"techadv\"\n]", - "docman_roles": "[\n \"chair\"\n]", - "groupman_authroles": "[\n \"Secretariat\",\n \"Area Director\"\n]", - "groupman_roles": "[\n \"chair\",\n \"delegate\"\n]", + "default_used_roles": [ + "ad", + "member", + "delegate", + "secr", + "liaison", + "atlarge", + "chair", + "matman", + "techadv" + ], + "docman_roles": [ + "chair" + ], + "groupman_authroles": [ + "Secretariat", + "Area Director" + ], + "groupman_roles": [ + "chair", + "delegate" + ], "has_chartering_process": false, "has_default_chat": false, "has_documents": false, @@ -3819,15 +4441,29 @@ "has_reviews": false, "has_session_materials": true, "is_schedulable": true, - "material_types": "[\n \"slides\"\n]", - "matman_roles": "[\n \"chair\",\n \"matman\"\n]", + "material_types": [ + "slides" + ], + "matman_roles": [ + "chair", + "matman" + ], "need_parent": false, "parent_types": [ "area" ], "req_subm_approval": false, - "role_order": "[\n \"chair\",\n \"member\",\n \"matman\"\n]", - "session_purposes": "[\n \"coding\",\n \"presentation\",\n \"social\",\n \"tutorial\"\n]", + "role_order": [ + "chair", + "member", + "matman" + ], + "session_purposes": [ + "coding", + "presentation", + "social", + "tutorial" + ], "show_on_agenda": true }, "model": "group.groupfeatures", @@ -3837,7 +4473,9 @@ "fields": { "about_page": "ietf.group.views.group_about", "acts_like_wg": true, - "admin_roles": "[\n \"chair\"\n]", + "admin_roles": [ + "chair" + ], "agenda_filter_type": "normal", "agenda_type": "ietf", "create_wiki": true, @@ -3845,10 +4483,32 @@ "customize_workflow": true, "default_parent": "", "default_tab": "ietf.group.views.group_documents", - "default_used_roles": "[\n \"ad\",\n \"editor\",\n \"delegate\",\n \"secr\",\n \"chair\",\n \"matman\",\n \"techadv\",\n \"liaison_contact\",\n \"liaison_cc_contact\"\n]", - "docman_roles": "[\n \"chair\",\n \"delegate\",\n \"secr\"\n]", - "groupman_authroles": "[\n 
\"Secretariat\",\n \"Area Director\"\n]", - "groupman_roles": "[\n \"ad\",\n \"chair\",\n \"delegate\",\n \"secr\"\n]", + "default_used_roles": [ + "ad", + "editor", + "delegate", + "secr", + "chair", + "matman", + "techadv", + "liaison_contact", + "liaison_cc_contact" + ], + "docman_roles": [ + "chair", + "delegate", + "secr" + ], + "groupman_authroles": [ + "Secretariat", + "Area Director" + ], + "groupman_roles": [ + "ad", + "chair", + "delegate", + "secr" + ], "has_chartering_process": true, "has_default_chat": true, "has_documents": true, @@ -3858,15 +4518,28 @@ "has_reviews": false, "has_session_materials": true, "is_schedulable": true, - "material_types": "[\n \"slides\"\n]", - "matman_roles": "[\n \"ad\",\n \"chair\",\n \"delegate\",\n \"secr\"\n]", + "material_types": [ + "slides" + ], + "matman_roles": [ + "ad", + "chair", + "delegate", + "secr" + ], "need_parent": false, "parent_types": [ "area" ], "req_subm_approval": true, - "role_order": "[\n \"chair\",\n \"secr\",\n \"delegate\"\n]", - "session_purposes": "[\n \"regular\"\n]", + "role_order": [ + "chair", + "secr", + "delegate" + ], + "session_purposes": [ + "regular" + ], "show_on_agenda": true }, "model": "group.groupfeatures", @@ -5392,6 +6065,21 @@ "model": "mailtrigger.mailtrigger", "pk": "review_completed_opsdir_telechat" }, + { + "fields": { + "cc": [ + "ietf_last_call", + "review_doc_all_parties", + "review_doc_group_mail_list" + ], + "desc": "Recipients when a perfmetrdir IETF Last Call review is completed", + "to": [ + "review_team_mail_list" + ] + }, + "model": "mailtrigger.mailtrigger", + "pk": "review_completed_perfmetrdir_lc" + }, { "fields": { "cc": [ @@ -13849,7 +14537,10 @@ "name": "Administrative", "on_agenda": true, "order": 5, - "timeslot_types": "[\n \"other\",\n \"reg\"\n]", + "timeslot_types": [ + "other", + "reg" + ], "used": true }, "model": "name.sessionpurposename", @@ -13861,7 +14552,10 @@ "name": "Closed meeting", "on_agenda": false, "order": 10, - "timeslot_types": "[\n 
\"other\",\n \"regular\"\n]", + "timeslot_types": [ + "other", + "regular" + ], "used": true }, "model": "name.sessionpurposename", @@ -13873,7 +14567,9 @@ "name": "Coding", "on_agenda": true, "order": 4, - "timeslot_types": "[\n \"other\"\n]", + "timeslot_types": [ + "other" + ], "used": true }, "model": "name.sessionpurposename", @@ -13885,7 +14581,7 @@ "name": "None", "on_agenda": true, "order": 0, - "timeslot_types": "[]", + "timeslot_types": [], "used": false }, "model": "name.sessionpurposename", @@ -13897,7 +14593,9 @@ "name": "Office hours", "on_agenda": true, "order": 3, - "timeslot_types": "[\n \"other\"\n]", + "timeslot_types": [ + "other" + ], "used": true }, "model": "name.sessionpurposename", @@ -13909,7 +14607,9 @@ "name": "Open meeting", "on_agenda": true, "order": 9, - "timeslot_types": "[\n \"other\"\n]", + "timeslot_types": [ + "other" + ], "used": true }, "model": "name.sessionpurposename", @@ -13921,7 +14621,9 @@ "name": "Plenary", "on_agenda": true, "order": 7, - "timeslot_types": "[\n \"plenary\"\n]", + "timeslot_types": [ + "plenary" + ], "used": true }, "model": "name.sessionpurposename", @@ -13933,7 +14635,10 @@ "name": "Presentation", "on_agenda": true, "order": 8, - "timeslot_types": "[\n \"other\",\n \"regular\"\n]", + "timeslot_types": [ + "other", + "regular" + ], "used": true }, "model": "name.sessionpurposename", @@ -13945,7 +14650,9 @@ "name": "Regular", "on_agenda": true, "order": 1, - "timeslot_types": "[\n \"regular\"\n]", + "timeslot_types": [ + "regular" + ], "used": true }, "model": "name.sessionpurposename", @@ -13957,7 +14664,10 @@ "name": "Social", "on_agenda": true, "order": 6, - "timeslot_types": "[\n \"break\",\n \"other\"\n]", + "timeslot_types": [ + "break", + "other" + ], "used": true }, "model": "name.sessionpurposename", @@ -13969,7 +14679,9 @@ "name": "Tutorial", "on_agenda": true, "order": 2, - "timeslot_types": "[\n \"other\"\n]", + "timeslot_types": [ + "other" + ], "used": true }, "model": 
"name.sessionpurposename", diff --git a/ietf/name/migrations/0019_alter_sessionpurposename_timeslot_types.py b/ietf/name/migrations/0019_alter_sessionpurposename_timeslot_types.py new file mode 100644 index 0000000000..a0ca81836d --- /dev/null +++ b/ietf/name/migrations/0019_alter_sessionpurposename_timeslot_types.py @@ -0,0 +1,27 @@ +# Copyright The IETF Trust 2025, All Rights Reserved + +from django.db import migrations, models +import ietf.utils.validators + + +class Migration(migrations.Migration): + dependencies = [ + ("name", "0018_alter_rolenames"), + ] + + operations = [ + migrations.AlterField( + model_name="sessionpurposename", + name="timeslot_types", + field=models.JSONField( + default=list, + help_text="Allowed TimeSlotTypeNames", + max_length=256, + validators=[ + ietf.utils.validators.JSONForeignKeyListValidator( + "name.TimeSlotTypeName" + ) + ], + ), + ), + ] diff --git a/ietf/name/models.py b/ietf/name/models.py index 0e87d43548..24104c5f45 100644 --- a/ietf/name/models.py +++ b/ietf/name/models.py @@ -1,8 +1,6 @@ # Copyright The IETF Trust 2010-2020, All Rights Reserved # -*- coding: utf-8 -*- -import jsonfield - from django.db import models from ietf.utils.models import ForeignKey @@ -73,8 +71,8 @@ class SessionStatusName(NameModel): """Waiting for Approval, Approved, Waiting for Scheduling, Scheduled, Cancelled, Disapproved""" class SessionPurposeName(NameModel): """Regular, Tutorial, Office Hours, Coding, Social, Admin""" - timeslot_types = jsonfield.JSONField( - max_length=256, blank=False, default=[], + timeslot_types = models.JSONField( + max_length=256, blank=False, default=list, help_text='Allowed TimeSlotTypeNames', validators=[JSONForeignKeyListValidator('name.TimeSlotTypeName')], ) diff --git a/ietf/person/migrations/0005_alter_historicalperson_pronouns_selectable_and_more.py b/ietf/person/migrations/0005_alter_historicalperson_pronouns_selectable_and_more.py new file mode 100644 index 0000000000..2af874b1fa --- /dev/null +++ 
b/ietf/person/migrations/0005_alter_historicalperson_pronouns_selectable_and_more.py @@ -0,0 +1,34 @@ +# Copyright The IETF Trust 2025, All Rights Reserved + +from django.db import migrations, models + + +class Migration(migrations.Migration): + dependencies = [ + ("person", "0004_alter_person_photo_alter_person_photo_thumb"), + ] + + operations = [ + migrations.AlterField( + model_name="historicalperson", + name="pronouns_selectable", + field=models.JSONField( + blank=True, + default=list, + max_length=120, + null=True, + verbose_name="Pronouns", + ), + ), + migrations.AlterField( + model_name="person", + name="pronouns_selectable", + field=models.JSONField( + blank=True, + default=list, + max_length=120, + null=True, + verbose_name="Pronouns", + ), + ), + ] diff --git a/ietf/person/models.py b/ietf/person/models.py index 93364478ae..03cf0c87fb 100644 --- a/ietf/person/models.py +++ b/ietf/person/models.py @@ -4,7 +4,6 @@ import email.utils import email.header -import jsonfield import uuid from hashids import Hashids @@ -57,7 +56,7 @@ class Person(models.Model): ascii = models.CharField("Full Name (ASCII)", max_length=255, help_text="Name as rendered in ASCII (Latin, unaccented) characters.", validators=[name_character_validator]) # The short ascii-form of the name. Also in alias table if non-null ascii_short = models.CharField("Abbreviated Name (ASCII)", max_length=32, null=True, blank=True, help_text="Example: A. Nonymous. Fill in this with initials and surname only if taking the initials and surname of the ASCII name above produces an incorrect initials-only form. 
(Blank is OK).", validators=[name_character_validator]) - pronouns_selectable = jsonfield.JSONCharField("Pronouns", max_length=120, blank=True, null=True, default=list ) + pronouns_selectable = models.JSONField("Pronouns", max_length=120, blank=True, null=True, default=list ) pronouns_freetext = models.CharField(" ", max_length=30, null=True, blank=True, help_text="Optionally provide your personal pronouns. These will be displayed on your public profile page and alongside your name in Meetecho and, in future, other systems. Select any number of the checkboxes OR provide a custom string up to 30 characters.") biography = models.TextField(blank=True, help_text="Short biography for use on leadership pages. Use plain text or reStructuredText markup.") photo = models.ImageField( diff --git a/ietf/submit/migrations/0003_alter_submission_authors_alter_submissioncheck_items.py b/ietf/submit/migrations/0003_alter_submission_authors_alter_submissioncheck_items.py new file mode 100644 index 0000000000..2c51659204 --- /dev/null +++ b/ietf/submit/migrations/0003_alter_submission_authors_alter_submissioncheck_items.py @@ -0,0 +1,46 @@ +# Copyright The IETF Trust 2025, All Rights Reserved + +from django.db import migrations, models + + +def forward(apps, schema_editor): + JSON_ENCODED_NULL = r"\u0000" + NULL = "\x00" + NUL_SYMBOL = "\u2400" # Unicode single-char "NUL" symbol + + Submission = apps.get_model("submit", "Submission") + # The qs filter sees the serialized JSON string... 
+ null_in_authors = Submission.objects.filter(authors__contains=JSON_ENCODED_NULL) + for submission in null_in_authors: + # submission.authors is now deserialized into Python objects + for author in submission.authors: + for k in author: + author[k] = author[k].replace(NULL, NUL_SYMBOL) + submission.save() + + +def reverse(apps, schema_editor): + pass # don't restore invalid data + + +class Migration(migrations.Migration): + dependencies = [ + ("submit", "0002_alter_submission_xml_version"), + ] + + operations = [ + migrations.RunPython(forward, reverse), + migrations.AlterField( + model_name="submission", + name="authors", + field=models.JSONField( + default=list, + help_text="List of authors with name, email, affiliation and country.", + ), + ), + migrations.AlterField( + model_name="submissioncheck", + name="items", + field=models.JSONField(blank=True, default=dict, null=True), + ), + ] diff --git a/ietf/submit/models.py b/ietf/submit/models.py index 1145f761b4..576ba3e114 100644 --- a/ietf/submit/models.py +++ b/ietf/submit/models.py @@ -3,7 +3,6 @@ import email -import jsonfield from django.db import models from django.utils import timezone @@ -46,7 +45,9 @@ class Submission(models.Model): words = models.IntegerField(null=True, blank=True) formal_languages = models.ManyToManyField(FormalLanguageName, blank=True, help_text="Formal languages used in document") - authors = jsonfield.JSONField(default=list, help_text="List of authors with name, email, affiliation and country.") + authors = models.JSONField(default=list, help_text="List of authors with name, email, affiliation and country.") + # Schema note: authors is a list of authors. Each author is a JSON object with + # "name", "email", "affiliation", and "country" keys. All values are strings. 
note = models.TextField(blank=True) replaces = models.CharField(max_length=1000, blank=True) @@ -135,7 +136,7 @@ class SubmissionCheck(models.Model): message = models.TextField(null=True, blank=True) errors = models.IntegerField(null=True, blank=True, default=None) warnings = models.IntegerField(null=True, blank=True, default=None) - items = jsonfield.JSONField(null=True, blank=True, default='{}') + items = models.JSONField(null=True, blank=True, default=dict) symbol = models.CharField(max_length=64, default='') # def __str__(self): diff --git a/ietf/utils/db.py b/ietf/utils/db.py index d451f6cfd8..865c9b999f 100644 --- a/ietf/utils/db.py +++ b/ietf/utils/db.py @@ -6,12 +6,34 @@ # # JSONField should recognize {}, (), and [] as valid, non-empty JSON # values. However, the base Field class excludes them + import jsonfield +from django.db import models + +from ietf.utils.fields import IETFJSONField as FormIETFJSONField, EmptyAwareJSONField as FormEmptyAwareJSONField + + +class EmptyAwareJSONField(models.JSONField): + form_class = FormEmptyAwareJSONField + + def __init__(self, *args, empty_values=FormEmptyAwareJSONField.empty_values, accepted_empty_values=None, **kwargs): + if accepted_empty_values is None: + accepted_empty_values = [] + self.empty_values = [x + for x in empty_values + if x not in accepted_empty_values] + super().__init__(*args, **kwargs) -from ietf.utils.fields import IETFJSONField as FormIETFJSONField + def formfield(self, **kwargs): + if 'form_class' not in kwargs or issubclass(kwargs['form_class'], FormEmptyAwareJSONField): + kwargs.setdefault('empty_values', self.empty_values) + return super().formfield(**{**kwargs}) -class IETFJSONField(jsonfield.JSONField): +class IETFJSONField(jsonfield.JSONField): # pragma: no cover + # Deprecated - use EmptyAwareJSONField instead (different base class requires a + # new field name) + # Remove this class when migrations are squashed and it is no longer referenced form_class = FormIETFJSONField def 
__init__(self, *args, empty_values=FormIETFJSONField.empty_values, accepted_empty_values=None, **kwargs): diff --git a/ietf/utils/fields.py b/ietf/utils/fields.py index 3e6f56d45e..ba3fecebc6 100644 --- a/ietf/utils/fields.py +++ b/ietf/utils/fields.py @@ -6,8 +6,6 @@ import json import re -import jsonfield - import debug # pyflakes:ignore from typing import Optional, Type # pyflakes:ignore @@ -328,8 +326,21 @@ def has_changed(self, initial, data): return super().has_changed(initial, data) -class IETFJSONField(jsonfield.fields.forms.JSONField): - def __init__(self, *args, empty_values=jsonfield.fields.forms.JSONField.empty_values, +class IETFJSONField(forms.JSONField): # pragma: no cover + # Deprecated - use EmptyAwareJSONField instead + def __init__(self, *args, empty_values=forms.JSONField.empty_values, + accepted_empty_values=None, **kwargs): + if accepted_empty_values is None: + accepted_empty_values = [] + self.empty_values = [x + for x in empty_values + if x not in accepted_empty_values] + + super().__init__(*args, **kwargs) + + +class EmptyAwareJSONField(forms.JSONField): + def __init__(self, *args, empty_values=forms.JSONField.empty_values, accepted_empty_values=None, **kwargs): if accepted_empty_values is None: accepted_empty_values = [] From 711313d983d1d5ec9afe8c014a76959e04c04632 Mon Sep 17 00:00:00 2001 From: Jennifer Richards Date: Tue, 19 Aug 2025 12:08:11 -0300 Subject: [PATCH 179/405] chore: postgresql-client-17 for base.Dockerfile (#9378) --- docker/base.Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/base.Dockerfile b/docker/base.Dockerfile index f364456c7a..57aac8ee56 100644 --- a/docker/base.Dockerfile +++ b/docker/base.Dockerfile @@ -69,7 +69,7 @@ RUN apt-get update --fix-missing && apt-get install -qy --no-install-recommends nodejs \ pgloader \ pigz \ - postgresql-client-14 \ + postgresql-client-17 \ pv \ python3-ipython \ ripgrep \ From 811216838823507b911635967a012bc41f43edd5 Mon Sep 17 00:00:00 2001 
From: rjsparks <10996692+rjsparks@users.noreply.github.com> Date: Tue, 19 Aug 2025 15:21:46 +0000 Subject: [PATCH 180/405] ci: update base image target version to 20250819T1508 --- dev/build/Dockerfile | 2 +- dev/build/TARGET_BASE | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/dev/build/Dockerfile b/dev/build/Dockerfile index 0ecf9566ef..756b7021b7 100644 --- a/dev/build/Dockerfile +++ b/dev/build/Dockerfile @@ -1,4 +1,4 @@ -FROM ghcr.io/ietf-tools/datatracker-app-base:20250807T1514 +FROM ghcr.io/ietf-tools/datatracker-app-base:20250819T1508 LABEL maintainer="IETF Tools Team " ENV DEBIAN_FRONTEND=noninteractive diff --git a/dev/build/TARGET_BASE b/dev/build/TARGET_BASE index 327fb48da4..2e0f1519da 100644 --- a/dev/build/TARGET_BASE +++ b/dev/build/TARGET_BASE @@ -1 +1 @@ -20250807T1514 +20250819T1508 From 566cf50b39fa4b0e7882a12b8ce977c29192009f Mon Sep 17 00:00:00 2001 From: "A. Nour" Date: Tue, 19 Aug 2025 17:52:56 +0100 Subject: [PATCH 181/405] feat: add auto-generated IPR email notifications on WG Call for Adoption or WG Last Call (#9322) * feat(documents): add auto-generated IPR email notifications on WG Call for Adoption or WG Last Call * fix: edit call durations in mails and call logic under new_state in views * fix: calc end_date as 7 * call_duration * feat(mailtrigger): added new mailtrigger for wg-lc and rfc stream states * test: add mailtrigger test fixtures and new tests * fix: use two action-oriented mailtrigger names The two actions have the same recipients to start with, but that may change over time. Mailtrigger names should describe "what happened to trigger this email?". Changed the utility names to match the actions. * fix: send from whomever issued the call Using a list name as the From will not work - the mail infrastructure blocks such mail when it is submitted. 
* chore: revert ietf/doc/tests_draft.py * fix: trigger call for adoption email from manage adoption view * fix: changed template names to match functions * fix: match the subject requested in the issue * fix: Initial tests * fix: pass duration to the email message generator * fix: only issue the c-adopt and wg-lc email for ietf-stream docs * chore: remove stray whitespace --------- Co-authored-by: Robert Sparks --- ietf/doc/mails.py | 55 +++++ ietf/doc/tests_draft.py | 205 +++++++++++++++++- ietf/doc/views_draft.py | 17 +- ..._call_for_adoption_and_last_call_issued.py | 43 ++++ ietf/name/fixtures/names.json | 28 +++ .../doc/mail/wg_call_for_adoption_issued.txt | 21 ++ .../doc/mail/wg_last_call_issued.txt | 22 ++ 7 files changed, 385 insertions(+), 6 deletions(-) create mode 100644 ietf/mailtrigger/migrations/0006_call_for_adoption_and_last_call_issued.py create mode 100644 ietf/templates/doc/mail/wg_call_for_adoption_issued.txt create mode 100644 ietf/templates/doc/mail/wg_last_call_issued.txt diff --git a/ietf/doc/mails.py b/ietf/doc/mails.py index ddecbb6b54..f20d398c3c 100644 --- a/ietf/doc/mails.py +++ b/ietf/doc/mails.py @@ -103,6 +103,61 @@ def email_stream_changed(request, doc, old_stream, new_stream, text=""): dict(text=text, url=settings.IDTRACKER_BASE_URL + doc.get_absolute_url()), cc=cc) + +def email_wg_call_for_adoption_issued(request, doc, cfa_duration_weeks=None): + if cfa_duration_weeks is None: + cfa_duration_weeks=2 + (to, cc) = gather_address_lists("doc_wg_call_for_adoption_issued", doc=doc) + frm = request.user.person.formatted_email() + + end_date = date_today(DEADLINE_TZINFO) + datetime.timedelta(days=7 * cfa_duration_weeks) + + subject = f"Call for adoption: {doc.name}-{doc.rev} (Ends {end_date})" + + send_mail( + request, + to, + frm, + subject, + "doc/mail/wg_call_for_adoption_issued.txt", + dict( + doc=doc, + subject=subject, + url=settings.IDTRACKER_BASE_URL + doc.get_absolute_url(), + end_date=end_date, + 
cfa_duration_weeks=cfa_duration_weeks, + wg_list=doc.group.list_email, + ), + cc=cc, + ) + + +def email_wg_last_call_issued(request, doc, wglc_duration_weeks=None): + if wglc_duration_weeks is None: + wglc_duration_weeks = 2 + (to, cc) = gather_address_lists("doc_wg_last_call_issued", doc=doc) + frm = request.user.person.formatted_email() + + + end_date = date_today(DEADLINE_TZINFO) + datetime.timedelta(days=7 * wglc_duration_weeks) + subject = f"WG Last Call: {doc.name}-{doc.rev} (Ends {end_date})" + + send_mail( + request, + to, + frm, + subject, + "doc/mail/wg_last_call_issued.txt", + dict( + doc=doc, + subject=subject, + url=settings.IDTRACKER_BASE_URL + doc.get_absolute_url(), + end_date=end_date, + wglc_duration_weeks=wglc_duration_weeks, + wg_list=doc.group.list_email, + ), + cc=cc, + ) def email_pulled_from_rfc_queue(request, doc, comment, prev_state, next_state): extra=extra_automation_headers(doc) diff --git a/ietf/doc/tests_draft.py b/ietf/doc/tests_draft.py index 576feb0582..ab7eaba768 100644 --- a/ietf/doc/tests_draft.py +++ b/ietf/doc/tests_draft.py @@ -1707,11 +1707,12 @@ def test_adopt_document(self): self.assertEqual(draft.group, chair_role.group) self.assertEqual(draft.stream_id, stream_state_type_slug[type_id][13:]) # trim off "draft-stream-" self.assertEqual(draft.docevent_set.count() - events_before, 5) - self.assertEqual(len(outbox), 1) - self.assertTrue("Call For Adoption" in outbox[-1]["Subject"]) - self.assertTrue(f"{chair_role.group.acronym}-chairs@" in outbox[-1]['To']) - self.assertTrue(f"{draft.name}@" in outbox[-1]['To']) - self.assertTrue(f"{chair_role.group.acronym}@" in outbox[-1]['To']) + self.assertEqual(len(outbox), 2) + self.assertTrue("Call For Adoption" in outbox[0]["Subject"]) + self.assertTrue(f"{chair_role.group.acronym}-chairs@" in outbox[0]['To']) + self.assertTrue(f"{draft.name}@" in outbox[0]['To']) + self.assertTrue(f"{chair_role.group.acronym}@" in outbox[0]['To']) + # contents of outbox[1] are tested elsewhere # 
adopt empty_outbox() @@ -2001,6 +2002,200 @@ def test_set_state(self): self.assertTrue("mars-chairs@ietf.org" in outbox[0].as_string()) self.assertTrue("marsdelegate@ietf.org" in outbox[0].as_string()) + def test_wg_call_for_adoption_issued(self): + role = RoleFactory( + name_id="chair", + group__acronym="mars", + group__list_email="mars-wg@ietf.org", + person__user__username="marschairman", + person__name="WG Cháir Man", + ) + # First test the usual workflow through the manage adoption view + draft = IndividualDraftFactory() + url = urlreverse( + "ietf.doc.views_draft.adopt_draft", kwargs=dict(name=draft.name) + ) + login_testing_unauthorized(self, "marschairman", url) + empty_outbox() + call_issued = State.objects.get(type="draft-stream-ietf", slug="c-adopt") + r = self.client.post( + url, + dict( + comment="some comment", + group=role.group.pk, + newstate=call_issued.pk, + weeks="10", + ), + ) + self.assertEqual(r.status_code, 302) + self.assertEqual(len(outbox), 2) + self.assertIn("mars-wg@ietf.org", outbox[1]["To"]) + self.assertIn("Call for adoption", outbox[1]["Subject"]) + body = get_payload_text(outbox[1]) + self.assertIn("disclosure obligations", body) + self.assertIn("starts a 10-week", body) + # Test not entering a duration on the form + draft = IndividualDraftFactory() + url = urlreverse( + "ietf.doc.views_draft.adopt_draft", kwargs=dict(name=draft.name) + ) + empty_outbox() + call_issued = State.objects.get(type="draft-stream-ietf", slug="c-adopt") + r = self.client.post( + url, + dict( + comment="some comment", + group=role.group.pk, + newstate=call_issued.pk, + ), + ) + self.assertEqual(r.status_code, 302) + self.assertEqual(len(outbox), 2) + self.assertIn("mars-wg@ietf.org", outbox[1]["To"]) + self.assertIn("Call for adoption", outbox[1]["Subject"]) + body = get_payload_text(outbox[1]) + self.assertIn("disclosure obligations", body) + self.assertIn("starts a 2-week", body) + + # Test the less usual workflow of issuing a call for adoption + # of a 
document that's already in the ietf stream + draft = WgDraftFactory(group=role.group) + url = urlreverse( + "ietf.doc.views_draft.change_stream_state", + kwargs=dict(name=draft.name, state_type="draft-stream-ietf"), + ) + old_state = draft.get_state("draft-stream-%s" % draft.stream_id) + new_state = State.objects.get( + used=True, type="draft-stream-%s" % draft.stream_id, slug="c-adopt" + ) + self.assertNotEqual(old_state, new_state) + empty_outbox() + r = self.client.post( + url, + dict( + new_state=new_state.pk, + comment="some comment", + weeks="10", + tags=[ + t.pk + for t in draft.tags.filter( + slug__in=get_tags_for_stream_id(draft.stream_id) + ) + ], + ), + ) + self.assertEqual(r.status_code, 302) + self.assertEqual(len(outbox), 2) + self.assertIn("mars-wg@ietf.org", outbox[1]["To"]) + self.assertIn("Call for adoption", outbox[1]["Subject"]) + body = get_payload_text(outbox[1]) + self.assertIn("disclosure obligations", body) + self.assertIn("starts a 10-week", body) + draft = WgDraftFactory(group=role.group) + url = urlreverse( + "ietf.doc.views_draft.change_stream_state", + kwargs=dict(name=draft.name, state_type="draft-stream-ietf"), + ) + old_state = draft.get_state("draft-stream-%s" % draft.stream_id) + new_state = State.objects.get( + used=True, type="draft-stream-%s" % draft.stream_id, slug="c-adopt" + ) + self.assertNotEqual(old_state, new_state) + empty_outbox() + r = self.client.post( + url, + dict( + new_state=new_state.pk, + comment="some comment", + tags=[ + t.pk + for t in draft.tags.filter( + slug__in=get_tags_for_stream_id(draft.stream_id) + ) + ], + ), + ) + self.assertEqual(r.status_code, 302) + self.assertEqual(len(outbox), 2) + self.assertIn("mars-wg@ietf.org", outbox[1]["To"]) + self.assertIn("Call for adoption", outbox[1]["Subject"]) + body = get_payload_text(outbox[1]) + self.assertIn("disclosure obligations", body) + self.assertIn("starts a 2-week", body) + + def test_wg_last_call_issued(self): + role = RoleFactory( + name_id="chair", 
+ group__acronym="mars", + group__list_email="mars-wg@ietf.org", + person__user__username="marschairman", + person__name="WG Cháir Man", + ) + draft = WgDraftFactory(group=role.group) + url = urlreverse( + "ietf.doc.views_draft.change_stream_state", + kwargs=dict(name=draft.name, state_type="draft-stream-ietf"), + ) + login_testing_unauthorized(self, "marschairman", url) + old_state = draft.get_state("draft-stream-%s" % draft.stream_id) + new_state = State.objects.get( + used=True, type="draft-stream-%s" % draft.stream_id, slug="wg-lc" + ) + self.assertNotEqual(old_state, new_state) + empty_outbox() + r = self.client.post( + url, + dict( + new_state=new_state.pk, + comment="some comment", + weeks="10", + tags=[ + t.pk + for t in draft.tags.filter( + slug__in=get_tags_for_stream_id(draft.stream_id) + ) + ], + ), + ) + self.assertEqual(r.status_code, 302) + self.assertEqual(len(outbox), 2) + self.assertIn("mars-wg@ietf.org", outbox[1]["To"]) + self.assertIn("WG Last Call", outbox[1]["Subject"]) + body = get_payload_text(outbox[1]) + self.assertIn("disclosure obligations", body) + self.assertIn("starts a 10-week", body) + draft = WgDraftFactory(group=role.group) + url = urlreverse( + "ietf.doc.views_draft.change_stream_state", + kwargs=dict(name=draft.name, state_type="draft-stream-ietf"), + ) + old_state = draft.get_state("draft-stream-%s" % draft.stream_id) + new_state = State.objects.get( + used=True, type="draft-stream-%s" % draft.stream_id, slug="wg-lc" + ) + self.assertNotEqual(old_state, new_state) + empty_outbox() + r = self.client.post( + url, + dict( + new_state=new_state.pk, + comment="some comment", + tags=[ + t.pk + for t in draft.tags.filter( + slug__in=get_tags_for_stream_id(draft.stream_id) + ) + ], + ), + ) + self.assertEqual(r.status_code, 302) + self.assertEqual(len(outbox), 2) + self.assertIn("mars-wg@ietf.org", outbox[1]["To"]) + self.assertIn("WG Last Call", outbox[1]["Subject"]) + body = get_payload_text(outbox[1]) + self.assertIn("disclosure 
obligations", body) + self.assertIn("starts a 2-week", body) + def test_pubreq_validation(self): role = RoleFactory(name_id='chair',group__acronym='mars',group__list_email='mars-wg@ietf.org',person__user__username='marschairman',person__name='WG Cháir Man') RoleFactory(name_id='delegate',group=role.group,person__user__email='marsdelegate@ietf.org') diff --git a/ietf/doc/views_draft.py b/ietf/doc/views_draft.py index c80537afb3..16d04ee66a 100644 --- a/ietf/doc/views_draft.py +++ b/ietf/doc/views_draft.py @@ -28,6 +28,7 @@ IanaExpertDocEvent, IESG_SUBSTATE_TAGS) from ietf.doc.mails import ( email_pulled_from_rfc_queue, email_resurrect_requested, email_resurrection_completed, email_state_changed, email_stream_changed, + email_wg_call_for_adoption_issued, email_wg_last_call_issued, email_stream_state_changed, email_stream_tags_changed, extra_automation_headers, generate_publication_request, email_adopted, email_intended_status_changed, email_iesg_processing_document, email_ad_approved_doc, @@ -1568,8 +1569,15 @@ def adopt_draft(request, name): update_reminder(doc, "stream-s", e, due_date) + # The following call name is very misleading - the view allows + # setting states that are _not_ the adopted state. email_adopted(request, doc, prev_state, new_state, by, comment) + # Currently only the IETF stream uses the c-adopt state - guard against other + streams starting to use it as the IPR rules for those streams will be different.
+ if doc.stream_id == "ietf" and new_state.slug == "c-adopt": + email_wg_call_for_adoption_issued(request, doc, cfa_duration_weeks=form.cleaned_data["weeks"]) + # comment if comment: e = DocEvent(type="added_comment", doc=doc, rev=doc.rev, by=by) @@ -1754,13 +1762,20 @@ def change_stream_state(request, name, state_type): events.append(e) due_date = None - if form.cleaned_data["weeks"] != None: + if form.cleaned_data["weeks"] is not None: due_date = datetime_today(DEADLINE_TZINFO) + datetime.timedelta(weeks=form.cleaned_data["weeks"]) update_reminder(doc, "stream-s", e, due_date) email_stream_state_changed(request, doc, prev_state, new_state, by, comment) + if doc.stream_id == "ietf": + if new_state.slug == "c-adopt": + email_wg_call_for_adoption_issued(request, doc, cfa_duration_weeks=form.cleaned_data["weeks"]) + + if new_state.slug == "wg-lc": + email_wg_last_call_issued(request, doc, wglc_duration_weeks=form.cleaned_data["weeks"]) + # tags existing_tags = set(doc.tags.all()) new_tags = set(form.cleaned_data["tags"]) diff --git a/ietf/mailtrigger/migrations/0006_call_for_adoption_and_last_call_issued.py b/ietf/mailtrigger/migrations/0006_call_for_adoption_and_last_call_issued.py new file mode 100644 index 0000000000..7adad150eb --- /dev/null +++ b/ietf/mailtrigger/migrations/0006_call_for_adoption_and_last_call_issued.py @@ -0,0 +1,43 @@ +# Copyright The IETF Trust 2023, All Rights Reserved + +from django.db import migrations + + +def forward(apps, schema_editor): + MailTrigger = apps.get_model("mailtrigger", "MailTrigger") + Recipient = apps.get_model("mailtrigger", "Recipient") + recipients = list( + Recipient.objects.filter( + slug__in=( + "doc_group_mail_list", + "doc_authors", + "doc_group_chairs", + "doc_shepherd", + ) + ) + ) + call_for_adoption = MailTrigger.objects.create( + slug="doc_wg_call_for_adoption_issued", + desc="Recipients when a working group call for adoption is issued", + ) + call_for_adoption.to.add(*recipients) + wg_last_call = 
MailTrigger.objects.create( + slug="doc_wg_last_call_issued", + desc="Recipients when a working group last call is issued", + ) + wg_last_call.to.add(*recipients) + + +def reverse(apps, schema_editor): + MailTrigger = apps.get_model("mailtrigger", "MailTrigger") + MailTrigger.objects.filter( + slug__in=("doc_wg_call_for_adoption_issued", "doc_wg_last_call_issued") + ).delete() + + +class Migration(migrations.Migration): + dependencies = [ + ("mailtrigger", "0005_rfc_recipients"), + ] + + operations = [migrations.RunPython(forward, reverse)] diff --git a/ietf/name/fixtures/names.json b/ietf/name/fixtures/names.json index 0724cbb4b5..c94e15a459 100644 --- a/ietf/name/fixtures/names.json +++ b/ietf/name/fixtures/names.json @@ -5116,6 +5116,34 @@ "model": "mailtrigger.mailtrigger", "pk": "doc_telechat_details_changed" }, + { + "fields": { + "cc": [], + "desc": "Recipients when a working group call for adoption is issued", + "to": [ + "doc_authors", + "doc_group_chairs", + "doc_group_mail_list", + "doc_shepherd" + ] + }, + "model": "mailtrigger.mailtrigger", + "pk": "doc_wg_call_for_adoption_issued" + }, + { + "fields": { + "cc": [], + "desc": "Recipients when a working group last call is issued", + "to": [ + "doc_authors", + "doc_group_chairs", + "doc_group_mail_list", + "doc_shepherd" + ] + }, + "model": "mailtrigger.mailtrigger", + "pk": "doc_wg_last_call_issued" + }, { "fields": { "cc": [], diff --git a/ietf/templates/doc/mail/wg_call_for_adoption_issued.txt b/ietf/templates/doc/mail/wg_call_for_adoption_issued.txt new file mode 100644 index 0000000000..c4a2401bc2 --- /dev/null +++ b/ietf/templates/doc/mail/wg_call_for_adoption_issued.txt @@ -0,0 +1,21 @@ +{% load ietf_filters %}{% load mail_filters %}{% autoescape off %}{% filter wordwrap:78 %} +Subject: {{ subject }} + +This message starts a {{ cfa_duration_weeks }}-week Call for Adoption for this document.
+ +Abstract: +{{ doc.abstract }} + +File can be retrieved from: +{{ url }} + +Please reply to this message keeping {{ wg_list }} in copy by indicating whether you support or not the adoption of this draft as a WG document. Comments to motivate your preference are highly appreciated. + +Authors, and WG participants in general, are reminded of the Intellectual Property Rights (IPR) disclosure obligations described in BCP 79 [2]. Appropriate IPR disclosures required for full conformance with the provisions of BCP 78 [1] and BCP 79 [2] must be filed, if you are aware of any. Sanctions available for application to violators of IETF IPR Policy can be found at [3]. + +Thank you. +[1] https://datatracker.ietf.org/doc/bcp78/ +[2] https://datatracker.ietf.org/doc/bcp79/ +[3] https://datatracker.ietf.org/doc/rfc6701/ +{% endfilter %} +{% endautoescape %} diff --git a/ietf/templates/doc/mail/wg_last_call_issued.txt b/ietf/templates/doc/mail/wg_last_call_issued.txt new file mode 100644 index 0000000000..35b1e149d7 --- /dev/null +++ b/ietf/templates/doc/mail/wg_last_call_issued.txt @@ -0,0 +1,22 @@ +{% load ietf_filters %}{% load mail_filters %}{% autoescape off %}{% filter wordwrap:78 %} +Subject: {{ subject }} + +This message starts a {{ wglc_duration_weeks }}-week WG Last Call for this document. + +Abstract: +{{ doc.abstract }} + +File can be retrieved from: +{{ url }} + +Please review and indicate your support or objection to proceed with the publication of this document by replying to this email keeping {{ wg_list }} in copy. Objections should be motivated and suggestions to resolve them are highly appreciated. + +Authors, and WG participants in general, are reminded again of the Intellectual Property Rights (IPR) disclosure obligations described in BCP 79 [2]. Appropriate IPR disclosures required for full conformance with the provisions of BCP 78 [1] and BCP 79 [2] must be filed, if you are aware of any.
Sanctions available for application to violators of IETF IPR Policy can be found at [3]. + +Thank you. + +[1] https://datatracker.ietf.org/doc/bcp78/ +[2] https://datatracker.ietf.org/doc/bcp79/ +[3] https://datatracker.ietf.org/doc/rfc6701/ +{% endfilter %} +{% endautoescape %} From 1e451fbac105b18a3539c406b8380890ee701ef8 Mon Sep 17 00:00:00 2001 From: Jennifer Richards Date: Tue, 19 Aug 2025 18:12:38 -0300 Subject: [PATCH 182/405] ci: replace DOCKER_BUILD_NO_SUMMARY (#9383) Option is deprecated; replace with DOCKER_BUILD_SUMMARY: false --- .github/workflows/build-base-app.yml | 2 +- .github/workflows/build-devblobstore.yml | 2 +- .github/workflows/build-mq-broker.yml | 2 +- .github/workflows/dev-assets-sync-nightly.yml | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/build-base-app.yml b/.github/workflows/build-base-app.yml index 479cd7cadf..ef8a17f6b4 100644 --- a/.github/workflows/build-base-app.yml +++ b/.github/workflows/build-base-app.yml @@ -43,7 +43,7 @@ jobs: - name: Docker Build & Push uses: docker/build-push-action@v6 env: - DOCKER_BUILD_NO_SUMMARY: true + DOCKER_BUILD_SUMMARY: false with: context: . file: docker/base.Dockerfile diff --git a/.github/workflows/build-devblobstore.yml b/.github/workflows/build-devblobstore.yml index 2d8214b448..f49a11af19 100644 --- a/.github/workflows/build-devblobstore.yml +++ b/.github/workflows/build-devblobstore.yml @@ -35,7 +35,7 @@ jobs: - name: Docker Build & Push uses: docker/build-push-action@v6 env: - DOCKER_BUILD_NO_SUMMARY: true + DOCKER_BUILD_SUMMARY: false with: context: . 
file: docker/devblobstore.Dockerfile diff --git a/.github/workflows/build-mq-broker.yml b/.github/workflows/build-mq-broker.yml index 85c27c23cc..4de861dbcd 100644 --- a/.github/workflows/build-mq-broker.yml +++ b/.github/workflows/build-mq-broker.yml @@ -42,7 +42,7 @@ jobs: - name: Docker Build & Push uses: docker/build-push-action@v6 env: - DOCKER_BUILD_NO_SUMMARY: true + DOCKER_BUILD_SUMMARY: false with: context: . file: dev/mq/Dockerfile diff --git a/.github/workflows/dev-assets-sync-nightly.yml b/.github/workflows/dev-assets-sync-nightly.yml index a7fe67f012..19933bddfd 100644 --- a/.github/workflows/dev-assets-sync-nightly.yml +++ b/.github/workflows/dev-assets-sync-nightly.yml @@ -41,7 +41,7 @@ jobs: - name: Docker Build & Push uses: docker/build-push-action@v6 env: - DOCKER_BUILD_NO_SUMMARY: true + DOCKER_BUILD_SUMMARY: false with: context: . file: dev/shared-assets-sync/Dockerfile From b7da3d7a779f310f765f205119624229d9a860d7 Mon Sep 17 00:00:00 2001 From: Jennifer Richards Date: Thu, 21 Aug 2025 10:25:49 -0300 Subject: [PATCH 183/405] fix: escape linkify filter input (#9389) * fix: escape linkify filter input * test: exercise linkify * chore: lint --- ietf/utils/templatetags/tests.py | 66 ++++++++++++++++++++++++++ ietf/utils/templatetags/textfilters.py | 10 ++-- ietf/utils/text.py | 6 +++ 3 files changed, 79 insertions(+), 3 deletions(-) diff --git a/ietf/utils/templatetags/tests.py b/ietf/utils/templatetags/tests.py index a93bf2d94d..859319be3d 100644 --- a/ietf/utils/templatetags/tests.py +++ b/ietf/utils/templatetags/tests.py @@ -3,6 +3,7 @@ from django.template import Context, Origin, Template from django.test import override_settings +from ietf.utils.templatetags.textfilters import linkify from ietf.utils.test_utils import TestCase import debug # pyflakes: ignore @@ -39,3 +40,68 @@ def test_origin_outside_base_dir(self): output = template.render(Context()) self.assertNotIn(component, output, 'Full path components should not be revealed in html') + + 
+class TextfiltersTests(TestCase): + def test_linkify(self): + # Cases with autoescape = True (the default) + self.assertEqual( + linkify("plain string"), + "plain string", + ) + self.assertEqual( + linkify("https://www.ietf.org"), + 'https://www.ietf.org', + ) + self.assertEqual( + linkify('IETF'), + ( + '<a href="https://www.ietf.org">IETF</a>' + ), + ) + self.assertEqual( + linkify("somebody@example.com"), + 'somebody@example.com', + ) + self.assertEqual( + linkify("Some Body "), + ( + 'Some Body <' + 'somebody@example.com>' + ), + ) + self.assertEqual( + linkify(""), + "<script>alert('h4x0r3d');</script>", + ) + + # Cases with autoescape = False (these are dangerous and assume the caller + # has sanitized already) + self.assertEqual( + linkify("plain string", autoescape=False), + "plain string", + ) + self.assertEqual( + linkify("https://www.ietf.org", autoescape=False), + 'https://www.ietf.org', + ) + self.assertEqual( + linkify('IETF', autoescape=False), + 'IETF', + ) + self.assertEqual( + linkify("somebody@example.com", autoescape=False), + 'somebody@example.com', + ) + # bleach.Linkifier translates the < -> < and > -> > on this one + self.assertEqual( + linkify("Some Body ", autoescape=False), + ( + 'Some Body <' + 'somebody@example.com>' + ), + ) + self.assertEqual( + linkify("", autoescape=False), + "", + ) diff --git a/ietf/utils/templatetags/textfilters.py b/ietf/utils/templatetags/textfilters.py index 3b240740e0..e3bfbe0c56 100644 --- a/ietf/utils/templatetags/textfilters.py +++ b/ietf/utils/templatetags/textfilters.py @@ -7,6 +7,7 @@ from django import template from django.conf import settings from django.template.defaultfilters import stringfilter +from django.utils.html import conditional_escape from django.utils.safestring import mark_safe import debug # pyflakes:ignore @@ -71,10 +72,13 @@ def texescape_filter(value): "A TeX escape filter" return texescape(value) -@register.filter +@register.filter(needs_autoescape=True) @stringfilter -def 
linkify(value): - text = mark_safe(_linkify(value)) +def linkify(value, autoescape=True): + if autoescape: + # Escape unless the input was already a SafeString + value = conditional_escape(value) + text = mark_safe(_linkify(value)) # _linkify is a safe operation return text @register.filter diff --git a/ietf/utils/text.py b/ietf/utils/text.py index 4e5d5b6cd5..590ec3fd30 100644 --- a/ietf/utils/text.py +++ b/ietf/utils/text.py @@ -60,6 +60,12 @@ def check_url_validity(attrs, new=False): def linkify(text): + """Convert URL-ish substrings into HTML links + + This does no sanitization whatsoever. Caller must sanitize the input or output as + contextually appropriate. Do not call `mark_safe()` on the output if the input is + user-provided unless it has been sanitized or escaped. + """ return _bleach_linker.linkify(text) From 450ffd8e9e586fbfbc9c583a01ee4770de4f62d9 Mon Sep 17 00:00:00 2001 From: Jennifer Richards Date: Thu, 21 Aug 2025 10:26:38 -0300 Subject: [PATCH 184/405] chore(deps): bump pytz (#9385) * chore(deps): bump pytz * chore(deps): fix copy/paste error --- ...meeting_country_alter_meeting_time_zone.py | 693 ++++++++++++++++++ requirements.txt | 4 +- 2 files changed, 695 insertions(+), 2 deletions(-) create mode 100644 ietf/meeting/migrations/0016_alter_meeting_country_alter_meeting_time_zone.py diff --git a/ietf/meeting/migrations/0016_alter_meeting_country_alter_meeting_time_zone.py b/ietf/meeting/migrations/0016_alter_meeting_country_alter_meeting_time_zone.py new file mode 100644 index 0000000000..8f5db26112 --- /dev/null +++ b/ietf/meeting/migrations/0016_alter_meeting_country_alter_meeting_time_zone.py @@ -0,0 +1,693 @@ +# Copyright The IETF Trust 2025, All Rights Reserved + +from django.db import migrations, models + + +class Migration(migrations.Migration): + dependencies = [ + ("meeting", "0015_alter_meeting_time_zone"), + ] + + operations = [ + migrations.AlterField( + model_name="meeting", + name="country", + field=models.CharField( + blank=True, 
+ choices=[ + ("", "---------"), + ("AF", "Afghanistan"), + ("AL", "Albania"), + ("DZ", "Algeria"), + ("AD", "Andorra"), + ("AO", "Angola"), + ("AI", "Anguilla"), + ("AQ", "Antarctica"), + ("AG", "Antigua & Barbuda"), + ("AR", "Argentina"), + ("AM", "Armenia"), + ("AW", "Aruba"), + ("AU", "Australia"), + ("AT", "Austria"), + ("AZ", "Azerbaijan"), + ("BS", "Bahamas"), + ("BH", "Bahrain"), + ("BD", "Bangladesh"), + ("BB", "Barbados"), + ("BY", "Belarus"), + ("BE", "Belgium"), + ("BZ", "Belize"), + ("BJ", "Benin"), + ("BM", "Bermuda"), + ("BT", "Bhutan"), + ("BO", "Bolivia"), + ("BA", "Bosnia & Herzegovina"), + ("BW", "Botswana"), + ("BV", "Bouvet Island"), + ("BR", "Brazil"), + ("GB", "Britain (UK)"), + ("IO", "British Indian Ocean Territory"), + ("BN", "Brunei"), + ("BG", "Bulgaria"), + ("BF", "Burkina Faso"), + ("BI", "Burundi"), + ("KH", "Cambodia"), + ("CM", "Cameroon"), + ("CA", "Canada"), + ("CV", "Cape Verde"), + ("BQ", "Caribbean NL"), + ("KY", "Cayman Islands"), + ("CF", "Central African Rep."), + ("TD", "Chad"), + ("CL", "Chile"), + ("CN", "China"), + ("CX", "Christmas Island"), + ("CC", "Cocos (Keeling) Islands"), + ("CO", "Colombia"), + ("KM", "Comoros"), + ("CD", "Congo (Dem. Rep.)"), + ("CG", "Congo (Rep.)"), + ("CK", "Cook Islands"), + ("CR", "Costa Rica"), + ("HR", "Croatia"), + ("CU", "Cuba"), + ("CW", "Curaçao"), + ("CY", "Cyprus"), + ("CZ", "Czech Republic"), + ("CI", "Côte d'Ivoire"), + ("DK", "Denmark"), + ("DJ", "Djibouti"), + ("DM", "Dominica"), + ("DO", "Dominican Republic"), + ("TL", "East Timor"), + ("EC", "Ecuador"), + ("EG", "Egypt"), + ("SV", "El Salvador"), + ("GQ", "Equatorial Guinea"), + ("ER", "Eritrea"), + ("EE", "Estonia"), + ("SZ", "Eswatini (Swaziland)"), + ("ET", "Ethiopia"), + ("FK", "Falkland Islands"), + ("FO", "Faroe Islands"), + ("FJ", "Fiji"), + ("FI", "Finland"), + ("FR", "France"), + ("GF", "French Guiana"), + ("PF", "French Polynesia"), + ("TF", "French S. 
Terr."), + ("GA", "Gabon"), + ("GM", "Gambia"), + ("GE", "Georgia"), + ("DE", "Germany"), + ("GH", "Ghana"), + ("GI", "Gibraltar"), + ("GR", "Greece"), + ("GL", "Greenland"), + ("GD", "Grenada"), + ("GP", "Guadeloupe"), + ("GU", "Guam"), + ("GT", "Guatemala"), + ("GG", "Guernsey"), + ("GN", "Guinea"), + ("GW", "Guinea-Bissau"), + ("GY", "Guyana"), + ("HT", "Haiti"), + ("HM", "Heard Island & McDonald Islands"), + ("HN", "Honduras"), + ("HK", "Hong Kong"), + ("HU", "Hungary"), + ("IS", "Iceland"), + ("IN", "India"), + ("ID", "Indonesia"), + ("IR", "Iran"), + ("IQ", "Iraq"), + ("IE", "Ireland"), + ("IM", "Isle of Man"), + ("IL", "Israel"), + ("IT", "Italy"), + ("JM", "Jamaica"), + ("JP", "Japan"), + ("JE", "Jersey"), + ("JO", "Jordan"), + ("KZ", "Kazakhstan"), + ("KE", "Kenya"), + ("KI", "Kiribati"), + ("KP", "Korea (North)"), + ("KR", "Korea (South)"), + ("KW", "Kuwait"), + ("KG", "Kyrgyzstan"), + ("LA", "Laos"), + ("LV", "Latvia"), + ("LB", "Lebanon"), + ("LS", "Lesotho"), + ("LR", "Liberia"), + ("LY", "Libya"), + ("LI", "Liechtenstein"), + ("LT", "Lithuania"), + ("LU", "Luxembourg"), + ("MO", "Macau"), + ("MG", "Madagascar"), + ("MW", "Malawi"), + ("MY", "Malaysia"), + ("MV", "Maldives"), + ("ML", "Mali"), + ("MT", "Malta"), + ("MH", "Marshall Islands"), + ("MQ", "Martinique"), + ("MR", "Mauritania"), + ("MU", "Mauritius"), + ("YT", "Mayotte"), + ("MX", "Mexico"), + ("FM", "Micronesia"), + ("MD", "Moldova"), + ("MC", "Monaco"), + ("MN", "Mongolia"), + ("ME", "Montenegro"), + ("MS", "Montserrat"), + ("MA", "Morocco"), + ("MZ", "Mozambique"), + ("MM", "Myanmar (Burma)"), + ("NA", "Namibia"), + ("NR", "Nauru"), + ("NP", "Nepal"), + ("NL", "Netherlands"), + ("NC", "New Caledonia"), + ("NZ", "New Zealand"), + ("NI", "Nicaragua"), + ("NE", "Niger"), + ("NG", "Nigeria"), + ("NU", "Niue"), + ("NF", "Norfolk Island"), + ("MK", "North Macedonia"), + ("MP", "Northern Mariana Islands"), + ("NO", "Norway"), + ("OM", "Oman"), + ("PK", "Pakistan"), + ("PW", "Palau"), + ("PS", 
"Palestine"), + ("PA", "Panama"), + ("PG", "Papua New Guinea"), + ("PY", "Paraguay"), + ("PE", "Peru"), + ("PH", "Philippines"), + ("PN", "Pitcairn"), + ("PL", "Poland"), + ("PT", "Portugal"), + ("PR", "Puerto Rico"), + ("QA", "Qatar"), + ("RO", "Romania"), + ("RU", "Russia"), + ("RW", "Rwanda"), + ("RE", "Réunion"), + ("AS", "Samoa (American)"), + ("WS", "Samoa (western)"), + ("SM", "San Marino"), + ("ST", "Sao Tome & Principe"), + ("SA", "Saudi Arabia"), + ("SN", "Senegal"), + ("RS", "Serbia"), + ("SC", "Seychelles"), + ("SL", "Sierra Leone"), + ("SG", "Singapore"), + ("SK", "Slovakia"), + ("SI", "Slovenia"), + ("SB", "Solomon Islands"), + ("SO", "Somalia"), + ("ZA", "South Africa"), + ("GS", "South Georgia & the South Sandwich Islands"), + ("SS", "South Sudan"), + ("ES", "Spain"), + ("LK", "Sri Lanka"), + ("BL", "St Barthelemy"), + ("SH", "St Helena"), + ("KN", "St Kitts & Nevis"), + ("LC", "St Lucia"), + ("SX", "St Maarten (Dutch)"), + ("MF", "St Martin (French)"), + ("PM", "St Pierre & Miquelon"), + ("VC", "St Vincent"), + ("SD", "Sudan"), + ("SR", "Suriname"), + ("SJ", "Svalbard & Jan Mayen"), + ("SE", "Sweden"), + ("CH", "Switzerland"), + ("SY", "Syria"), + ("TW", "Taiwan"), + ("TJ", "Tajikistan"), + ("TZ", "Tanzania"), + ("TH", "Thailand"), + ("TG", "Togo"), + ("TK", "Tokelau"), + ("TO", "Tonga"), + ("TT", "Trinidad & Tobago"), + ("TN", "Tunisia"), + ("TR", "Turkey"), + ("TM", "Turkmenistan"), + ("TC", "Turks & Caicos Is"), + ("TV", "Tuvalu"), + ("UM", "US minor outlying islands"), + ("UG", "Uganda"), + ("UA", "Ukraine"), + ("AE", "United Arab Emirates"), + ("US", "United States"), + ("UY", "Uruguay"), + ("UZ", "Uzbekistan"), + ("VU", "Vanuatu"), + ("VA", "Vatican City"), + ("VE", "Venezuela"), + ("VN", "Vietnam"), + ("VG", "Virgin Islands (UK)"), + ("VI", "Virgin Islands (US)"), + ("WF", "Wallis & Futuna"), + ("EH", "Western Sahara"), + ("YE", "Yemen"), + ("ZM", "Zambia"), + ("ZW", "Zimbabwe"), + ("AX", "Åland Islands"), + ], + max_length=2, + ), + ), + 
migrations.AlterField( + model_name="meeting", + name="time_zone", + field=models.CharField( + choices=[ + ("", "---------"), + ("Africa/Abidjan", "Africa/Abidjan"), + ("Africa/Accra", "Africa/Accra"), + ("Africa/Addis_Ababa", "Africa/Addis_Ababa"), + ("Africa/Algiers", "Africa/Algiers"), + ("Africa/Asmara", "Africa/Asmara"), + ("Africa/Bamako", "Africa/Bamako"), + ("Africa/Bangui", "Africa/Bangui"), + ("Africa/Banjul", "Africa/Banjul"), + ("Africa/Bissau", "Africa/Bissau"), + ("Africa/Blantyre", "Africa/Blantyre"), + ("Africa/Brazzaville", "Africa/Brazzaville"), + ("Africa/Bujumbura", "Africa/Bujumbura"), + ("Africa/Cairo", "Africa/Cairo"), + ("Africa/Casablanca", "Africa/Casablanca"), + ("Africa/Ceuta", "Africa/Ceuta"), + ("Africa/Conakry", "Africa/Conakry"), + ("Africa/Dakar", "Africa/Dakar"), + ("Africa/Dar_es_Salaam", "Africa/Dar_es_Salaam"), + ("Africa/Djibouti", "Africa/Djibouti"), + ("Africa/Douala", "Africa/Douala"), + ("Africa/El_Aaiun", "Africa/El_Aaiun"), + ("Africa/Freetown", "Africa/Freetown"), + ("Africa/Gaborone", "Africa/Gaborone"), + ("Africa/Harare", "Africa/Harare"), + ("Africa/Johannesburg", "Africa/Johannesburg"), + ("Africa/Juba", "Africa/Juba"), + ("Africa/Kampala", "Africa/Kampala"), + ("Africa/Khartoum", "Africa/Khartoum"), + ("Africa/Kigali", "Africa/Kigali"), + ("Africa/Kinshasa", "Africa/Kinshasa"), + ("Africa/Lagos", "Africa/Lagos"), + ("Africa/Libreville", "Africa/Libreville"), + ("Africa/Lome", "Africa/Lome"), + ("Africa/Luanda", "Africa/Luanda"), + ("Africa/Lubumbashi", "Africa/Lubumbashi"), + ("Africa/Lusaka", "Africa/Lusaka"), + ("Africa/Malabo", "Africa/Malabo"), + ("Africa/Maputo", "Africa/Maputo"), + ("Africa/Maseru", "Africa/Maseru"), + ("Africa/Mbabane", "Africa/Mbabane"), + ("Africa/Mogadishu", "Africa/Mogadishu"), + ("Africa/Monrovia", "Africa/Monrovia"), + ("Africa/Nairobi", "Africa/Nairobi"), + ("Africa/Ndjamena", "Africa/Ndjamena"), + ("Africa/Niamey", "Africa/Niamey"), + ("Africa/Nouakchott", "Africa/Nouakchott"), + 
("Africa/Ouagadougou", "Africa/Ouagadougou"), + ("Africa/Porto-Novo", "Africa/Porto-Novo"), + ("Africa/Sao_Tome", "Africa/Sao_Tome"), + ("Africa/Tripoli", "Africa/Tripoli"), + ("Africa/Tunis", "Africa/Tunis"), + ("Africa/Windhoek", "Africa/Windhoek"), + ("America/Adak", "America/Adak"), + ("America/Anchorage", "America/Anchorage"), + ("America/Anguilla", "America/Anguilla"), + ("America/Antigua", "America/Antigua"), + ("America/Araguaina", "America/Araguaina"), + ( + "America/Argentina/Buenos_Aires", + "America/Argentina/Buenos_Aires", + ), + ("America/Argentina/Catamarca", "America/Argentina/Catamarca"), + ("America/Argentina/Cordoba", "America/Argentina/Cordoba"), + ("America/Argentina/Jujuy", "America/Argentina/Jujuy"), + ("America/Argentina/La_Rioja", "America/Argentina/La_Rioja"), + ("America/Argentina/Mendoza", "America/Argentina/Mendoza"), + ( + "America/Argentina/Rio_Gallegos", + "America/Argentina/Rio_Gallegos", + ), + ("America/Argentina/Salta", "America/Argentina/Salta"), + ("America/Argentina/San_Juan", "America/Argentina/San_Juan"), + ("America/Argentina/San_Luis", "America/Argentina/San_Luis"), + ("America/Argentina/Tucuman", "America/Argentina/Tucuman"), + ("America/Argentina/Ushuaia", "America/Argentina/Ushuaia"), + ("America/Aruba", "America/Aruba"), + ("America/Asuncion", "America/Asuncion"), + ("America/Atikokan", "America/Atikokan"), + ("America/Bahia", "America/Bahia"), + ("America/Bahia_Banderas", "America/Bahia_Banderas"), + ("America/Barbados", "America/Barbados"), + ("America/Belem", "America/Belem"), + ("America/Belize", "America/Belize"), + ("America/Blanc-Sablon", "America/Blanc-Sablon"), + ("America/Boa_Vista", "America/Boa_Vista"), + ("America/Bogota", "America/Bogota"), + ("America/Boise", "America/Boise"), + ("America/Cambridge_Bay", "America/Cambridge_Bay"), + ("America/Campo_Grande", "America/Campo_Grande"), + ("America/Cancun", "America/Cancun"), + ("America/Caracas", "America/Caracas"), + ("America/Cayenne", "America/Cayenne"), + 
("America/Cayman", "America/Cayman"), + ("America/Chicago", "America/Chicago"), + ("America/Chihuahua", "America/Chihuahua"), + ("America/Ciudad_Juarez", "America/Ciudad_Juarez"), + ("America/Costa_Rica", "America/Costa_Rica"), + ("America/Coyhaique", "America/Coyhaique"), + ("America/Creston", "America/Creston"), + ("America/Cuiaba", "America/Cuiaba"), + ("America/Curacao", "America/Curacao"), + ("America/Danmarkshavn", "America/Danmarkshavn"), + ("America/Dawson", "America/Dawson"), + ("America/Dawson_Creek", "America/Dawson_Creek"), + ("America/Denver", "America/Denver"), + ("America/Detroit", "America/Detroit"), + ("America/Dominica", "America/Dominica"), + ("America/Edmonton", "America/Edmonton"), + ("America/Eirunepe", "America/Eirunepe"), + ("America/El_Salvador", "America/El_Salvador"), + ("America/Fort_Nelson", "America/Fort_Nelson"), + ("America/Fortaleza", "America/Fortaleza"), + ("America/Glace_Bay", "America/Glace_Bay"), + ("America/Goose_Bay", "America/Goose_Bay"), + ("America/Grand_Turk", "America/Grand_Turk"), + ("America/Grenada", "America/Grenada"), + ("America/Guadeloupe", "America/Guadeloupe"), + ("America/Guatemala", "America/Guatemala"), + ("America/Guayaquil", "America/Guayaquil"), + ("America/Guyana", "America/Guyana"), + ("America/Halifax", "America/Halifax"), + ("America/Havana", "America/Havana"), + ("America/Hermosillo", "America/Hermosillo"), + ("America/Indiana/Indianapolis", "America/Indiana/Indianapolis"), + ("America/Indiana/Knox", "America/Indiana/Knox"), + ("America/Indiana/Marengo", "America/Indiana/Marengo"), + ("America/Indiana/Petersburg", "America/Indiana/Petersburg"), + ("America/Indiana/Tell_City", "America/Indiana/Tell_City"), + ("America/Indiana/Vevay", "America/Indiana/Vevay"), + ("America/Indiana/Vincennes", "America/Indiana/Vincennes"), + ("America/Indiana/Winamac", "America/Indiana/Winamac"), + ("America/Inuvik", "America/Inuvik"), + ("America/Iqaluit", "America/Iqaluit"), + ("America/Jamaica", "America/Jamaica"), + 
("America/Juneau", "America/Juneau"), + ("America/Kentucky/Louisville", "America/Kentucky/Louisville"), + ("America/Kentucky/Monticello", "America/Kentucky/Monticello"), + ("America/La_Paz", "America/La_Paz"), + ("America/Lima", "America/Lima"), + ("America/Los_Angeles", "America/Los_Angeles"), + ("America/Maceio", "America/Maceio"), + ("America/Managua", "America/Managua"), + ("America/Manaus", "America/Manaus"), + ("America/Martinique", "America/Martinique"), + ("America/Matamoros", "America/Matamoros"), + ("America/Mazatlan", "America/Mazatlan"), + ("America/Menominee", "America/Menominee"), + ("America/Merida", "America/Merida"), + ("America/Metlakatla", "America/Metlakatla"), + ("America/Mexico_City", "America/Mexico_City"), + ("America/Miquelon", "America/Miquelon"), + ("America/Moncton", "America/Moncton"), + ("America/Monterrey", "America/Monterrey"), + ("America/Montevideo", "America/Montevideo"), + ("America/Montserrat", "America/Montserrat"), + ("America/Nassau", "America/Nassau"), + ("America/New_York", "America/New_York"), + ("America/Nome", "America/Nome"), + ("America/Noronha", "America/Noronha"), + ("America/North_Dakota/Beulah", "America/North_Dakota/Beulah"), + ("America/North_Dakota/Center", "America/North_Dakota/Center"), + ( + "America/North_Dakota/New_Salem", + "America/North_Dakota/New_Salem", + ), + ("America/Nuuk", "America/Nuuk"), + ("America/Ojinaga", "America/Ojinaga"), + ("America/Panama", "America/Panama"), + ("America/Paramaribo", "America/Paramaribo"), + ("America/Phoenix", "America/Phoenix"), + ("America/Port-au-Prince", "America/Port-au-Prince"), + ("America/Port_of_Spain", "America/Port_of_Spain"), + ("America/Porto_Velho", "America/Porto_Velho"), + ("America/Puerto_Rico", "America/Puerto_Rico"), + ("America/Punta_Arenas", "America/Punta_Arenas"), + ("America/Rankin_Inlet", "America/Rankin_Inlet"), + ("America/Recife", "America/Recife"), + ("America/Regina", "America/Regina"), + ("America/Resolute", "America/Resolute"), + 
("America/Rio_Branco", "America/Rio_Branco"), + ("America/Santarem", "America/Santarem"), + ("America/Santiago", "America/Santiago"), + ("America/Santo_Domingo", "America/Santo_Domingo"), + ("America/Sao_Paulo", "America/Sao_Paulo"), + ("America/Scoresbysund", "America/Scoresbysund"), + ("America/Sitka", "America/Sitka"), + ("America/St_Johns", "America/St_Johns"), + ("America/St_Kitts", "America/St_Kitts"), + ("America/St_Lucia", "America/St_Lucia"), + ("America/St_Thomas", "America/St_Thomas"), + ("America/St_Vincent", "America/St_Vincent"), + ("America/Swift_Current", "America/Swift_Current"), + ("America/Tegucigalpa", "America/Tegucigalpa"), + ("America/Thule", "America/Thule"), + ("America/Tijuana", "America/Tijuana"), + ("America/Toronto", "America/Toronto"), + ("America/Tortola", "America/Tortola"), + ("America/Vancouver", "America/Vancouver"), + ("America/Whitehorse", "America/Whitehorse"), + ("America/Winnipeg", "America/Winnipeg"), + ("America/Yakutat", "America/Yakutat"), + ("Antarctica/Casey", "Antarctica/Casey"), + ("Antarctica/Davis", "Antarctica/Davis"), + ("Antarctica/DumontDUrville", "Antarctica/DumontDUrville"), + ("Antarctica/Macquarie", "Antarctica/Macquarie"), + ("Antarctica/Mawson", "Antarctica/Mawson"), + ("Antarctica/McMurdo", "Antarctica/McMurdo"), + ("Antarctica/Palmer", "Antarctica/Palmer"), + ("Antarctica/Rothera", "Antarctica/Rothera"), + ("Antarctica/Syowa", "Antarctica/Syowa"), + ("Antarctica/Troll", "Antarctica/Troll"), + ("Antarctica/Vostok", "Antarctica/Vostok"), + ("Asia/Aden", "Asia/Aden"), + ("Asia/Almaty", "Asia/Almaty"), + ("Asia/Amman", "Asia/Amman"), + ("Asia/Anadyr", "Asia/Anadyr"), + ("Asia/Aqtau", "Asia/Aqtau"), + ("Asia/Aqtobe", "Asia/Aqtobe"), + ("Asia/Ashgabat", "Asia/Ashgabat"), + ("Asia/Atyrau", "Asia/Atyrau"), + ("Asia/Baghdad", "Asia/Baghdad"), + ("Asia/Bahrain", "Asia/Bahrain"), + ("Asia/Baku", "Asia/Baku"), + ("Asia/Bangkok", "Asia/Bangkok"), + ("Asia/Barnaul", "Asia/Barnaul"), + ("Asia/Beirut", "Asia/Beirut"), + 
("Asia/Bishkek", "Asia/Bishkek"), + ("Asia/Brunei", "Asia/Brunei"), + ("Asia/Chita", "Asia/Chita"), + ("Asia/Colombo", "Asia/Colombo"), + ("Asia/Damascus", "Asia/Damascus"), + ("Asia/Dhaka", "Asia/Dhaka"), + ("Asia/Dili", "Asia/Dili"), + ("Asia/Dubai", "Asia/Dubai"), + ("Asia/Dushanbe", "Asia/Dushanbe"), + ("Asia/Famagusta", "Asia/Famagusta"), + ("Asia/Gaza", "Asia/Gaza"), + ("Asia/Hebron", "Asia/Hebron"), + ("Asia/Ho_Chi_Minh", "Asia/Ho_Chi_Minh"), + ("Asia/Hong_Kong", "Asia/Hong_Kong"), + ("Asia/Hovd", "Asia/Hovd"), + ("Asia/Irkutsk", "Asia/Irkutsk"), + ("Asia/Jakarta", "Asia/Jakarta"), + ("Asia/Jayapura", "Asia/Jayapura"), + ("Asia/Jerusalem", "Asia/Jerusalem"), + ("Asia/Kabul", "Asia/Kabul"), + ("Asia/Kamchatka", "Asia/Kamchatka"), + ("Asia/Karachi", "Asia/Karachi"), + ("Asia/Kathmandu", "Asia/Kathmandu"), + ("Asia/Khandyga", "Asia/Khandyga"), + ("Asia/Kolkata", "Asia/Kolkata"), + ("Asia/Krasnoyarsk", "Asia/Krasnoyarsk"), + ("Asia/Kuala_Lumpur", "Asia/Kuala_Lumpur"), + ("Asia/Kuching", "Asia/Kuching"), + ("Asia/Kuwait", "Asia/Kuwait"), + ("Asia/Macau", "Asia/Macau"), + ("Asia/Magadan", "Asia/Magadan"), + ("Asia/Makassar", "Asia/Makassar"), + ("Asia/Manila", "Asia/Manila"), + ("Asia/Muscat", "Asia/Muscat"), + ("Asia/Nicosia", "Asia/Nicosia"), + ("Asia/Novokuznetsk", "Asia/Novokuznetsk"), + ("Asia/Novosibirsk", "Asia/Novosibirsk"), + ("Asia/Omsk", "Asia/Omsk"), + ("Asia/Oral", "Asia/Oral"), + ("Asia/Phnom_Penh", "Asia/Phnom_Penh"), + ("Asia/Pontianak", "Asia/Pontianak"), + ("Asia/Pyongyang", "Asia/Pyongyang"), + ("Asia/Qatar", "Asia/Qatar"), + ("Asia/Qostanay", "Asia/Qostanay"), + ("Asia/Qyzylorda", "Asia/Qyzylorda"), + ("Asia/Riyadh", "Asia/Riyadh"), + ("Asia/Sakhalin", "Asia/Sakhalin"), + ("Asia/Samarkand", "Asia/Samarkand"), + ("Asia/Seoul", "Asia/Seoul"), + ("Asia/Shanghai", "Asia/Shanghai"), + ("Asia/Singapore", "Asia/Singapore"), + ("Asia/Srednekolymsk", "Asia/Srednekolymsk"), + ("Asia/Taipei", "Asia/Taipei"), + ("Asia/Tashkent", "Asia/Tashkent"), + 
("Asia/Tbilisi", "Asia/Tbilisi"), + ("Asia/Tehran", "Asia/Tehran"), + ("Asia/Thimphu", "Asia/Thimphu"), + ("Asia/Tokyo", "Asia/Tokyo"), + ("Asia/Tomsk", "Asia/Tomsk"), + ("Asia/Ulaanbaatar", "Asia/Ulaanbaatar"), + ("Asia/Urumqi", "Asia/Urumqi"), + ("Asia/Ust-Nera", "Asia/Ust-Nera"), + ("Asia/Vientiane", "Asia/Vientiane"), + ("Asia/Vladivostok", "Asia/Vladivostok"), + ("Asia/Yakutsk", "Asia/Yakutsk"), + ("Asia/Yangon", "Asia/Yangon"), + ("Asia/Yekaterinburg", "Asia/Yekaterinburg"), + ("Asia/Yerevan", "Asia/Yerevan"), + ("Atlantic/Azores", "Atlantic/Azores"), + ("Atlantic/Bermuda", "Atlantic/Bermuda"), + ("Atlantic/Canary", "Atlantic/Canary"), + ("Atlantic/Cape_Verde", "Atlantic/Cape_Verde"), + ("Atlantic/Faroe", "Atlantic/Faroe"), + ("Atlantic/Madeira", "Atlantic/Madeira"), + ("Atlantic/Reykjavik", "Atlantic/Reykjavik"), + ("Atlantic/South_Georgia", "Atlantic/South_Georgia"), + ("Atlantic/St_Helena", "Atlantic/St_Helena"), + ("Atlantic/Stanley", "Atlantic/Stanley"), + ("Australia/Adelaide", "Australia/Adelaide"), + ("Australia/Brisbane", "Australia/Brisbane"), + ("Australia/Broken_Hill", "Australia/Broken_Hill"), + ("Australia/Darwin", "Australia/Darwin"), + ("Australia/Eucla", "Australia/Eucla"), + ("Australia/Hobart", "Australia/Hobart"), + ("Australia/Lindeman", "Australia/Lindeman"), + ("Australia/Lord_Howe", "Australia/Lord_Howe"), + ("Australia/Melbourne", "Australia/Melbourne"), + ("Australia/Perth", "Australia/Perth"), + ("Australia/Sydney", "Australia/Sydney"), + ("Europe/Amsterdam", "Europe/Amsterdam"), + ("Europe/Andorra", "Europe/Andorra"), + ("Europe/Astrakhan", "Europe/Astrakhan"), + ("Europe/Athens", "Europe/Athens"), + ("Europe/Belgrade", "Europe/Belgrade"), + ("Europe/Berlin", "Europe/Berlin"), + ("Europe/Brussels", "Europe/Brussels"), + ("Europe/Bucharest", "Europe/Bucharest"), + ("Europe/Budapest", "Europe/Budapest"), + ("Europe/Chisinau", "Europe/Chisinau"), + ("Europe/Copenhagen", "Europe/Copenhagen"), + ("Europe/Dublin", "Europe/Dublin"), + 
("Europe/Gibraltar", "Europe/Gibraltar"), + ("Europe/Helsinki", "Europe/Helsinki"), + ("Europe/Istanbul", "Europe/Istanbul"), + ("Europe/Kaliningrad", "Europe/Kaliningrad"), + ("Europe/Kirov", "Europe/Kirov"), + ("Europe/Kyiv", "Europe/Kyiv"), + ("Europe/Lisbon", "Europe/Lisbon"), + ("Europe/London", "Europe/London"), + ("Europe/Luxembourg", "Europe/Luxembourg"), + ("Europe/Madrid", "Europe/Madrid"), + ("Europe/Malta", "Europe/Malta"), + ("Europe/Minsk", "Europe/Minsk"), + ("Europe/Monaco", "Europe/Monaco"), + ("Europe/Moscow", "Europe/Moscow"), + ("Europe/Oslo", "Europe/Oslo"), + ("Europe/Paris", "Europe/Paris"), + ("Europe/Prague", "Europe/Prague"), + ("Europe/Riga", "Europe/Riga"), + ("Europe/Rome", "Europe/Rome"), + ("Europe/Samara", "Europe/Samara"), + ("Europe/Saratov", "Europe/Saratov"), + ("Europe/Simferopol", "Europe/Simferopol"), + ("Europe/Sofia", "Europe/Sofia"), + ("Europe/Stockholm", "Europe/Stockholm"), + ("Europe/Tallinn", "Europe/Tallinn"), + ("Europe/Tirane", "Europe/Tirane"), + ("Europe/Ulyanovsk", "Europe/Ulyanovsk"), + ("Europe/Vaduz", "Europe/Vaduz"), + ("Europe/Vienna", "Europe/Vienna"), + ("Europe/Vilnius", "Europe/Vilnius"), + ("Europe/Volgograd", "Europe/Volgograd"), + ("Europe/Warsaw", "Europe/Warsaw"), + ("Europe/Zurich", "Europe/Zurich"), + ("Indian/Antananarivo", "Indian/Antananarivo"), + ("Indian/Chagos", "Indian/Chagos"), + ("Indian/Christmas", "Indian/Christmas"), + ("Indian/Cocos", "Indian/Cocos"), + ("Indian/Comoro", "Indian/Comoro"), + ("Indian/Kerguelen", "Indian/Kerguelen"), + ("Indian/Mahe", "Indian/Mahe"), + ("Indian/Maldives", "Indian/Maldives"), + ("Indian/Mauritius", "Indian/Mauritius"), + ("Indian/Mayotte", "Indian/Mayotte"), + ("Indian/Reunion", "Indian/Reunion"), + ("Pacific/Apia", "Pacific/Apia"), + ("Pacific/Auckland", "Pacific/Auckland"), + ("Pacific/Bougainville", "Pacific/Bougainville"), + ("Pacific/Chatham", "Pacific/Chatham"), + ("Pacific/Chuuk", "Pacific/Chuuk"), + ("Pacific/Easter", "Pacific/Easter"), + 
("Pacific/Efate", "Pacific/Efate"), + ("Pacific/Fakaofo", "Pacific/Fakaofo"), + ("Pacific/Fiji", "Pacific/Fiji"), + ("Pacific/Funafuti", "Pacific/Funafuti"), + ("Pacific/Galapagos", "Pacific/Galapagos"), + ("Pacific/Gambier", "Pacific/Gambier"), + ("Pacific/Guadalcanal", "Pacific/Guadalcanal"), + ("Pacific/Guam", "Pacific/Guam"), + ("Pacific/Honolulu", "Pacific/Honolulu"), + ("Pacific/Kanton", "Pacific/Kanton"), + ("Pacific/Kiritimati", "Pacific/Kiritimati"), + ("Pacific/Kosrae", "Pacific/Kosrae"), + ("Pacific/Kwajalein", "Pacific/Kwajalein"), + ("Pacific/Majuro", "Pacific/Majuro"), + ("Pacific/Marquesas", "Pacific/Marquesas"), + ("Pacific/Midway", "Pacific/Midway"), + ("Pacific/Nauru", "Pacific/Nauru"), + ("Pacific/Niue", "Pacific/Niue"), + ("Pacific/Norfolk", "Pacific/Norfolk"), + ("Pacific/Noumea", "Pacific/Noumea"), + ("Pacific/Pago_Pago", "Pacific/Pago_Pago"), + ("Pacific/Palau", "Pacific/Palau"), + ("Pacific/Pitcairn", "Pacific/Pitcairn"), + ("Pacific/Pohnpei", "Pacific/Pohnpei"), + ("Pacific/Port_Moresby", "Pacific/Port_Moresby"), + ("Pacific/Rarotonga", "Pacific/Rarotonga"), + ("Pacific/Saipan", "Pacific/Saipan"), + ("Pacific/Tahiti", "Pacific/Tahiti"), + ("Pacific/Tarawa", "Pacific/Tarawa"), + ("Pacific/Tongatapu", "Pacific/Tongatapu"), + ("Pacific/Wake", "Pacific/Wake"), + ("Pacific/Wallis", "Pacific/Wallis"), + ("UTC", "UTC"), + ], + default="UTC", + max_length=255, + ), + ), + ] diff --git a/requirements.txt b/requirements.txt index 7aadc7bf7f..b3226d24b1 100644 --- a/requirements.txt +++ b/requirements.txt @@ -66,8 +66,8 @@ python-json-logger>=3.1.0 python-magic==0.4.18 # Versions beyond the yanked .19 and .20 introduce form failures pymemcache>=4.0.0 # for django.core.cache.backends.memcached.PyMemcacheCache python-mimeparse>=1.6 # from TastyPie -pytz==2022.2.1 # Pinned as changes need to be vetted for their effect on Meeting fields -types-pytz==2022.2.1 # match pytz version +pytz==2025.2 # Pinned as changes need to be vetted for their effect on 
Meeting fields +types-pytz==2025.2.0.20250516 # match pytz versionrequests>=2.31.0 requests>=2.31.0 types-requests>=2.27.1 requests-mock>=1.9.3 From 891049aa28c12b1afab4e16561bd31316e3b508e Mon Sep 17 00:00:00 2001 From: rjsparks <10996692+rjsparks@users.noreply.github.com> Date: Thu, 21 Aug 2025 13:39:47 +0000 Subject: [PATCH 185/405] ci: update base image target version to 20250821T1326 --- dev/build/Dockerfile | 2 +- dev/build/TARGET_BASE | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/dev/build/Dockerfile b/dev/build/Dockerfile index 756b7021b7..9019dbb626 100644 --- a/dev/build/Dockerfile +++ b/dev/build/Dockerfile @@ -1,4 +1,4 @@ -FROM ghcr.io/ietf-tools/datatracker-app-base:20250819T1508 +FROM ghcr.io/ietf-tools/datatracker-app-base:20250821T1326 LABEL maintainer="IETF Tools Team " ENV DEBIAN_FRONTEND=noninteractive diff --git a/dev/build/TARGET_BASE b/dev/build/TARGET_BASE index 2e0f1519da..c5c7fb37b9 100644 --- a/dev/build/TARGET_BASE +++ b/dev/build/TARGET_BASE @@ -1 +1 @@ -20250819T1508 +20250821T1326 From 86f2cfa29b8958633bf9dd23b624cfb4246f5693 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 21 Aug 2025 08:59:40 -0500 Subject: [PATCH 186/405] chore(deps): bump types-pytz from 2022.2.1 to 2025.2.0.20250809 (#9339) Bumps [types-pytz](https://github.com/typeshed-internal/stub_uploader) from 2022.2.1 to 2025.2.0.20250809. - [Commits](https://github.com/typeshed-internal/stub_uploader/commits) --- updated-dependencies: - dependency-name: types-pytz dependency-version: 2025.2.0.20250809 dependency-type: direct:production update-type: version-update:semver-major ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index b3226d24b1..60d3d8152e 100644 --- a/requirements.txt +++ b/requirements.txt @@ -67,7 +67,7 @@ python-magic==0.4.18 # Versions beyond the yanked .19 and .20 introduce form pymemcache>=4.0.0 # for django.core.cache.backends.memcached.PyMemcacheCache python-mimeparse>=1.6 # from TastyPie pytz==2025.2 # Pinned as changes need to be vetted for their effect on Meeting fields -types-pytz==2025.2.0.20250516 # match pytz versionrequests>=2.31.0 +types-pytz==2025.2.0.20250809 # match pytz versionrequests>=2.31.0 requests>=2.31.0 types-requests>=2.27.1 requests-mock>=1.9.3 From f84f2e5b4ade1bff90feb845707f70f5a317253c Mon Sep 17 00:00:00 2001 From: rjsparks <10996692+rjsparks@users.noreply.github.com> Date: Thu, 21 Aug 2025 14:13:02 +0000 Subject: [PATCH 187/405] ci: update base image target version to 20250821T1359 --- dev/build/Dockerfile | 2 +- dev/build/TARGET_BASE | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/dev/build/Dockerfile b/dev/build/Dockerfile index 9019dbb626..d619ee99ee 100644 --- a/dev/build/Dockerfile +++ b/dev/build/Dockerfile @@ -1,4 +1,4 @@ -FROM ghcr.io/ietf-tools/datatracker-app-base:20250821T1326 +FROM ghcr.io/ietf-tools/datatracker-app-base:20250821T1359 LABEL maintainer="IETF Tools Team " ENV DEBIAN_FRONTEND=noninteractive diff --git a/dev/build/TARGET_BASE b/dev/build/TARGET_BASE index c5c7fb37b9..b6fc12e128 100644 --- a/dev/build/TARGET_BASE +++ b/dev/build/TARGET_BASE @@ -1 +1 @@ -20250821T1326 +20250821T1359 From 64884e78ad3169a0d488ae3d046c1d65acf376b4 Mon Sep 17 00:00:00 2001 From: Jennifer Richards Date: Thu, 21 Aug 2025 15:03:05 -0300 Subject: [PATCH 188/405] fix: actually call fromisoformat() (#9399) --- ietf/api/__init__.py | 6 +++--- 1 file changed, 3 insertions(+), 3 
deletions(-) diff --git a/ietf/api/__init__.py b/ietf/api/__init__.py index e236347975..230f8339bd 100644 --- a/ietf/api/__init__.py +++ b/ietf/api/__init__.py @@ -62,14 +62,14 @@ def generate_cache_key(self, *args, **kwargs): # Use a list plus a ``.join()`` because it's faster than concatenation. return "%s:%s:%s:%s" % (self._meta.api_name, self._meta.resource_name, ':'.join(args), smooshed) - def _z_aware_fromisoformat(self, value): - """datetime.datetie.fromisoformat replacement that works with python < 3.11""" + def _z_aware_fromisoformat(self, value: str) -> datetime.datetime: + """datetime.datetime.fromisoformat replacement that works with python < 3.11""" if HAVE_BROKEN_FROMISOFORMAT: if value.upper().endswith("Z"): value = value[:-1] + "+00:00" # Z -> UTC elif re.match(r"[+-][0-9][0-9]$", value[-3:]): value = value + ":00" # -04 -> -04:00 - return value + return datetime.datetime.fromisoformat(value) def filter_value_to_python( self, value, field_name, filters, filter_expr, filter_type From 3b6b4770d67a8c88aee642db7129c35be930da12 Mon Sep 17 00:00:00 2001 From: Nicolas Giard Date: Fri, 22 Aug 2025 03:58:37 -0400 Subject: [PATCH 189/405] ci: Fix build.yml for staging db recreate --- .github/workflows/build.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 9c24380764..79ef750b5d 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -448,11 +448,11 @@ jobs: - name: Refresh Staging DB uses: the-actions-org/workflow-dispatch@v4 with: - workflow: update-staging-db.yml + workflow: deploy-db.yml repo: ietf-tools/infra-k8s ref: main token: ${{ secrets.GH_INFRA_K8S_TOKEN }} - inputs: '{ "sourceDb":"datatracker" }' + inputs: '{ "environment":"${{ secrets.GHA_K8S_CLUSTER }}", "app":"datatracker", "manifest":"postgres", "forceRecreate":true, "waitClusterReady":true }' wait-for-completion: true wait-for-completion-timeout: 10m wait-for-completion-interval: 20s 
From 82a241084954146152196d612a13074bdc1b5de9 Mon Sep 17 00:00:00 2001 From: Eric Vyncke Date: Fri, 22 Aug 2025 17:46:45 +0200 Subject: [PATCH 190/405] feat: updated text for BoF request (per IESG demand) (#9406) --- ietf/templates/doc/bofreq/bofreq_template.md | 10 +++---- ietf/templates/doc/bofreq/new_bofreq.html | 30 ++++++++++++++++++-- 2 files changed, 32 insertions(+), 8 deletions(-) diff --git a/ietf/templates/doc/bofreq/bofreq_template.md b/ietf/templates/doc/bofreq/bofreq_template.md index 78949ba7bd..49c5e511a5 100644 --- a/ietf/templates/doc/bofreq/bofreq_template.md +++ b/ietf/templates/doc/bofreq/bofreq_template.md @@ -1,15 +1,15 @@ -# Name: Exact MPLS Edges (EXAMPLE) (There's an acronym for anything if you really want one ;-) +# Name: EXAct MPLs Edges (EXAMPLE) (There's an acronym for anything if you really want one ;-) ## Description Replace this with a few paragraphs describing the BOF request. Fill in the details below. Keep items in the order they appear here. ## Required Details -- Status: (not) WG Forming -- Responsible AD: name +- Status: "not WG Forming" or "WG forming" +- Responsible AD: name (or at least area(s) if you know) - BOF proponents: name , name (1-3 people - who are requesting and coordinating discussion for proposal) - Number of people expected to attend: 100 -- Length of session (1 or 2 hours): 2 hours +- Length of session (1 or usually 2 hours): 2 hours - Conflicts (whole Areas and/or WGs) - Chair Conflicts: TBD - Technology Overlap: TBD @@ -27,7 +27,7 @@ To allow evaluation of your proposal, please include the following items: - Items, Internet-Drafts, speakers, timing - Or a URL -## Links to the mailing list, draft charter if any, relevant Internet-Drafts, etc. +## Links to the mailing list, draft charter if any (for WG-forming BoF), relevant Internet-Drafts, etc. 
- Mailing List: https://www.ietf.org/mailman/listinfo/example - Draft charter: {{ settings.IDTRACKER_BASE_URL }}{% url 'ietf.doc.views_doc.document_main' name='charter-ietf-EXAMPLE' %} - Relevant Internet-Drafts: diff --git a/ietf/templates/doc/bofreq/new_bofreq.html b/ietf/templates/doc/bofreq/new_bofreq.html index 9506d9c8e6..c6aa0054f9 100644 --- a/ietf/templates/doc/bofreq/new_bofreq.html +++ b/ietf/templates/doc/bofreq/new_bofreq.html @@ -6,15 +6,39 @@ {% origin %}

    Start a new BOF Request

    - The IAB will also attempt to provide BoF Shepherds as described in their document on the subject only on request from the IESG. If you feel that your BoF would benefit from an IAB BoF Shepherd, please discuss this with your Area Director. + BoF proponents are strongly encouraged to review the following sources before submitting requests: +

    + + {# The following block needs to be commented out after the BoF deadline and re-opened before next BoF request opening #} +
    +

    + Announcement for IETF 124: The IESG and the IAB have organized Ask Me Anything (AMA) virtual sessions + for the community to help proponents who are interested in putting up BoF proposals for IETF 124 + (see also the IETF-announce email): +

    +
      +
    • 28th of August 13:00-14:00 UTC +
    • +
    • 28th of August 19:00-20:00 UTC +
    • +
    +
    + {# End of the temporary block #} +

    + The IAB will also attempt to provide BoF Shepherds as described in their document on the subject only on request from the IESG. + If you feel that your BoF would benefit from an IAB BoF Shepherd, please discuss this with your Area Director.

    - Choose a short descriptive title for your request. Take time to choose a good initial title - it will be used to make the filename for your request's content. The title can be changed later, but the filename will not change. + Choose a short descriptive title for your request. Take time to choose a good initial title - it will be used to make the filename for your request's content. + The title can be changed later, but the filename will not change.

    For example, a request with a title of "A new important bit" will be saved as bofreq-{{ user.person.last_name|xslugify|slice:"64" }}-a-new-important-bit-00.md.

    -

    All the items in the template MUST be filed in.

    +

    All the items in the template MUST be filed in.

    From b1cfa7082f60343210b8116668f182e3c67207bf Mon Sep 17 00:00:00 2001 From: Nicolas Giard Date: Fri, 22 Aug 2025 22:08:59 -0400 Subject: [PATCH 191/405] ci: Increase wait-for-completion timeout to 30 minutes for staging refresh db step Increased the wait-for-completion timeout from 10 minutes to 30 minutes in the build workflow. --- .github/workflows/build.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 79ef750b5d..8567446cae 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -454,7 +454,7 @@ jobs: token: ${{ secrets.GH_INFRA_K8S_TOKEN }} inputs: '{ "environment":"${{ secrets.GHA_K8S_CLUSTER }}", "app":"datatracker", "manifest":"postgres", "forceRecreate":true, "waitClusterReady":true }' wait-for-completion: true - wait-for-completion-timeout: 10m + wait-for-completion-timeout: 30m wait-for-completion-interval: 20s display-workflow-run-url: false From b3f2756f6b5d6adf853eb7779412950291169c38 Mon Sep 17 00:00:00 2001 From: Robert Sparks Date: Wed, 27 Aug 2025 13:06:48 -0500 Subject: [PATCH 192/405] fix: clearly show To and From groups in liaison statement email (#9432) --- ietf/group/templatetags/group_filters.py | 7 +++++++ ietf/templates/liaisons/liaison_mail.txt | 11 +++++++++-- 2 files changed, 16 insertions(+), 2 deletions(-) diff --git a/ietf/group/templatetags/group_filters.py b/ietf/group/templatetags/group_filters.py index c9481b767b..bf2ad71949 100644 --- a/ietf/group/templatetags/group_filters.py +++ b/ietf/group/templatetags/group_filters.py @@ -37,3 +37,10 @@ def role_person_link(role, **kwargs): plain_name = role.person.plain_name() email = role.email.address return {'name': name, 'plain_name': plain_name, 'email': email, 'title': title, 'class': cls} + +@register.filter +def name_with_conditional_acronym(group): + if group.type_id in ("sdo", "isoc", "individ", "nomcom", "ietf", "irtf", ): + return group.name + else: + return 
f"{group.name} ({group.acronym})" diff --git a/ietf/templates/liaisons/liaison_mail.txt b/ietf/templates/liaisons/liaison_mail.txt index 6d6a07d7ef..18dfe610fd 100644 --- a/ietf/templates/liaisons/liaison_mail.txt +++ b/ietf/templates/liaisons/liaison_mail.txt @@ -1,13 +1,20 @@ -{% load ietf_filters %}{% autoescape off %}Title: {{ liaison.title|clean_whitespace }} +{% load ietf_filters group_filters %}{% autoescape off %}Title: {{ liaison.title|clean_whitespace }} Submission Date: {{ liaison.submitted|date:"Y-m-d" }} URL of the IETF Web page: {{ liaison.get_absolute_url }} + +To: {% for g in liaison.to_groups.all %}{{g|name_with_conditional_acronym}}{% if not forloop.last %}, {% endif %}{% endfor %} +From: {% for g in liaison.from_groups.all %}{{g|name_with_conditional_acronym}}{% if not forloop.last %}, {% endif %}{% endfor %} +Purpose: {{ liaison.purpose.name }} {% if liaison.deadline %}Please reply by {{ liaison.deadline }}{% endif %} + +Email Addresses +--------------- From: {% if liaison.from_contact %}{{ liaison.from_contact }}{% endif %} To: {{ liaison.to_contacts }} Cc: {{ liaison.cc_contacts }} Response Contacts: {{ liaison.response_contacts }} Technical Contacts: {{ liaison.technical_contacts }} -Purpose: {{ liaison.purpose.name }} + {% for related in liaison.source_of_set.all %} Referenced liaison: {% if related.target.title %}{{ related.target.title }}{% else %}Liaison #{{ related.target.pk }}{% endif %} ({{ related.target.get_absolute_url }}) {% endfor %} From 6e62bb32771cb564f52201a376ad6e754155343c Mon Sep 17 00:00:00 2001 From: Robert Sparks Date: Fri, 29 Aug 2025 10:44:51 -0500 Subject: [PATCH 193/405] fix: remove iab executive director specialization from the liaison app (#9435) --- ietf/liaisons/forms.py | 3 +-- ietf/liaisons/tests.py | 12 ------------ ietf/liaisons/tests_forms.py | 12 ------------ ietf/liaisons/utils.py | 1 - ietf/liaisons/views.py | 17 +++++++---------- 5 files changed, 8 insertions(+), 37 deletions(-) diff --git 
a/ietf/liaisons/forms.py b/ietf/liaisons/forms.py index 7483981595..ef5b29535e 100644 --- a/ietf/liaisons/forms.py +++ b/ietf/liaisons/forms.py @@ -105,7 +105,6 @@ def internal_groups_for_person(person: Optional[Person]): "Secretariat", "IETF Chair", "IAB Chair", - "IAB Executive Director", "Liaison Manager", "Liaison Coordinator", "Authorized Individual", @@ -115,7 +114,7 @@ def internal_groups_for_person(person: Optional[Person]): # Interesting roles, as Group queries queries = [ Q(role__person=person, role__name="chair", acronym="ietf"), - Q(role__person=person, role__name__in=("chair", "execdir"), acronym="iab"), + Q(role__person=person, role__name="chair", acronym="iab"), Q(role__person=person, role__name="ad", type="area", state="active"), Q( role__person=person, diff --git a/ietf/liaisons/tests.py b/ietf/liaisons/tests.py index 1d6cfe0c14..8bbaa4f053 100644 --- a/ietf/liaisons/tests.py +++ b/ietf/liaisons/tests.py @@ -123,7 +123,6 @@ def test_get_cc(self): cc = get_cc(Group.objects.get(acronym='iab')) self.assertTrue(EMAIL_ALIASES['IAB'] in cc) self.assertTrue(EMAIL_ALIASES['IABCHAIR'] in cc) - self.assertTrue(EMAIL_ALIASES['IABEXECUTIVEDIRECTOR'] in cc) # test an Area area = Group.objects.filter(type='area').first() cc = get_cc(area) @@ -166,7 +165,6 @@ def test_get_contacts_for_group(self): # test iab contacts = get_contacts_for_group(Group.objects.get(acronym='iab')) self.assertTrue(EMAIL_ALIASES['IABCHAIR'] in contacts) - self.assertTrue(EMAIL_ALIASES['IABEXECUTIVEDIRECTOR'] in contacts) # test iesg contacts = get_contacts_for_group(Group.objects.get(acronym='iesg')) self.assertTrue(EMAIL_ALIASES['IESG'] in contacts) @@ -534,7 +532,6 @@ def test_outgoing_access(self): RoleFactory(name_id='liaison_coordinator', group__acronym='iab', person__user__username='liaison-coordinator') mars = RoleFactory(name_id='chair',person__user__username='marschairman',group__acronym='mars').group RoleFactory(name_id='secr',group=mars,person__user__username='mars-secr') - 
RoleFactory(name_id='execdir',group=Group.objects.get(acronym='iab'),person__user__username='iab-execdir') url = urlreverse('ietf.liaisons.views.liaison_list') addurl = urlreverse('ietf.liaisons.views.liaison_add', kwargs={'type':'outgoing'}) @@ -592,15 +589,6 @@ def test_outgoing_access(self): r = self.client.get(addurl) self.assertEqual(r.status_code, 200) - # IAB Executive Director - self.assertTrue(self.client.login(username="iab-execdir", password="iab-execdir+password")) - r = self.client.get(url) - self.assertEqual(r.status_code, 200) - q = PyQuery(r.content) - self.assertEqual(len(q("a.btn:contains('New outgoing liaison')")), 1) - r = self.client.get(addurl) - self.assertEqual(r.status_code, 200) - # Liaison Manager has access self.assertTrue(self.client.login(username="ulm-liaiman", password="ulm-liaiman+password")) r = self.client.get(url) diff --git a/ietf/liaisons/tests_forms.py b/ietf/liaisons/tests_forms.py index c2afddea65..101c0c8298 100644 --- a/ietf/liaisons/tests_forms.py +++ b/ietf/liaisons/tests_forms.py @@ -94,11 +94,6 @@ def test_all_internal_groups(self): def test_internal_groups_for_person(self): # test relies on the data created in ietf.utils.test_data.make_immutable_test_data() # todo add liaison coordinator when modeled - RoleFactory( - name_id="execdir", - group=Group.objects.get(acronym="iab"), - person__user__username="iab-execdir", - ) RoleFactory( name_id="auth", group__type_id="sdo", @@ -121,7 +116,6 @@ def test_internal_groups_for_person(self): "secretary", "ietf-chair", "iab-chair", - "iab-execdir", "sdo-authperson", ): returned_queryset = internal_groups_for_person( @@ -151,11 +145,6 @@ def test_internal_groups_for_person(self): ) def test_external_groups_for_person(self): - RoleFactory( - name_id="execdir", - group=Group.objects.get(acronym="iab"), - person__user__username="iab-execdir", - ) RoleFactory(name_id="liaison_coordinator", group__acronym="iab", person__user__username="liaison-coordinator") the_sdo = 
GroupFactory(type_id="sdo", acronym="the-sdo") liaison_manager = RoleFactory(name_id="liaiman", group=the_sdo).person @@ -166,7 +155,6 @@ def test_external_groups_for_person(self): "secretary", "ietf-chair", "iab-chair", - "iab-execdir", "liaison-coordinator", "ad", "sopschairman", diff --git a/ietf/liaisons/utils.py b/ietf/liaisons/utils.py index ea06c5988e..469bbc5c87 100644 --- a/ietf/liaisons/utils.py +++ b/ietf/liaisons/utils.py @@ -8,7 +8,6 @@ OUTGOING_LIAISON_ROLES = [ "Area Director", "IAB Chair", - "IAB Executive Director", "IETF Chair", "Liaison Manager", "Liaison Coordinator", diff --git a/ietf/liaisons/views.py b/ietf/liaisons/views.py index 1b7e8d63bb..9710149c90 100644 --- a/ietf/liaisons/views.py +++ b/ietf/liaisons/views.py @@ -30,11 +30,12 @@ from ietf.utils.response import permission_denied EMAIL_ALIASES = { - 'IETFCHAIR':'The IETF Chair ', - 'IESG':'The IESG ', - 'IAB':'The IAB ', - 'IABCHAIR':'The IAB Chair ', - 'IABEXECUTIVEDIRECTOR':'The IAB Executive Director '} + "IETFCHAIR": "The IETF Chair ", + "IESG": "The IESG ", + "IAB": "The IAB ", + "IABCHAIR": "The IAB Chair ", +} + # ------------------------------------------------- # Helper Functions @@ -84,8 +85,6 @@ def _find_person_in_emails(liaison, person): return True elif addr in ('iab@iab.org', 'iab-chair@iab.org') and has_role(person.user, "IAB Chair"): return True - elif addr in ('execd@iab.org', ) and has_role(person.user, "IAB Executive Director"): - return True return False @@ -110,7 +109,6 @@ def get_cc(group): elif group.acronym in ('iab'): emails.append(EMAIL_ALIASES['IAB']) emails.append(EMAIL_ALIASES['IABCHAIR']) - emails.append(EMAIL_ALIASES['IABEXECUTIVEDIRECTOR']) elif group.type_id == 'area': emails.append(EMAIL_ALIASES['IETFCHAIR']) ad_roles = group.role_set.filter(name='ad') @@ -151,7 +149,6 @@ def get_contacts_for_group(group): contacts.append(EMAIL_ALIASES['IETFCHAIR']) elif group.acronym == 'iab': contacts.append(EMAIL_ALIASES['IABCHAIR']) - 
contacts.append(EMAIL_ALIASES['IABEXECUTIVEDIRECTOR']) elif group.acronym == 'iesg': contacts.append(EMAIL_ALIASES['IESG']) @@ -171,7 +168,7 @@ def needs_approval(group,person): user = person.user if group.acronym in ('ietf','iesg') and has_role(user, 'IETF Chair'): return False - if group.acronym == 'iab' and (has_role(user,'IAB Chair') or has_role(user,'IAB Executive Director')): + if group.acronym == 'iab' and has_role(user,'IAB Chair'): return False if group.type_id == 'area' and group.role_set.filter(name='ad',person=person): return False From 3ca4eec5abb6927837fbc849809b587f4bde6419 Mon Sep 17 00:00:00 2001 From: Jennifer Richards Date: Tue, 2 Sep 2025 14:41:52 -0300 Subject: [PATCH 194/405] feat: expose State.used in admin (#9449) --- ietf/doc/admin.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/ietf/doc/admin.py b/ietf/doc/admin.py index b492aa3423..745536f9a1 100644 --- a/ietf/doc/admin.py +++ b/ietf/doc/admin.py @@ -22,8 +22,8 @@ class StateTypeAdmin(admin.ModelAdmin): admin.site.register(StateType, StateTypeAdmin) class StateAdmin(admin.ModelAdmin): - list_display = ["slug", "type", 'name', 'order', 'desc'] - list_filter = ["type", ] + list_display = ["slug", "type", 'name', 'order', 'desc', "used"] + list_filter = ["type", "used"] search_fields = ["slug", "type__label", "type__slug", "name", "desc"] filter_horizontal = ["next_states"] admin.site.register(State, StateAdmin) From 02dbe17fe7bfe707594531bb16dffd905c5c2a53 Mon Sep 17 00:00:00 2001 From: Jennifer Richards Date: Tue, 2 Sep 2025 16:48:38 -0300 Subject: [PATCH 195/405] feat: history for mailtrigger models (#9452) * feat: history for mailtrigger models * chore: update copyright years * fix: use py3.9-compatible call_command syntax It seems `option=[...]` does not work with positional arguments in py3.9's argparse. 
* chore: update resources --- ietf/mailtrigger/admin.py | 7 +- ...storicalrecipient_historicalmailtrigger.py | 122 ++++++++++++++++++ ietf/mailtrigger/models.py | 6 +- ietf/mailtrigger/resources.py | 42 +++++- 4 files changed, 172 insertions(+), 5 deletions(-) create mode 100644 ietf/mailtrigger/migrations/0007_historicalrecipient_historicalmailtrigger.py diff --git a/ietf/mailtrigger/admin.py b/ietf/mailtrigger/admin.py index a60fd5b072..8c73f2ae02 100644 --- a/ietf/mailtrigger/admin.py +++ b/ietf/mailtrigger/admin.py @@ -1,9 +1,10 @@ -# Copyright The IETF Trust 2015-2019, All Rights Reserved +# Copyright The IETF Trust 2015-2025, All Rights Reserved from django.contrib import admin +from simple_history.admin import SimpleHistoryAdmin from ietf.mailtrigger.models import MailTrigger, Recipient -class RecipientAdmin(admin.ModelAdmin): +class RecipientAdmin(SimpleHistoryAdmin): list_display = [ 'slug', 'desc', 'template', 'has_code', ] def has_code(self, obj): return hasattr(obj,'gather_%s'%obj.slug) @@ -11,7 +12,7 @@ def has_code(self, obj): admin.site.register(Recipient, RecipientAdmin) -class MailTriggerAdmin(admin.ModelAdmin): +class MailTriggerAdmin(SimpleHistoryAdmin): list_display = [ 'slug', 'desc', ] filter_horizontal = [ 'to', 'cc', ] admin.site.register(MailTrigger, MailTriggerAdmin) diff --git a/ietf/mailtrigger/migrations/0007_historicalrecipient_historicalmailtrigger.py b/ietf/mailtrigger/migrations/0007_historicalrecipient_historicalmailtrigger.py new file mode 100644 index 0000000000..d23b72d737 --- /dev/null +++ b/ietf/mailtrigger/migrations/0007_historicalrecipient_historicalmailtrigger.py @@ -0,0 +1,122 @@ +# Copyright The IETF Trust 2025, All Rights Reserved +from io import StringIO + +from django.conf import settings +from django.core import management +from django.db import migrations, models +import django.db.models.deletion +import simple_history.models + +from ietf.utils.log import log + + +def forward(apps, schema_editor): + # Fill in 
history for existing data using the populate_history management command + captured_stdout = StringIO() + captured_stderr = StringIO() + try: + management.call_command( + "populate_history", + "mailtrigger.MailTrigger", + "mailtrigger.Recipient", + stdout=captured_stdout, + stderr=captured_stderr, + ) + except management.CommandError as err: + log( + "Failed to populate history for mailtrigger models.\n" + "\n" + f"stdout:\n{captured_stdout.getvalue() or ''}\n" + "\n" + f"stderr:\n{captured_stderr.getvalue() or ''}\n" + ) + raise RuntimeError("Failed to populate history for mailtrigger models") from err + log( + "Populated history for mailtrigger models.\n" + "\n" + f"stdout:\n{captured_stdout.getvalue() or ''}\n" + "\n" + f"stderr:\n{captured_stderr.getvalue() or ''}\n" + ) + + +def reverse(apps, schema_editor): + pass # nothing to do + + +class Migration(migrations.Migration): + dependencies = [ + migrations.swappable_dependency(settings.AUTH_USER_MODEL), + ("mailtrigger", "0006_call_for_adoption_and_last_call_issued"), + ] + + operations = [ + migrations.CreateModel( + name="HistoricalRecipient", + fields=[ + ("slug", models.CharField(db_index=True, max_length=32)), + ("desc", models.TextField(blank=True)), + ("template", models.TextField(blank=True, null=True)), + ("history_id", models.AutoField(primary_key=True, serialize=False)), + ("history_date", models.DateTimeField(db_index=True)), + ("history_change_reason", models.CharField(max_length=100, null=True)), + ( + "history_type", + models.CharField( + choices=[("+", "Created"), ("~", "Changed"), ("-", "Deleted")], + max_length=1, + ), + ), + ( + "history_user", + models.ForeignKey( + null=True, + on_delete=django.db.models.deletion.SET_NULL, + related_name="+", + to=settings.AUTH_USER_MODEL, + ), + ), + ], + options={ + "verbose_name": "historical recipient", + "verbose_name_plural": "historical recipients", + "ordering": ("-history_date", "-history_id"), + "get_latest_by": ("history_date", "history_id"), + }, 
+ bases=(simple_history.models.HistoricalChanges, models.Model), + ), + migrations.CreateModel( + name="HistoricalMailTrigger", + fields=[ + ("slug", models.CharField(db_index=True, max_length=64)), + ("desc", models.TextField(blank=True)), + ("history_id", models.AutoField(primary_key=True, serialize=False)), + ("history_date", models.DateTimeField(db_index=True)), + ("history_change_reason", models.CharField(max_length=100, null=True)), + ( + "history_type", + models.CharField( + choices=[("+", "Created"), ("~", "Changed"), ("-", "Deleted")], + max_length=1, + ), + ), + ( + "history_user", + models.ForeignKey( + null=True, + on_delete=django.db.models.deletion.SET_NULL, + related_name="+", + to=settings.AUTH_USER_MODEL, + ), + ), + ], + options={ + "verbose_name": "historical mail trigger", + "verbose_name_plural": "historical mail triggers", + "ordering": ("-history_date", "-history_id"), + "get_latest_by": ("history_date", "history_id"), + }, + bases=(simple_history.models.HistoricalChanges, models.Model), + ), + migrations.RunPython(forward, reverse), + ] diff --git a/ietf/mailtrigger/models.py b/ietf/mailtrigger/models.py index 66b7139fa5..435729f893 100644 --- a/ietf/mailtrigger/models.py +++ b/ietf/mailtrigger/models.py @@ -1,4 +1,4 @@ -# Copyright The IETF Trust 2015-2020, All Rights Reserved +# Copyright The IETF Trust 2015-2025, All Rights Reserved # -*- coding: utf-8 -*- @@ -7,6 +7,8 @@ from email.utils import parseaddr +from simple_history.models import HistoricalRecords + from ietf.doc.utils_bofreq import bofreq_editors, bofreq_responsible from ietf.utils.mail import formataddr, get_email_addresses_from_text from ietf.group.models import Group, Role @@ -38,6 +40,7 @@ class MailTrigger(models.Model): desc = models.TextField(blank=True) to = models.ManyToManyField('mailtrigger.Recipient', blank=True, related_name='used_in_to') cc = models.ManyToManyField('mailtrigger.Recipient', blank=True, related_name='used_in_cc') + history = HistoricalRecords() 
class Meta: ordering = ["slug"] @@ -49,6 +52,7 @@ class Recipient(models.Model): slug = models.CharField(max_length=32, primary_key=True) desc = models.TextField(blank=True) template = models.TextField(null=True, blank=True) + history = HistoricalRecords() class Meta: ordering = ["slug"] diff --git a/ietf/mailtrigger/resources.py b/ietf/mailtrigger/resources.py index eb5466618a..daca055bf4 100644 --- a/ietf/mailtrigger/resources.py +++ b/ietf/mailtrigger/resources.py @@ -7,7 +7,7 @@ from ietf import api -from ietf.mailtrigger.models import Recipient, MailTrigger +from ietf.mailtrigger.models import MailTrigger, Recipient class RecipientResource(ModelResource): @@ -37,3 +37,43 @@ class Meta: } api.mailtrigger.register(MailTriggerResource()) +from ietf.utils.resources import UserResource +class HistoricalMailTriggerResource(ModelResource): + history_user = ToOneField(UserResource, 'history_user', null=True) + class Meta: + queryset = MailTrigger.history.model.objects.all() + serializer = api.Serializer() + cache = SimpleCache() + #resource_name = 'historicalmailtrigger' + ordering = ['history_id', ] + filtering = { + "slug": ALL, + "desc": ALL, + "history_id": ALL, + "history_date": ALL, + "history_change_reason": ALL, + "history_type": ALL, + "history_user": ALL_WITH_RELATIONS, + } +api.mailtrigger.register(HistoricalMailTriggerResource()) + +from ietf.utils.resources import UserResource +class HistoricalRecipientResource(ModelResource): + history_user = ToOneField(UserResource, 'history_user', null=True) + class Meta: + queryset = Recipient.history.model.objects.all() + serializer = api.Serializer() + cache = SimpleCache() + #resource_name = 'historicalrecipient' + ordering = ['history_id', ] + filtering = { + "slug": ALL, + "desc": ALL, + "template": ALL, + "history_id": ALL, + "history_date": ALL, + "history_change_reason": ALL, + "history_type": ALL, + "history_user": ALL_WITH_RELATIONS, + } +api.mailtrigger.register(HistoricalRecipientResource()) From 
2960164714f0c0380d3259408b028a9150c8c27e Mon Sep 17 00:00:00 2001 From: Jennifer Richards Date: Wed, 3 Sep 2025 19:16:26 -0300 Subject: [PATCH 196/405] feat: Python 3.12 (#8811) * refactor: smtpd -> aiosmtpd * test: set mock return value for EmailOnFailureCommandTests The test has been working, but in a broken way, for as long as it has existed. The smtpd-based test_smtpserver was masking an exception that did not interfere with the test's effectiveness. * test: increase SMTP.line_length_limit * chore: suppress known deprecation warnings * refactor: utcfromtimestamp->fromtimestamp * refactor: it's now spelled "datetime.UTC" * feat: python 3.12 * chore: suppress deprecation warning * fix: utcnow() -> now(datetime.UTC) * chore: suppress deprecation warning * chore: more deprecation warnings * ci: update base image target version to 20250417T1507 * chore: reorg / clean up deprecation ignore list Removed a few suppressions that were OBE based on running the tests and checking versions of the dependencies that were causing them. Reordered kwargs to make it more readable (to me anyway). * chore: disable coverage test for now See the comment in settings.py for details. 
tl;dr coverage is unusably slow under python 3.12 as we're using it * ci: update base image target version to 20250422T1458 * ci: update base image target version to 20250604T2012 * ci: build/use py312 images (#9168) * ci: tag py312 base app * ci: datatrackerbase-app:latest -> py312 * ci: update base image target version to 20250719T0833 * refactor: update to coverage 7.9.2 + cleanup (#9256) * refactor: drop unused code_coverage_collection var * refactor: @skip_coverage -> pragma: no cover * chore(deps): bump coverage to current ver * refactor: split up set_coverage_checking() * refactor: inline IetfLiveServerTestCase (there's only one subclass) * feat: disable_coverage context mgr * chore: remove unused import * refactor: set_coverage_checking -> disable_coverage * refactor: elim more set_coverage_checking * refactor: start using coverage 7.9.2 * feat: working coverage 7.9 implementation * Extract coverage tools to ietf.utils.coverage * Revert to starting checker in settings_test Does not exactly match previous coverage reports. Need to investigate. * refactor: CustomJsonReporter->CustomDictReporter * chore: remove "migration" coverage entry Has not been populated in quite some time * test: test CoverageManager class * chore: exclude CustomDictReporter from coverage Setting up to test this will be complex and we'll notice other test failures/coverage weirdness if this does not behave. 
* chore: exclude coverage.py from coverage Way too meta * chore: update deps for py3.12 (#9270) * chore(deps): argon2-cffi (supports py3.14) * chore(deps): setuptools to latest (py3.9+) * chore(deps): bump beautifulsoup4 (py3.7+) * chore(deps): bump bibtexparser (py3) * chore(deps): bump bleach (py3.13) * chore(deps): bump bleach (py3.13) * chore(deps): lift pin on boto3 + adjust settings * chore(deps): bump celery (py3.13) * chore(deps): bump django-admin-rangefilter (py3.12) * chore(deps): bump django-analytical (py3.13) * chore(deps): bump django-bootstrap5 (py3.13) * chore(deps): bump django-celery-beat (py3.12) Still holding back until their #894 is conclusively resolved. The 2.8.x release adds official py3.13 support. * chore(deps): bump django-celery-results (py3.13) * chore(deps): remove django-csp (not used) * chore(deps): bump django-cors-headers (py3.13) * chore(deps): bump django-debug-toolbar (py3.13) * refactor: drop stale django-referrer-policy pkg Supported via django's SecurityMiddleware since longtime * chore(deps): bump django-simple-history (py3.13) * chore(deps): bump django-storages (py3.12) * chore(deps): bump django-tastypie+update patch * chore(deps): bump django_vite+update config * chore(deps): bump djangorestframework+remove cap * chore(deps): remove djlint * chore(deps): bump docutils (py3.14) * chore(deps): bump drf-standardized-errors (py3.13) * chore(deps): bump factory-boy (py3.13) * chore(deps): bump github3.py (py3.11??) 
* chore(deps): bump gunicorn (py3.12) * chore(deps): bump html2text (py3.13) * chore(deps): bump inflect * chore(deps): bump jsonfield (py3.10-3.13) * chore(deps): bump jsonschema (py3.13) * chore(deps): bump logging_tree (py3.12) * chore(deps): bump lxml (py3.13) * chore(deps): bump markdown (py3.13) * chore(deps): bump mock * chore(deps): bump oic (py3.11) * chore(deps): bump pillow (py3.13) * chore(deps): bump psycopg2 (py3.13) * chore(deps): bump pyang (py3.11) * chore(deps): bump pydyf (py3.12) * chore(deps): bump pyflakes (py3.9+) * chore(deps): bump pyopenssl (py3.13) * chore(deps): bump pyquery (py3.12) * chore(deps): bump python-dateutil (py3.12) * chore(deps): bump python-json-logger (py3.13) * chore(deps): bump python-mimeparse (py3.13) * chore(deps): bump pytz (py3.13) Brings a meeting migration to adjust tz/country choices. * chore(deps): bump requests (py3.13) * chore(deps): bump requests-mock (py3.12) * chore(deps): bump scout-apm (py3.12) * chore(deps): bump selenium (py3.13) * chore(deps): bump tblib (py3.13) * chore(deps): bump tqdm (py3.12) * chore(deps): bump unidecode (py3.11) * chore(deps): adjust requirements.txt to install correctly * chore(deps): bump urllib3, remove pin (py3.13) Situation requiring the pin to < 2.0 appears to have resolved. * chore(deps): bump weasyprint (py3.13) * chore(deps): bump xml2rfc (py3.13) * fix: lint * ci: py312 base for celery in sandbox * ci: update base image target version to 20250819T1645 * chore: finish dropping smtpd (#9384) * chore: smtpd debug server -> aiosmtpd * chore(dev): accept long SMTP lines * chore(dev): use correct aiosmtpd handler * chore: update copyright years * Revert "chore: update copyright years" This reverts commit 2814cb85dc43c9a27f9834c629474e58d1dfb0f7. 
--------- Co-authored-by: jennifer-richards <19472766+jennifer-richards@users.noreply.github.com> --- .github/workflows/build-base-app.yml | 1 + .github/workflows/tests-az.yml | 2 +- .vscode/tasks.json | 5 +- README.md | 2 +- dev/build/Dockerfile | 2 +- dev/build/TARGET_BASE | 2 +- dev/celery/Dockerfile | 0 dev/deploy-to-container/cli.js | 6 +- dev/diff/cli.js | 6 +- dev/tests/debug.sh | 2 +- dev/tests/docker-compose.debug.yml | 2 +- docker/app.Dockerfile | 2 +- docker/base.Dockerfile | 2 +- docker/celery.Dockerfile | 2 +- docker/configs/settings_local.py | 2 + docker/configs/settings_local_vite.py | 6 +- docker/scripts/app-configure-blobstore.py | 6 +- docker/scripts/app-init.sh | 2 +- ietf/api/__init__.py | 2 +- ietf/api/tests.py | 6 +- ietf/bin/aliases-from-json.py | 2 +- ietf/doc/models.py | 2 +- ietf/doc/templatetags/ballot_icon.py | 2 +- ietf/doc/tests_draft.py | 4 +- ietf/doc/tests_utils.py | 2 +- ietf/doc/views_stats.py | 6 +- ietf/group/views.py | 2 +- ietf/idindex/index.py | 4 +- ietf/iesg/views.py | 2 +- ietf/ietfauth/views.py | 2 +- ietf/ipr/mail.py | 4 +- ietf/ipr/views.py | 14 +- ietf/liaisons/tests.py | 8 +- ...meeting_country_alter_meeting_time_zone.py | 1 + ietf/meeting/models.py | 6 +- ietf/meeting/tests_js.py | 2 +- ietf/meeting/tests_tasks.py | 2 +- ietf/meeting/tests_views.py | 29 ++-- ietf/meeting/views.py | 6 +- ietf/nomcom/tests.py | 2 +- ietf/nomcom/views.py | 4 +- ietf/settings.py | 48 +++--- ietf/settings_test.py | 5 +- ietf/submit/checkers.py | 57 ++++--- ietf/sync/iana.py | 8 +- ietf/sync/tasks.py | 2 +- ietf/sync/tests.py | 6 +- .../utils/{test_smtpserver.py => aiosmtpd.py} | 21 ++- ietf/utils/coverage.py | 90 ++++++++++ ietf/utils/decorators.py | 12 -- ietf/utils/jstest.py | 41 ++++- ietf/utils/meetecho.py | 4 +- ietf/utils/serialize.py | 2 +- ietf/utils/test_runner.py | 155 ++++++------------ ietf/utils/tests.py | 14 +- ietf/utils/tests_coverage.py | 56 +++++++ ietf/utils/tests_meetecho.py | 26 +-- ietf/utils/timezone.py | 2 +- 
k8s/settings_local.py | 6 +- ...astypie-django22-fielderror-response.patch | 8 +- requirements.txt | 135 ++++++++------- 61 files changed, 505 insertions(+), 359 deletions(-) create mode 100644 dev/celery/Dockerfile rename ietf/utils/{test_smtpserver.py => aiosmtpd.py} (72%) create mode 100644 ietf/utils/coverage.py create mode 100644 ietf/utils/tests_coverage.py diff --git a/.github/workflows/build-base-app.yml b/.github/workflows/build-base-app.yml index ef8a17f6b4..4a4394fca0 100644 --- a/.github/workflows/build-base-app.yml +++ b/.github/workflows/build-base-app.yml @@ -51,6 +51,7 @@ jobs: push: true tags: | ghcr.io/ietf-tools/datatracker-app-base:${{ env.IMGVERSION }} + ghcr.io/ietf-tools/datatracker-app-base:py312 ${{ github.ref == 'refs/heads/main' && 'ghcr.io/ietf-tools/datatracker-app-base:latest' || '' }} - name: Update version references diff --git a/.github/workflows/tests-az.yml b/.github/workflows/tests-az.yml index 8553563a19..d1fe0cdf62 100644 --- a/.github/workflows/tests-az.yml +++ b/.github/workflows/tests-az.yml @@ -62,7 +62,7 @@ jobs: echo "Starting Containers..." sudo docker network create dtnet sudo docker run -d --name db --network=dtnet ghcr.io/ietf-tools/datatracker-db:latest & - sudo docker run -d --name app --network=dtnet ghcr.io/ietf-tools/datatracker-app-base:latest sleep infinity & + sudo docker run -d --name app --network=dtnet ghcr.io/ietf-tools/datatracker-app-base:py312 sleep infinity & wait echo "Cloning datatracker repo..." 
diff --git a/.vscode/tasks.json b/.vscode/tasks.json index 4bd0b99363..8b36b0e6ac 100644 --- a/.vscode/tasks.json +++ b/.vscode/tasks.json @@ -105,10 +105,11 @@ "command": "/usr/local/bin/python", "args": [ "-m", - "smtpd", + "aiosmtpd", "-n", "-c", - "DebuggingServer", + "ietf.utils.aiosmtpd.DevDebuggingHandler", + "-l", "localhost:2025" ], "presentation": { diff --git a/README.md b/README.md index abebb7ca02..4e1b7e1a45 100644 --- a/README.md +++ b/README.md @@ -142,7 +142,7 @@ Pages will gradually be updated to Vue 3 components. These components are locate Each Vue 3 app has its own sub-directory. For example, the agenda app is located under `/client/agenda`. -The datatracker makes use of the Django-Vite plugin to point to either the Vite.js server or the precompiled production files. The `DJANGO_VITE_DEV_MODE` flag, found in the `ietf/settings_local.py` file determines whether the Vite.js server is used or not. +The datatracker makes use of the Django-Vite plugin to point to either the Vite.js server or the precompiled production files. The `DJANGO_VITE["default"]["dev_mode"]` flag, found in the `ietf/settings_local.py` file determines whether the Vite.js server is used or not. 
In development mode, you must start the Vite.js development server, in addition to the usual Datatracker server: diff --git a/dev/build/Dockerfile b/dev/build/Dockerfile index d619ee99ee..658f1e5695 100644 --- a/dev/build/Dockerfile +++ b/dev/build/Dockerfile @@ -1,4 +1,4 @@ -FROM ghcr.io/ietf-tools/datatracker-app-base:20250821T1359 +FROM ghcr.io/ietf-tools/datatracker-app-base:20250819T1645 LABEL maintainer="IETF Tools Team " ENV DEBIAN_FRONTEND=noninteractive diff --git a/dev/build/TARGET_BASE b/dev/build/TARGET_BASE index b6fc12e128..9e510ad8db 100644 --- a/dev/build/TARGET_BASE +++ b/dev/build/TARGET_BASE @@ -1 +1 @@ -20250821T1359 +20250819T1645 diff --git a/dev/celery/Dockerfile b/dev/celery/Dockerfile new file mode 100644 index 0000000000..e69de29bb2 diff --git a/dev/deploy-to-container/cli.js b/dev/deploy-to-container/cli.js index 1a2d993ac4..2f0faad151 100644 --- a/dev/deploy-to-container/cli.js +++ b/dev/deploy-to-container/cli.js @@ -85,7 +85,7 @@ async function main () { // Pull latest Datatracker Base image console.info('Pulling latest Datatracker base docker image...') - const appImagePullStream = await dock.pull('ghcr.io/ietf-tools/datatracker-app-base:latest') + const appImagePullStream = await dock.pull('ghcr.io/ietf-tools/datatracker-app-base:py312') await new Promise((resolve, reject) => { dock.modem.followProgress(appImagePullStream, (err, res) => err ? reject(err) : resolve(res)) }) @@ -214,7 +214,7 @@ async function main () { const celeryContainers = {} for (const conConf of conConfs) { celeryContainers[conConf.name] = await dock.createContainer({ - Image: 'ghcr.io/ietf-tools/datatracker-app-base:latest', + Image: 'ghcr.io/ietf-tools/datatracker-app-base:py312', name: `dt-${conConf.name}-${branch}`, Hostname: `dt-${conConf.name}-${branch}`, Env: [ @@ -244,7 +244,7 @@ async function main () { // Create Datatracker container console.info(`Creating Datatracker docker container... 
[dt-app-${branch}]`) const appContainer = await dock.createContainer({ - Image: 'ghcr.io/ietf-tools/datatracker-app-base:latest', + Image: 'ghcr.io/ietf-tools/datatracker-app-base:py312', name: `dt-app-${branch}`, Hostname: `dt-app-${branch}`, Env: [ diff --git a/dev/diff/cli.js b/dev/diff/cli.js index 461b0c37a0..0cf353cc65 100644 --- a/dev/diff/cli.js +++ b/dev/diff/cli.js @@ -567,7 +567,7 @@ async function main () { { title: 'Pulling latest Datatracker base docker image...', task: async (subctx, subtask) => { - const appImagePullStream = await dock.pull('ghcr.io/ietf-tools/datatracker-app-base:latest') + const appImagePullStream = await dock.pull('ghcr.io/ietf-tools/datatracker-app-base:py312') await new Promise((resolve, reject) => { dock.modem.followProgress(appImagePullStream, (err, res) => err ? reject(err) : resolve(res)) }) @@ -648,7 +648,7 @@ async function main () { title: 'Creating source Datatracker docker container...', task: async (subctx, subtask) => { containers.appSource = await dock.createContainer({ - Image: 'ghcr.io/ietf-tools/datatracker-app-base:latest', + Image: 'ghcr.io/ietf-tools/datatracker-app-base:py312', name: 'dt-diff-app-source', Tty: true, Hostname: 'appsource', @@ -664,7 +664,7 @@ async function main () { title: 'Creating target Datatracker docker container...', task: async (subctx, subtask) => { containers.appTarget = await dock.createContainer({ - Image: 'ghcr.io/ietf-tools/datatracker-app-base:latest', + Image: 'ghcr.io/ietf-tools/datatracker-app-base:py312', name: 'dt-diff-app-target', Tty: true, Hostname: 'apptarget', diff --git a/dev/tests/debug.sh b/dev/tests/debug.sh index d87c504bb9..e92e6d9b2a 100644 --- a/dev/tests/debug.sh +++ b/dev/tests/debug.sh @@ -9,7 +9,7 @@ # Simply type "exit" + ENTER to exit and shutdown this test environment. echo "Fetching latest images..." 
-docker pull ghcr.io/ietf-tools/datatracker-app-base:latest +docker pull ghcr.io/ietf-tools/datatracker-app-base:py312 docker pull ghcr.io/ietf-tools/datatracker-db:latest echo "Starting containers..." docker compose -f docker-compose.debug.yml -p dtdebug --compatibility up -d diff --git a/dev/tests/docker-compose.debug.yml b/dev/tests/docker-compose.debug.yml index 8117b92375..168bbd4e92 100644 --- a/dev/tests/docker-compose.debug.yml +++ b/dev/tests/docker-compose.debug.yml @@ -5,7 +5,7 @@ version: '3.8' services: app: - image: ghcr.io/ietf-tools/datatracker-app-base:latest + image: ghcr.io/ietf-tools/datatracker-app-base:py312 command: -f /dev/null working_dir: /__w/datatracker/datatracker entrypoint: tail diff --git a/docker/app.Dockerfile b/docker/app.Dockerfile index fee3833733..e3df9bd4b4 100644 --- a/docker/app.Dockerfile +++ b/docker/app.Dockerfile @@ -1,4 +1,4 @@ -FROM ghcr.io/ietf-tools/datatracker-app-base:latest +FROM ghcr.io/ietf-tools/datatracker-app-base:py312 LABEL maintainer="IETF Tools Team " ENV DEBIAN_FRONTEND=noninteractive diff --git a/docker/base.Dockerfile b/docker/base.Dockerfile index 57aac8ee56..c1fe5b093e 100644 --- a/docker/base.Dockerfile +++ b/docker/base.Dockerfile @@ -1,4 +1,4 @@ -FROM python:3.9-bookworm +FROM python:3.12-bookworm LABEL maintainer="IETF Tools Team " ENV DEBIAN_FRONTEND=noninteractive diff --git a/docker/celery.Dockerfile b/docker/celery.Dockerfile index e7c7b9cc3f..279d5c7550 100644 --- a/docker/celery.Dockerfile +++ b/docker/celery.Dockerfile @@ -1,4 +1,4 @@ -FROM ghcr.io/ietf-tools/datatracker-app-base:latest +FROM ghcr.io/ietf-tools/datatracker-app-base:py312 LABEL maintainer="IETF Tools Team " ENV DEBIAN_FRONTEND=noninteractive diff --git a/docker/configs/settings_local.py b/docker/configs/settings_local.py index ca51871463..3ee7a4295d 100644 --- a/docker/configs/settings_local.py +++ b/docker/configs/settings_local.py @@ -89,6 +89,8 @@ secret_key="minio_pass", security_token=None, 
client_config=botocore.config.Config( + request_checksum_calculation="when_required", + response_checksum_validation="when_required", signature_version="s3v4", connect_timeout=BLOBSTORAGE_CONNECT_TIMEOUT, read_timeout=BLOBSTORAGE_READ_TIMEOUT, diff --git a/docker/configs/settings_local_vite.py b/docker/configs/settings_local_vite.py index 7fb12a003d..9116905b12 100644 --- a/docker/configs/settings_local_vite.py +++ b/docker/configs/settings_local_vite.py @@ -2,5 +2,9 @@ # -*- coding: utf-8 -*- from ietf.settings_local import * # pyflakes:ignore +from ietf.settings_local import DJANGO_VITE -DJANGO_VITE_DEV_MODE = True +DJANGO_VITE["default"] |= { + "dev_mode": True, + "dev_server_port": 3000, +} diff --git a/docker/scripts/app-configure-blobstore.py b/docker/scripts/app-configure-blobstore.py index df4685b246..3140e39306 100755 --- a/docker/scripts/app-configure-blobstore.py +++ b/docker/scripts/app-configure-blobstore.py @@ -17,7 +17,11 @@ def init_blobstore(): aws_access_key_id=os.environ.get("BLOB_STORE_ACCESS_KEY", "minio_root"), aws_secret_access_key=os.environ.get("BLOB_STORE_SECRET_KEY", "minio_pass"), aws_session_token=None, - config=botocore.config.Config(signature_version="s3v4"), + config=botocore.config.Config( + request_checksum_calculation="when_required", + response_checksum_validation="when_required", + signature_version="s3v4", + ), ) for bucketname in ARTIFACT_STORAGE_NAMES: try: diff --git a/docker/scripts/app-init.sh b/docker/scripts/app-init.sh index 17e0c6c764..1d895cdf53 100755 --- a/docker/scripts/app-init.sh +++ b/docker/scripts/app-init.sh @@ -108,7 +108,7 @@ echo "Running initial checks..." if [ -z "$EDITOR_VSCODE" ]; then CODE=0 - python -m smtpd -n -c DebuggingServer localhost:2025 & + python -m aiosmtpd -n -c ietf.utils.aiosmtpd.DevDebuggingHandler -l localhost:2025 & if [ -z "$*" ]; then echo "-----------------------------------------------------------------" echo "Ready!" 
diff --git a/ietf/api/__init__.py b/ietf/api/__init__.py index 230f8339bd..d4562f97dd 100644 --- a/ietf/api/__init__.py +++ b/ietf/api/__init__.py @@ -181,7 +181,7 @@ class Serializer(tastypie.serializers.Serializer): OPTION_ESCAPE_NULLS = "datatracker-escape-nulls" def format_datetime(self, data): - return data.astimezone(datetime.timezone.utc).replace(tzinfo=None).isoformat(timespec="seconds") + "Z" + return data.astimezone(datetime.UTC).replace(tzinfo=None).isoformat(timespec="seconds") + "Z" def to_simple(self, data, options): options = options or {} diff --git a/ietf/api/tests.py b/ietf/api/tests.py index 865f877bfb..2a44791a5c 100644 --- a/ietf/api/tests.py +++ b/ietf/api/tests.py @@ -462,12 +462,12 @@ def test_api_add_session_attendees(self): self.assertTrue(session.attended_set.filter(person=recman).exists()) self.assertEqual( session.attended_set.get(person=recman).time, - datetime.datetime(2023, 9, 3, 12, 34, 56, tzinfo=datetime.timezone.utc), + datetime.datetime(2023, 9, 3, 12, 34, 56, tzinfo=datetime.UTC), ) self.assertTrue(session.attended_set.filter(person=otherperson).exists()) self.assertEqual( session.attended_set.get(person=otherperson).time, - datetime.datetime(2023, 9, 3, 3, 0, 19, tzinfo=datetime.timezone.utc), + datetime.datetime(2023, 9, 3, 3, 0, 19, tzinfo=datetime.UTC), ) def test_api_upload_polls_and_chatlog(self): @@ -871,7 +871,7 @@ def test_api_new_meeting_registration_v2_nomcom(self): self.assertEqual(volunteer.origin, 'registration') def test_api_version(self): - DumpInfo.objects.create(date=timezone.datetime(2022,8,31,7,10,1,tzinfo=datetime.timezone.utc), host='testapi.example.com',tz='UTC') + DumpInfo.objects.create(date=timezone.datetime(2022,8,31,7,10,1,tzinfo=datetime.UTC), host='testapi.example.com',tz='UTC') url = urlreverse('ietf.api.views.version') r = self.client.get(url) data = r.json() diff --git a/ietf/bin/aliases-from-json.py b/ietf/bin/aliases-from-json.py index a0c383a1ac..0da5d1f8b9 100644 --- 
a/ietf/bin/aliases-from-json.py +++ b/ietf/bin/aliases-from-json.py @@ -38,7 +38,7 @@ def generate_files(records, adest, vdest, postconfirm, vdomain): vpath = tmppath / "virtual" with apath.open("w") as afile, vpath.open("w") as vfile: - date = datetime.datetime.now(datetime.timezone.utc) + date = datetime.datetime.now(datetime.UTC) signature = f"# Generated by {Path(__file__).absolute()} at {date}\n" afile.write(signature) vfile.write(signature) diff --git a/ietf/doc/models.py b/ietf/doc/models.py index b6f36cb8a7..25ee734cbe 100644 --- a/ietf/doc/models.py +++ b/ietf/doc/models.py @@ -1157,7 +1157,7 @@ def fake_history_obj(self, rev): elif rev_events.exists(): time = rev_events.first().time else: - time = datetime.datetime.fromtimestamp(0, datetime.timezone.utc) + time = datetime.datetime.fromtimestamp(0, datetime.UTC) dh = DocHistory(name=self.name, rev=rev, doc=self, time=time, type=self.type, title=self.title, stream=self.stream, group=self.group) diff --git a/ietf/doc/templatetags/ballot_icon.py b/ietf/doc/templatetags/ballot_icon.py index a94c145007..07a6c7f926 100644 --- a/ietf/doc/templatetags/ballot_icon.py +++ b/ietf/doc/templatetags/ballot_icon.py @@ -196,7 +196,7 @@ def state_age_colored(doc): .time ) except IndexError: - state_datetime = datetime.datetime(1990, 1, 1, tzinfo=datetime.timezone.utc) + state_datetime = datetime.datetime(1990, 1, 1, tzinfo=datetime.UTC) days = (timezone.now() - state_datetime).days # loosely based on the Publish Path page at the iesg wiki if iesg_state == "lc": diff --git a/ietf/doc/tests_draft.py b/ietf/doc/tests_draft.py index ab7eaba768..ab33acebe6 100644 --- a/ietf/doc/tests_draft.py +++ b/ietf/doc/tests_draft.py @@ -678,11 +678,11 @@ def test_in_draft_expire_freeze(self): datetime.datetime.combine( ietf_monday - datetime.timedelta(days=1), datetime.time(0, 0, 0), - tzinfo=datetime.timezone.utc, + tzinfo=datetime.UTC, ) )) self.assertFalse(in_draft_expire_freeze( - datetime.datetime.combine(ietf_monday, 
datetime.time(0, 0, 0), tzinfo=datetime.timezone.utc) + datetime.datetime.combine(ietf_monday, datetime.time(0, 0, 0), tzinfo=datetime.UTC) )) def test_warn_expirable_drafts(self): diff --git a/ietf/doc/tests_utils.py b/ietf/doc/tests_utils.py index f610fe3d76..7db59819da 100644 --- a/ietf/doc/tests_utils.py +++ b/ietf/doc/tests_utils.py @@ -148,7 +148,7 @@ def test_update_action_holders_resets_age(self): doc = self.doc_in_iesg_state('pub-req') doc.action_holders.set([self.ad]) dah = doc.documentactionholder_set.get(person=self.ad) - dah.time_added = datetime.datetime(2020, 1, 1, tzinfo=datetime.timezone.utc) # arbitrary date in the past + dah.time_added = datetime.datetime(2020, 1, 1, tzinfo=datetime.UTC) # arbitrary date in the past dah.save() right_now = timezone.now() diff --git a/ietf/doc/views_stats.py b/ietf/doc/views_stats.py index 0bbf0b91c5..028573b338 100644 --- a/ietf/doc/views_stats.py +++ b/ietf/doc/views_stats.py @@ -18,7 +18,7 @@ from ietf.utils.timezone import date_today -epochday = datetime.datetime.utcfromtimestamp(0).date().toordinal() +epochday = datetime.datetime.fromtimestamp(0, datetime.UTC).date().toordinal() def dt(s): @@ -35,13 +35,13 @@ def model_to_timeline_data(model, field='time', **kwargs): assert field in [ f.name for f in model._meta.get_fields() ] objects = ( model.objects.filter(**kwargs) - .annotate(date=TruncDate(field, tzinfo=datetime.timezone.utc)) + .annotate(date=TruncDate(field, tzinfo=datetime.UTC)) .order_by('date') .values('date') .annotate(count=Count('id'))) if objects.exists(): obj_list = list(objects) - today = date_today(datetime.timezone.utc) + today = date_today(datetime.UTC) if not obj_list[-1]['date'] == today: obj_list += [ {'date': today, 'count': 0} ] data = [ ((e['date'].toordinal()-epochday)*1000*60*60*24, e['count']) for e in obj_list ] diff --git a/ietf/group/views.py b/ietf/group/views.py index 3529b31f68..bc785ff81e 100644 --- a/ietf/group/views.py +++ b/ietf/group/views.py @@ -941,7 +941,7 @@ def 
meetings(request, acronym, group_type=None): cutoff_date = revsub_dates_by_meeting[s.meeting.pk] else: cutoff_date = s.meeting.date + datetime.timedelta(days=s.meeting.submission_correction_day_offset) - s.cached_is_cutoff = date_today(datetime.timezone.utc) > cutoff_date + s.cached_is_cutoff = date_today(datetime.UTC) > cutoff_date future, in_progress, recent, past = group_sessions(sessions) diff --git a/ietf/idindex/index.py b/ietf/idindex/index.py index 4f021c0dc7..19eb29d4da 100644 --- a/ietf/idindex/index.py +++ b/ietf/idindex/index.py @@ -276,7 +276,7 @@ def active_drafts_index_by_group(extra_values=()): groups = [g for g in groups_dict.values() if hasattr(g, "active_drafts")] groups.sort(key=lambda g: g.acronym) - fallback_time = datetime.datetime(1950, 1, 1, tzinfo=datetime.timezone.utc) + fallback_time = datetime.datetime(1950, 1, 1, tzinfo=datetime.UTC) for g in groups: g.active_drafts.sort(key=lambda d: d.get("initial_rev_time", fallback_time)) @@ -302,6 +302,6 @@ def id_index_txt(with_abstracts=False): return render_to_string("idindex/id_index.txt", { 'groups': groups, - 'time': timezone.now().astimezone(datetime.timezone.utc).strftime("%Y-%m-%d %H:%M:%S %Z"), + 'time': timezone.now().astimezone(datetime.UTC).strftime("%Y-%m-%d %H:%M:%S %Z"), 'with_abstracts': with_abstracts, }) diff --git a/ietf/iesg/views.py b/ietf/iesg/views.py index 7b9f489b44..ffd4515c98 100644 --- a/ietf/iesg/views.py +++ b/ietf/iesg/views.py @@ -101,7 +101,7 @@ def agenda_json(request, date=None): res = { "telechat-date": str(data["date"]), - "as-of": str(datetime.datetime.utcnow()), + "as-of": str(datetime.datetime.now(datetime.UTC)), "page-counts": telechat_page_count(date=get_agenda_date(date))._asdict(), "sections": {}, } diff --git a/ietf/ietfauth/views.py b/ietf/ietfauth/views.py index 4219747e12..b5256b14f8 100644 --- a/ietf/ietfauth/views.py +++ b/ietf/ietfauth/views.py @@ -517,7 +517,7 @@ def confirm_password_reset(request, auth): password = data['password'] last_login = 
None if data['last_login']: - last_login = datetime.datetime.fromtimestamp(data['last_login'], datetime.timezone.utc) + last_login = datetime.datetime.fromtimestamp(data['last_login'], datetime.UTC) except django.core.signing.BadSignature: raise Http404("Invalid or expired auth") diff --git a/ietf/ipr/mail.py b/ietf/ipr/mail.py index 167b11956c..9bef751b95 100644 --- a/ietf/ipr/mail.py +++ b/ietf/ipr/mail.py @@ -66,9 +66,9 @@ def utc_from_string(s): if date is None: return None elif is_aware(date): - return date.astimezone(datetime.timezone.utc) + return date.astimezone(datetime.UTC) else: - return date.replace(tzinfo=datetime.timezone.utc) + return date.replace(tzinfo=datetime.UTC) # ---------------------------------------------------------------- # Email Functions diff --git a/ietf/ipr/views.py b/ietf/ipr/views.py index 24453df2d2..08979a3972 100644 --- a/ietf/ipr/views.py +++ b/ietf/ipr/views.py @@ -152,13 +152,13 @@ def ipr_rfc_number(disclosureDate, thirdPartyDisclosureFlag): # RFC publication date comes from the RFC Editor announcement ipr_rfc_pub_datetime = { - 1310 : datetime.datetime(1992, 3, 13, 0, 0, tzinfo=datetime.timezone.utc), - 1802 : datetime.datetime(1994, 3, 23, 0, 0, tzinfo=datetime.timezone.utc), - 2026 : datetime.datetime(1996, 10, 29, 0, 0, tzinfo=datetime.timezone.utc), - 3668 : datetime.datetime(2004, 2, 18, 0, 0, tzinfo=datetime.timezone.utc), - 3979 : datetime.datetime(2005, 3, 2, 2, 23, tzinfo=datetime.timezone.utc), - 4879 : datetime.datetime(2007, 4, 10, 18, 21, tzinfo=datetime.timezone.utc), - 8179 : datetime.datetime(2017, 5, 31, 23, 1, tzinfo=datetime.timezone.utc), + 1310 : datetime.datetime(1992, 3, 13, 0, 0, tzinfo=datetime.UTC), + 1802 : datetime.datetime(1994, 3, 23, 0, 0, tzinfo=datetime.UTC), + 2026 : datetime.datetime(1996, 10, 29, 0, 0, tzinfo=datetime.UTC), + 3668 : datetime.datetime(2004, 2, 18, 0, 0, tzinfo=datetime.UTC), + 3979 : datetime.datetime(2005, 3, 2, 2, 23, tzinfo=datetime.UTC), + 4879 : datetime.datetime(2007, 
4, 10, 18, 21, tzinfo=datetime.UTC), + 8179 : datetime.datetime(2017, 5, 31, 23, 1, tzinfo=datetime.UTC), } if disclosureDate < ipr_rfc_pub_datetime[1310]: diff --git a/ietf/liaisons/tests.py b/ietf/liaisons/tests.py index 8bbaa4f053..a1fbf77841 100644 --- a/ietf/liaisons/tests.py +++ b/ietf/liaisons/tests.py @@ -723,7 +723,7 @@ def test_add_incoming_liaison(self): from_groups = [ str(g.pk) for g in Group.objects.filter(type="sdo") ] to_group = Group.objects.get(acronym="mars") submitter = Person.objects.get(user__username="marschairman") - today = date_today(datetime.timezone.utc) + today = date_today(datetime.UTC) related_liaison = liaison r = self.client.post(url, dict(from_groups=from_groups, @@ -808,7 +808,7 @@ def test_add_outgoing_liaison(self): from_group = Group.objects.get(acronym="mars") to_group = Group.objects.filter(type="sdo")[0] submitter = Person.objects.get(user__username="marschairman") - today = date_today(datetime.timezone.utc) + today = date_today(datetime.UTC) related_liaison = liaison r = self.client.post(url, dict(from_groups=str(from_group.pk), @@ -878,7 +878,7 @@ def test_add_outgoing_liaison_unapproved_post_only(self): from_group = Group.objects.get(acronym="mars") to_group = Group.objects.filter(type="sdo")[0] submitter = Person.objects.get(user__username="marschairman") - today = date_today(datetime.timezone.utc) + today = date_today(datetime.UTC) r = self.client.post(url, dict(from_groups=str(from_group.pk), from_contact=submitter.email_address(), @@ -1062,7 +1062,7 @@ def test_search(self): LiaisonStatementEventFactory(type_id='posted', statement__body="Has recently in its body",statement__from_groups=[GroupFactory(type_id='sdo',acronym='ulm'),]) # Statement 2 s2 = LiaisonStatementEventFactory(type_id='posted', statement__body="That word does not occur here", statement__title="Nor does it occur here") - s2.time=datetime.datetime(2010, 1, 1, tzinfo=datetime.timezone.utc) + s2.time=datetime.datetime(2010, 1, 1, tzinfo=datetime.UTC) 
s2.save() # test list only, no search filters diff --git a/ietf/meeting/migrations/0016_alter_meeting_country_alter_meeting_time_zone.py b/ietf/meeting/migrations/0016_alter_meeting_country_alter_meeting_time_zone.py index 8f5db26112..8c467ea156 100644 --- a/ietf/meeting/migrations/0016_alter_meeting_country_alter_meeting_time_zone.py +++ b/ietf/meeting/migrations/0016_alter_meeting_country_alter_meeting_time_zone.py @@ -4,6 +4,7 @@ class Migration(migrations.Migration): + dependencies = [ ("meeting", "0015_alter_meeting_time_zone"), ] diff --git a/ietf/meeting/models.py b/ietf/meeting/models.py index de0192769e..f3df23e916 100644 --- a/ietf/meeting/models.py +++ b/ietf/meeting/models.py @@ -149,7 +149,7 @@ def get_00_cutoff(self): cutoff_date = importantdate.date else: cutoff_date = self.date + datetime.timedelta(days=ImportantDateName.objects.get(slug='idcutoff').default_offset_days) - cutoff_time = datetime_from_date(cutoff_date, datetime.timezone.utc) + self.idsubmit_cutoff_time_utc + cutoff_time = datetime_from_date(cutoff_date, datetime.UTC) + self.idsubmit_cutoff_time_utc return cutoff_time def get_01_cutoff(self): @@ -161,7 +161,7 @@ def get_01_cutoff(self): cutoff_date = importantdate.date else: cutoff_date = self.date + datetime.timedelta(days=ImportantDateName.objects.get(slug='idcutoff').default_offset_days) - cutoff_time = datetime_from_date(cutoff_date, datetime.timezone.utc) + self.idsubmit_cutoff_time_utc + cutoff_time = datetime_from_date(cutoff_date, datetime.UTC) + self.idsubmit_cutoff_time_utc return cutoff_time def get_reopen_time(self): @@ -1172,7 +1172,7 @@ def can_manage_materials(self, user): return can_manage_materials(user,self.group) def is_material_submission_cutoff(self): - return date_today(datetime.timezone.utc) > self.meeting.get_submission_correction_date() + return date_today(datetime.UTC) > self.meeting.get_submission_correction_date() def joint_with_groups_acronyms(self): return [group.acronym for group in 
self.joint_with_groups.all()] diff --git a/ietf/meeting/tests_js.py b/ietf/meeting/tests_js.py index a184a7c6d0..262b47652c 100644 --- a/ietf/meeting/tests_js.py +++ b/ietf/meeting/tests_js.py @@ -1576,7 +1576,7 @@ def test_delete_timeslot_cancel(self): def do_delete_time_interval_test(self, cancel=False): delete_time_local = datetime_from_date(self.meeting.date, self.meeting.tz()).replace(hour=10) - delete_time = delete_time_local.astimezone(datetime.timezone.utc) + delete_time = delete_time_local.astimezone(datetime.UTC) duration = datetime.timedelta(minutes=60) delete: [TimeSlot] = TimeSlotFactory.create_batch( # type: ignore[annotation-unchecked] diff --git a/ietf/meeting/tests_tasks.py b/ietf/meeting/tests_tasks.py index 0c442c4bf7..a5da00ecbf 100644 --- a/ietf/meeting/tests_tasks.py +++ b/ietf/meeting/tests_tasks.py @@ -23,7 +23,7 @@ def test_proceedings_content_refresh_task(self, mock_generate): meeting127 = MeetingFactory(type_id="ietf", number="127") # 24 * 5 + 7 # Times to be returned - now_utc = datetime.datetime.now(tz=datetime.timezone.utc) + now_utc = datetime.datetime.now(tz=datetime.UTC) hour_00_utc = now_utc.replace(hour=0) hour_01_utc = now_utc.replace(hour=1) hour_07_utc = now_utc.replace(hour=7) diff --git a/ietf/meeting/tests_views.py b/ietf/meeting/tests_views.py index f382772485..bd3ab772fc 100644 --- a/ietf/meeting/tests_views.py +++ b/ietf/meeting/tests_views.py @@ -55,9 +55,8 @@ from ietf.meeting.views import session_draft_list, parse_agenda_filter_params, sessions_post_save, agenda_extract_schedule from ietf.meeting.views import get_summary_by_area, get_summary_by_type, get_summary_by_purpose, generate_agenda_data from ietf.name.models import SessionStatusName, ImportantDateName, RoleName, ProceedingsMaterialTypeName -from ietf.utils.decorators import skip_coverage from ietf.utils.mail import outbox, empty_outbox, get_payload_text -from ietf.utils.test_runner import TestBlobstoreManager +from ietf.utils.test_runner import 
TestBlobstoreManager, disable_coverage from ietf.utils.test_utils import TestCase, login_testing_unauthorized, unicontent from ietf.utils.timezone import date_today, time_now @@ -321,11 +320,11 @@ def test_meeting_agenda(self): self.assertContains(r, session.group.parent.acronym.upper()) self.assertContains(r, slot.location.name) self.assertContains(r, "{}-{}".format( - slot.time.astimezone(datetime.timezone.utc).strftime("%H%M"), - (slot.time + slot.duration).astimezone(datetime.timezone.utc).strftime("%H%M"), + slot.time.astimezone(datetime.UTC).strftime("%H%M"), + (slot.time + slot.duration).astimezone(datetime.UTC).strftime("%H%M"), )) self.assertContains(r, "shown in UTC") - updated = meeting.updated().astimezone(datetime.timezone.utc).strftime("%Y-%m-%d %H:%M:%S %Z") + updated = meeting.updated().astimezone(datetime.UTC).strftime("%Y-%m-%d %H:%M:%S %Z") self.assertContains(r, f"Updated {updated}") # text, invalid updated (none) @@ -369,8 +368,8 @@ def test_meeting_agenda(self): self.assertContains(r, session.group.parent.acronym.upper()) self.assertContains(r, slot.location.name) self.assertContains(r, registration_text) - start_time = slot.time.astimezone(datetime.timezone.utc) - end_time = slot.end_time().astimezone(datetime.timezone.utc) + start_time = slot.time.astimezone(datetime.UTC) + end_time = slot.end_time().astimezone(datetime.UTC) self.assertContains(r, '"{}","{}","{}"'.format( start_time.strftime("%Y-%m-%d"), start_time.strftime("%H%M"), @@ -1037,7 +1036,7 @@ def test_important_dates_ical(self): updated = meeting.updated() self.assertIsNotNone(updated) - expected_updated = updated.astimezone(datetime.timezone.utc).strftime("%Y%m%dT%H%M%SZ") + expected_updated = updated.astimezone(datetime.UTC).strftime("%Y%m%dT%H%M%SZ") self.assertContains(r, f"DTSTAMP:{expected_updated}") dtstamps_count = r.content.decode("utf-8").count(f"DTSTAMP:{expected_updated}") self.assertEqual(dtstamps_count, meeting.importantdate_set.count()) @@ -1181,8 +1180,8 @@ def 
test_session_draft_tarfile(self): os.unlink(filename) @skipIf(skip_pdf_tests, skip_message) - @skip_coverage - def test_session_draft_pdf(self): + @disable_coverage() + def test_session_draft_pdf(self): # pragma: no cover session, filenames = self.build_session_setup() try: url = urlreverse('ietf.meeting.views.session_draft_pdf', kwargs={'num':session.meeting.number,'acronym':session.group.acronym}) @@ -2117,8 +2116,8 @@ def test_editor_time_zone(self): # strftime() does not seem to support hours without leading 0, so do this manually time_label_string = f'{ts_start.hour:d}:{ts_start.minute:02d} - {ts_end.hour:d}:{ts_end.minute:02d}' self.assertIn(time_label_string, time_label.text()) - self.assertEqual(time_label.attr('data-start'), ts_start.astimezone(datetime.timezone.utc).isoformat()) - self.assertEqual(time_label.attr('data-end'), ts_end.astimezone(datetime.timezone.utc).isoformat()) + self.assertEqual(time_label.attr('data-start'), ts_start.astimezone(datetime.UTC).isoformat()) + self.assertEqual(time_label.attr('data-end'), ts_end.astimezone(datetime.UTC).isoformat()) ts_swap = time_label.find('.swap-timeslot-col') origin_label = ts_swap.attr('data-origin-label') @@ -2129,8 +2128,8 @@ def test_editor_time_zone(self): timeslot_elt = pq(f'#timeslot{timeslot.pk}') self.assertEqual(len(timeslot_elt), 1) - self.assertEqual(timeslot_elt.attr('data-start'), ts_start.astimezone(datetime.timezone.utc).isoformat()) - self.assertEqual(timeslot_elt.attr('data-end'), ts_end.astimezone(datetime.timezone.utc).isoformat()) + self.assertEqual(timeslot_elt.attr('data-start'), ts_start.astimezone(datetime.UTC).isoformat()) + self.assertEqual(timeslot_elt.attr('data-end'), ts_end.astimezone(datetime.UTC).isoformat()) timeslot_label = pq(f'#timeslot{timeslot.pk} .time-label') self.assertEqual(len(timeslot_label), 1) @@ -5233,7 +5232,7 @@ def test_upcoming_ical(self): updated = meeting.updated() self.assertIsNotNone(updated) - expected_updated = 
updated.astimezone(datetime.timezone.utc).strftime("%Y%m%dT%H%M%SZ") + expected_updated = updated.astimezone(datetime.UTC).strftime("%Y%m%dT%H%M%SZ") self.assertContains(r, f"DTSTAMP:{expected_updated}") # With default cached_updated, 1970-01-01 diff --git a/ietf/meeting/views.py b/ietf/meeting/views.py index 7fa3d21259..fcc9312609 100644 --- a/ietf/meeting/views.py +++ b/ietf/meeting/views.py @@ -153,7 +153,7 @@ def materials(request, num=None): begin_date = meeting.get_submission_start_date() cut_off_date = meeting.get_submission_cut_off_date() cor_cut_off_date = meeting.get_submission_correction_date() - today_utc = date_today(datetime.timezone.utc) + today_utc = date_today(datetime.UTC) old = timezone.now() - datetime.timedelta(days=1) if settings.SERVER_MODE != 'production' and '_testoverride' in request.GET: pass @@ -1921,7 +1921,7 @@ def slides_field(item): write_row(headings) - tz = datetime.timezone.utc if utc else schedule.meeting.tz() + tz = datetime.UTC if utc else schedule.meeting.tz() for item in filtered_assignments: row = [] row.append(item.timeslot.time.astimezone(tz).strftime("%Y-%m-%d")) @@ -2814,7 +2814,7 @@ def session_attendance(request, session_id, num): raise Http404("Bluesheets not found") cor_cut_off_date = session.meeting.get_submission_correction_date() - today_utc = date_today(datetime.timezone.utc) + today_utc = date_today(datetime.UTC) was_there = False can_add = False if request.user.is_authenticated: diff --git a/ietf/nomcom/tests.py b/ietf/nomcom/tests.py index cc2e0826d3..dcdb9ef836 100644 --- a/ietf/nomcom/tests.py +++ b/ietf/nomcom/tests.py @@ -2930,7 +2930,7 @@ def test_decorate_volunteers_with_qualifications(self): elig_date.year - 3, elig_date.month, 28 if elig_date.month == 2 and elig_date.day == 29 else elig_date.day, - tzinfo=datetime.timezone.utc, + tzinfo=datetime.UTC, ) ) nomcom.volunteer_set.create(person=author_person) diff --git a/ietf/nomcom/views.py b/ietf/nomcom/views.py index c04e13f92b..3f90be5253 100644 --- 
a/ietf/nomcom/views.py +++ b/ietf/nomcom/views.py @@ -981,7 +981,7 @@ def view_feedback_topic(request, year, topic_id): reviewer = request.user.person last_seen = TopicFeedbackLastSeen.objects.filter(reviewer=reviewer,topic=topic).first() - last_seen_time = (last_seen and last_seen.time) or datetime.datetime(year=1, month=1, day=1, tzinfo=datetime.timezone.utc) + last_seen_time = (last_seen and last_seen.time) or datetime.datetime(year=1, month=1, day=1, tzinfo=datetime.UTC) if last_seen: last_seen.save() else: @@ -1044,7 +1044,7 @@ def view_feedback_nominee(request, year, nominee_id): }) last_seen = FeedbackLastSeen.objects.filter(reviewer=reviewer,nominee=nominee).first() - last_seen_time = (last_seen and last_seen.time) or datetime.datetime(year=1, month=1, day=1, tzinfo=datetime.timezone.utc) + last_seen_time = (last_seen and last_seen.time) or datetime.datetime(year=1, month=1, day=1, tzinfo=datetime.UTC) if last_seen: last_seen.save() else: diff --git a/ietf/settings.py b/ietf/settings.py index 3af01d76e6..753508dc99 100644 --- a/ietf/settings.py +++ b/ietf/settings.py @@ -14,19 +14,27 @@ from hashlib import sha384 from typing import Any, Dict, List, Tuple # pyflakes:ignore +# DeprecationWarnings are suppressed by default, enable them warnings.simplefilter("always", DeprecationWarning) -warnings.filterwarnings("ignore", message="pkg_resources is deprecated as an API") -warnings.filterwarnings("ignore", "Log out via GET requests is deprecated") # happens in oidc_provider -warnings.filterwarnings("ignore", module="tastypie", message="The django.utils.datetime_safe module is deprecated.") -warnings.filterwarnings("ignore", module="oidc_provider", message="The django.utils.timezone.utc alias is deprecated.") + +# Warnings that must be resolved for Django 5.x +warnings.filterwarnings("ignore", "Log out via GET requests is deprecated") # caused by oidc_provider +warnings.filterwarnings("ignore", message="The django.utils.timezone.utc alias is deprecated.", 
module="oidc_provider") +warnings.filterwarnings("ignore", message="The django.utils.datetime_safe module is deprecated.", module="tastypie") warnings.filterwarnings("ignore", message="The USE_DEPRECATED_PYTZ setting,") # https://github.com/ietf-tools/datatracker/issues/5635 warnings.filterwarnings("ignore", message="The USE_L10N setting is deprecated.") # https://github.com/ietf-tools/datatracker/issues/5648 warnings.filterwarnings("ignore", message="django.contrib.auth.hashers.CryptPasswordHasher is deprecated.") # https://github.com/ietf-tools/datatracker/issues/5663 -warnings.filterwarnings("ignore", message="'urllib3\\[secure\\]' extra is deprecated") -warnings.filterwarnings("ignore", message="The logout\\(\\) view is superseded by") + +# Other DeprecationWarnings +warnings.filterwarnings("ignore", message="pkg_resources is deprecated as an API", module="pyang.plugin") warnings.filterwarnings("ignore", message="Report.file_reporters will no longer be available in Coverage.py 4.2", module="coverage.report") -warnings.filterwarnings("ignore", message="Using or importing the ABCs from 'collections' instead of from 'collections.abc' is deprecated", module="bleach") -warnings.filterwarnings("ignore", message="HTTPResponse.getheader\\(\\) is deprecated", module='selenium.webdriver') +warnings.filterwarnings("ignore", message="currentThread\\(\\) is deprecated", module="coverage.pytracer") +warnings.filterwarnings("ignore", message="co_lnotab is deprecated", module="coverage.parser") +warnings.filterwarnings("ignore", message="datetime.datetime.utcnow\\(\\) is deprecated", module="botocore.auth") +warnings.filterwarnings("ignore", message="datetime.datetime.utcnow\\(\\) is deprecated", module="oic.utils.time_util") +warnings.filterwarnings("ignore", message="datetime.datetime.utcfromtimestamp\\(\\) is deprecated", module="oic.utils.time_util") +warnings.filterwarnings("ignore", message="datetime.datetime.utcfromtimestamp\\(\\) is deprecated", module="pytz.tzinfo") + 
base_path = pathlib.Path(__file__).resolve().parent BASE_DIR = str(base_path) @@ -447,23 +455,24 @@ def skip_unreadable_post(record): "ietf.middleware.SMTPExceptionMiddleware", "ietf.middleware.Utf8ExceptionMiddleware", "ietf.middleware.redirect_trailing_period_middleware", - "django_referrer_policy.middleware.ReferrerPolicyMiddleware", "django.middleware.clickjacking.XFrameOptionsMiddleware", "django.middleware.security.SecurityMiddleware", - #"csp.middleware.CSPMiddleware", "ietf.middleware.unicode_nfkc_normalization_middleware", "ietf.middleware.is_authenticated_header_middleware", ] ROOT_URLCONF = 'ietf.urls' -DJANGO_VITE_ASSETS_PATH = os.path.join(BASE_DIR, 'static/dist-neue') +# Configure django_vite +DJANGO_VITE: dict = {"default": {}} if DEBUG: - DJANGO_VITE_MANIFEST_PATH = os.path.join(BASE_DIR, 'static/dist-neue/manifest.json') + DJANGO_VITE["default"]["manifest_path"] = os.path.join( + BASE_DIR, 'static/dist-neue/manifest.json' + ) # Additional locations of static files (in addition to each app's static/ dir) STATICFILES_DIRS = ( - DJANGO_VITE_ASSETS_PATH, + os.path.join(BASE_DIR, "static/dist-neue"), # for django_vite os.path.join(BASE_DIR, 'static/dist'), os.path.join(BASE_DIR, 'secr/static/dist'), ) @@ -567,8 +576,6 @@ def skip_unreadable_post(record): CORS_ALLOW_METHODS = ( 'GET', 'OPTIONS', ) CORS_URLS_REGEX = r'^(/api/.*|.*\.json|.*/json/?)$' -# Setting for django_referrer_policy.middleware.ReferrerPolicyMiddleware -REFERRER_POLICY = 'strict-origin-when-cross-origin' # django.middleware.security.SecurityMiddleware SECURE_BROWSER_XSS_FILTER = True @@ -581,6 +588,7 @@ def skip_unreadable_post(record): #SECURE_SSL_REDIRECT = True # Relax the COOP policy to allow Meetecho authentication pop-up SECURE_CROSS_ORIGIN_OPENER_POLICY = "unsafe-none" +SECURE_REFERRER_POLICY = "strict-origin-when-cross-origin" # Override this in your settings_local with the IP addresses relevant for you: INTERNAL_IPS = ( @@ -666,11 +674,6 @@ def skip_unreadable_post(record): 
IDNITS3_BASE_URL = "https://author-tools.ietf.org/idnits3/results" IDNITS_SERVICE_URL = "https://author-tools.ietf.org/idnits" -# Content security policy configuration (django-csp) -# (In current production, the Content-Security-Policy header is completely set by nginx configuration, but -# we try to keep this in sync to avoid confusion) -CSP_DEFAULT_SRC = ("'self'", "'unsafe-inline'", f"data: {IDTRACKER_BASE_URL} http://ietf.org/ https://www.ietf.org/ https://analytics.ietf.org/ https://static.ietf.org") - # The name of the method to use to invoke the test suite TEST_RUNNER = 'ietf.utils.test_runner.IetfTestRunner' @@ -709,6 +712,7 @@ def skip_unreadable_post(record): "ietf/utils/patch.py", "ietf/utils/test_data.py", "ietf/utils/jstest.py", + "ietf/utils/coverage.py", ] # These are code line regex patterns @@ -738,8 +742,8 @@ def skip_unreadable_post(record): TEST_CODE_COVERAGE_CHECKER = None if SERVER_MODE != 'production': - import coverage - TEST_CODE_COVERAGE_CHECKER = coverage.Coverage(source=[ BASE_DIR ], cover_pylib=False, omit=TEST_CODE_COVERAGE_EXCLUDE_FILES) + from ietf.utils.coverage import CoverageManager + TEST_CODE_COVERAGE_CHECKER = CoverageManager() TEST_CODE_COVERAGE_REPORT_PATH = "coverage/" TEST_CODE_COVERAGE_REPORT_URL = os.path.join(STATIC_URL, TEST_CODE_COVERAGE_REPORT_PATH, "index.html") diff --git a/ietf/settings_test.py b/ietf/settings_test.py index 9a42e8b99d..6479069db0 100755 --- a/ietf/settings_test.py +++ b/ietf/settings_test.py @@ -14,7 +14,7 @@ import shutil import tempfile from ietf.settings import * # pyflakes:ignore -from ietf.settings import TEST_CODE_COVERAGE_CHECKER, ORIG_AUTH_PASSWORD_VALIDATORS +from ietf.settings import ORIG_AUTH_PASSWORD_VALIDATORS import debug # pyflakes:ignore debug.debug = True @@ -52,10 +52,9 @@ def __getitem__(self, item): BLOBDB_DATABASE = "default" DATABASE_ROUTERS = [] # type: ignore -if TEST_CODE_COVERAGE_CHECKER and not TEST_CODE_COVERAGE_CHECKER._started: # pyflakes:ignore +if 
TEST_CODE_COVERAGE_CHECKER: # pyflakes:ignore TEST_CODE_COVERAGE_CHECKER.start() # pyflakes:ignore - def tempdir_with_cleanup(**kwargs): """Utility to create a temporary dir and arrange cleanup""" _dir = tempfile.mkdtemp(**kwargs) diff --git a/ietf/submit/checkers.py b/ietf/submit/checkers.py index 89908748a7..e02b686576 100644 --- a/ietf/submit/checkers.py +++ b/ietf/submit/checkers.py @@ -18,7 +18,7 @@ from ietf.utils import tool_version from ietf.utils.log import log, assertion from ietf.utils.pipe import pipe -from ietf.utils.test_runner import set_coverage_checking +from ietf.utils.test_runner import disable_coverage class DraftSubmissionChecker(object): name = "" @@ -247,34 +247,33 @@ def check_file_txt(self, path): ) # yanglint - set_coverage_checking(False) # we can't count the following as it may or may not be run, depending on setup - if settings.SUBMIT_YANGLINT_COMMAND and os.path.exists(settings.YANGLINT_BINARY): - cmd_template = settings.SUBMIT_YANGLINT_COMMAND - command = [ w for w in cmd_template.split() if not '=' in w ][0] - cmd = cmd_template.format(model=path, rfclib=settings.SUBMIT_YANG_RFC_MODEL_DIR, tmplib=workdir, - draftlib=settings.SUBMIT_YANG_DRAFT_MODEL_DIR, ianalib=settings.SUBMIT_YANG_IANA_MODEL_DIR, - cataloglib=settings.SUBMIT_YANG_CATALOG_MODEL_DIR, ) - code, out, err = pipe(cmd) - out = out.decode('utf-8') - err = err.decode('utf-8') - if code > 0 or len(err.strip()) > 0: - err_lines = err.splitlines() - for line in err_lines: - if line.strip(): - try: - if 'err : ' in line: - errors += 1 - if 'warn: ' in line: - warnings += 1 - except ValueError: - pass - #passed = passed and code == 0 # For the submission tool. 
Yang checks always pass - message += "{version}: {template}:\n{output}\n".format( - version=tool_version[command], - template=cmd_template, - output=out + "No validation errors\n" if (code == 0 and len(err) == 0) else out + err, - ) - set_coverage_checking(True) + with disable_coverage(): # pragma: no cover + if settings.SUBMIT_YANGLINT_COMMAND and os.path.exists(settings.YANGLINT_BINARY): + cmd_template = settings.SUBMIT_YANGLINT_COMMAND + command = [ w for w in cmd_template.split() if not '=' in w ][0] + cmd = cmd_template.format(model=path, rfclib=settings.SUBMIT_YANG_RFC_MODEL_DIR, tmplib=workdir, + draftlib=settings.SUBMIT_YANG_DRAFT_MODEL_DIR, ianalib=settings.SUBMIT_YANG_IANA_MODEL_DIR, + cataloglib=settings.SUBMIT_YANG_CATALOG_MODEL_DIR, ) + code, out, err = pipe(cmd) + out = out.decode('utf-8') + err = err.decode('utf-8') + if code > 0 or len(err.strip()) > 0: + err_lines = err.splitlines() + for line in err_lines: + if line.strip(): + try: + if 'err : ' in line: + errors += 1 + if 'warn: ' in line: + warnings += 1 + except ValueError: + pass + #passed = passed and code == 0 # For the submission tool. 
Yang checks always pass + message += "{version}: {template}:\n{output}\n".format( + version=tool_version[command], + template=cmd_template, + output=out + "No validation errors\n" if (code == 0 and len(err) == 0) else out + err, + ) else: errors += 1 message += "No such file: %s\nPossible mismatch between extracted xym file name and returned module name?\n" % (path) diff --git a/ietf/sync/iana.py b/ietf/sync/iana.py index f46fe407d4..0d40c5337e 100644 --- a/ietf/sync/iana.py +++ b/ietf/sync/iana.py @@ -66,8 +66,8 @@ def update_rfc_log_from_protocol_page(rfc_names, rfc_must_published_later_than): def fetch_changes_json(url, start, end): - url += "?start=%s&end=%s" % (urlquote(start.astimezone(datetime.timezone.utc).strftime("%Y-%m-%d %H:%M:%S")), - urlquote(end.astimezone(datetime.timezone.utc).strftime("%Y-%m-%d %H:%M:%S"))) + url += "?start=%s&end=%s" % (urlquote(start.astimezone(datetime.UTC).strftime("%Y-%m-%d %H:%M:%S")), + urlquote(end.astimezone(datetime.UTC).strftime("%Y-%m-%d %H:%M:%S"))) # HTTP basic auth username = "ietfsync" password = settings.IANA_SYNC_PASSWORD @@ -161,7 +161,7 @@ def update_history_with_changes(changes, send_email=True): for c in changes: docname = c['doc'] - timestamp = datetime.datetime.strptime(c["time"], "%Y-%m-%d %H:%M:%S",).replace(tzinfo=datetime.timezone.utc) + timestamp = datetime.datetime.strptime(c["time"], "%Y-%m-%d %H:%M:%S",).replace(tzinfo=datetime.UTC) if c['type'] in ("iana_state", "iana_review"): if c['type'] == "iana_state": @@ -247,7 +247,7 @@ def parse_review_email(text): review_time = parsedate_to_datetime(msg["Date"]) # parsedate_to_datetime() may return a naive timezone - treat as UTC if review_time.tzinfo is None or review_time.tzinfo.utcoffset(review_time) is None: - review_time = review_time.replace(tzinfo=datetime.timezone.utc) + review_time = review_time.replace(tzinfo=datetime.UTC) # by by = None diff --git a/ietf/sync/tasks.py b/ietf/sync/tasks.py index 18ab4fe66e..e4174d3729 100644 --- 
a/ietf/sync/tasks.py +++ b/ietf/sync/tasks.py @@ -152,7 +152,7 @@ def iana_protocols_update_task(): 2012, 11, 26, - tzinfo=datetime.timezone.utc, + tzinfo=datetime.UTC, ) try: diff --git a/ietf/sync/tests.py b/ietf/sync/tests.py index 182b6e24c4..3432f6214a 100644 --- a/ietf/sync/tests.py +++ b/ietf/sync/tests.py @@ -206,7 +206,7 @@ def test_iana_review_mail(self): doc_name, review_time, by, comment = iana.parse_review_email(msg.encode('utf-8')) self.assertEqual(doc_name, draft.name) - self.assertEqual(review_time, datetime.datetime(2012, 5, 10, 12, 0, rtime, tzinfo=datetime.timezone.utc)) + self.assertEqual(review_time, datetime.datetime(2012, 5, 10, 12, 0, rtime, tzinfo=datetime.UTC)) self.assertEqual(by, Person.objects.get(user__username="iana")) self.assertIn("there are no IANA Actions", comment.replace("\n", "")) @@ -240,7 +240,7 @@ def test_ingest_review_email(self, mock_parse_review_email, mock_add_review_comm args = ( "doc-name", - datetime.datetime.now(tz=datetime.timezone.utc), + datetime.datetime.now(tz=datetime.UTC), PersonFactory(), "yadda yadda yadda", ) @@ -1121,7 +1121,7 @@ def test_iana_protocols_update_task( ) self.assertEqual( published_later_than, - {datetime.datetime(2012,11,26,tzinfo=datetime.timezone.utc)} + {datetime.datetime(2012,11,26,tzinfo=datetime.UTC)} ) # try with an exception diff --git a/ietf/utils/test_smtpserver.py b/ietf/utils/aiosmtpd.py similarity index 72% rename from ietf/utils/test_smtpserver.py rename to ietf/utils/aiosmtpd.py index 40da758d66..3e4cd65dd9 100644 --- a/ietf/utils/test_smtpserver.py +++ b/ietf/utils/aiosmtpd.py @@ -1,10 +1,14 @@ # Copyright The IETF Trust 2014-2025, All Rights Reserved -# -*- coding: utf-8 -*- +"""aiosmtpd-related utilities +These are for testing / dev use. If you're using this for production code, think very +hard about the choices you're making... 
+""" +from aiosmtpd import handlers from aiosmtpd.controller import Controller from aiosmtpd.smtp import SMTP from email.utils import parseaddr -from typing import Optional +from typing import Optional, TextIO class SMTPTestHandler: @@ -54,3 +58,16 @@ def start(self): def stop(self): self.controller.stop() + + +class DevDebuggingHandler(handlers.Debugging): + """Debugging handler for use in dev ONLY""" + def __init__(self, stream: Optional[TextIO] = None): + # Allow longer lines than the 1001 that RFC 5321 requires. As of 2025-04-16 the + # datatracker emits some non-compliant messages. + # See https://aiosmtpd.aio-libs.org/en/latest/smtp.html + # Doing this in a handler class is a huge hack. Tests all pass with this set + # to 4000, but make the limit longer for dev just in case. + SMTP.line_length_limit = 10000 + super().__init__(stream) + diff --git a/ietf/utils/coverage.py b/ietf/utils/coverage.py new file mode 100644 index 0000000000..bd205ce586 --- /dev/null +++ b/ietf/utils/coverage.py @@ -0,0 +1,90 @@ +# Copyright The IETF Trust 2025, All Rights Reserved +from coverage import Coverage, CoverageData, FileReporter +from coverage.control import override_config as override_coverage_config +from coverage.results import Numbers +from coverage.report_core import get_analysis_to_report +from coverage.results import Analysis +from django.conf import settings + + +class CoverageManager: + checker: Coverage | None = None + started = False + + def start(self): + if settings.SERVER_MODE != "production" and not self.started: + self.checker = Coverage( + source=[settings.BASE_DIR], + cover_pylib=False, + omit=settings.TEST_CODE_COVERAGE_EXCLUDE_FILES, + ) + for exclude_regex in getattr( + settings, + "TEST_CODE_COVERAGE_EXCLUDE_LINES", + [], + ): + self.checker.exclude(exclude_regex) + self.checker.start() + self.started = True + + def stop(self): + if self.checker is not None: + self.checker.stop() + + def save(self): + if self.checker is not None: + self.checker.save() 
+ + def report(self, include: list[str] | None = None): + if self.checker is None: + return None + reporter = CustomDictReporter() + with override_coverage_config( + self.checker, + report_include=include, + ): + return reporter.report(self.checker) + + +class CustomDictReporter: # pragma: no cover + total = Numbers() + + def report(self, coverage): + coverage_data = coverage.get_data() + coverage_data.set_query_contexts(None) + measured_files = {} + for file_reporter, analysis in get_analysis_to_report(coverage, None): + measured_files[file_reporter.relative_filename()] = self.report_one_file( + coverage_data, + analysis, + file_reporter, + ) + tot_numer, tot_denom = self.total.ratio_covered + return { + "coverage": 1 if tot_denom == 0 else tot_numer / tot_denom, + "covered": measured_files, + "format": 5, + } + + def report_one_file( + self, + coverage_data: CoverageData, + analysis: Analysis, + file_reporter: FileReporter, + ): + """Extract the relevant report data for a single file.""" + nums = analysis.numbers + self.total += nums + n_statements = nums.n_statements + numer, denom = nums.ratio_covered + fraction_covered = 1 if denom == 0 else numer / denom + missing_line_nums = sorted(analysis.missing) + # Extract missing lines from source files + source_lines = file_reporter.source().splitlines() + missing_lines = [source_lines[num - 1] for num in missing_line_nums] + return ( + n_statements, + fraction_covered, + missing_line_nums, + missing_lines, + ) diff --git a/ietf/utils/decorators.py b/ietf/utils/decorators.py index 5e94dda91d..b50e0e7f96 100644 --- a/ietf/utils/decorators.py +++ b/ietf/utils/decorators.py @@ -15,21 +15,9 @@ import debug # pyflakes:ignore -from ietf.utils.test_runner import set_coverage_checking from ietf.person.models import Person, PersonalApiKey, PersonApiKeyEvent from ietf.utils import log -def skip_coverage(f): - @wraps(f) - def _wrapper(*args, **kwargs): - if settings.TEST_CODE_COVERAGE_CHECKER: - set_coverage_checking(False) - 
result = f(*args, **kwargs) - set_coverage_checking(True) - return result - else: - return f(*args, **kwargs) - return _wrapper def person_required(f): @wraps(f) diff --git a/ietf/utils/jstest.py b/ietf/utils/jstest.py index 215d78d65f..cf242fc4eb 100644 --- a/ietf/utils/jstest.py +++ b/ietf/utils/jstest.py @@ -3,6 +3,8 @@ import os +from django.conf import settings +from django.contrib.staticfiles.testing import StaticLiveServerTestCase from django.urls import reverse as urlreverse from unittest import skipIf @@ -21,7 +23,11 @@ from ietf.utils.pipe import pipe -from ietf.utils.test_runner import IetfLiveServerTestCase +from ietf.utils.test_runner import ( + set_template_coverage, + set_url_coverage, + load_and_run_fixtures, +) executable_name = 'geckodriver' code, out, err = pipe('{} --version'.format(executable_name)) @@ -49,17 +55,44 @@ def ifSeleniumEnabled(func): return skipIf(skip_selenium, skip_message)(func) -class IetfSeleniumTestCase(IetfLiveServerTestCase): +class IetfSeleniumTestCase(StaticLiveServerTestCase): # pragma: no cover login_view = 'ietf.ietfauth.views.login' + @classmethod + def setUpClass(cls): + set_template_coverage(False) + set_url_coverage(False) + super().setUpClass() + + @classmethod + def tearDownClass(cls): + super().tearDownClass() + set_template_coverage(True) + set_url_coverage(True) + def setUp(self): - super(IetfSeleniumTestCase, self).setUp() + super().setUp() + # LiveServerTestCase uses TransactionTestCase which seems to + # somehow interfere with the fixture loading process in + # IetfTestRunner when running multiple tests (the first test + # is fine, in the next ones the fixtures have been wiped) - + # this is no doubt solvable somehow, but until then we simply + # recreate them here + from ietf.person.models import Person + if not Person.objects.exists(): + load_and_run_fixtures(verbosity=0) + self.replaced_settings = dict() + if hasattr(settings, 'IDTRACKER_BASE_URL'): + self.replaced_settings['IDTRACKER_BASE_URL'] = 
settings.IDTRACKER_BASE_URL + settings.IDTRACKER_BASE_URL = self.live_server_url self.driver = start_web_driver() self.driver.set_window_size(1024,768) def tearDown(self): - super(IetfSeleniumTestCase, self).tearDown() self.driver.close() + for k, v in self.replaced_settings.items(): + setattr(settings, k, v) + super().tearDown() def absreverse(self,*args,**kwargs): return '%s%s'%(self.live_server_url, urlreverse(*args, **kwargs)) diff --git a/ietf/utils/meetecho.py b/ietf/utils/meetecho.py index 0dbf75736a..7654f67cd1 100644 --- a/ietf/utils/meetecho.py +++ b/ietf/utils/meetecho.py @@ -27,7 +27,7 @@ class MeetechoAPI: - timezone = datetime.timezone.utc + timezone = datetime.UTC def __init__( self, api_base: str, client_id: str, client_secret: str, request_timeout=3.01 @@ -504,7 +504,7 @@ def _should_send_update(self, session): if self.slides_notify_time < datetime.timedelta(0): return True # < 0 means "always" for a scheduled session else: - now = datetime.datetime.now(tz=datetime.timezone.utc) + now = datetime.datetime.now(tz=datetime.UTC) return (timeslot.time - self.slides_notify_time) < now < (timeslot.end_time() + self.slides_notify_time) def add(self, session: "Session", slides: "Document", order: int): diff --git a/ietf/utils/serialize.py b/ietf/utils/serialize.py index 342d211cf5..77f97942cb 100644 --- a/ietf/utils/serialize.py +++ b/ietf/utils/serialize.py @@ -16,7 +16,7 @@ def object_as_shallow_dict(obj): if isinstance(f, models.ManyToManyField): v = list(v.values_list("pk", flat=True)) elif isinstance(f, models.DateTimeField): - v = v.astimezone(datetime.timezone.utc).isoformat() + v = v.astimezone(datetime.UTC).isoformat() elif isinstance(f, models.DateField): v = v.strftime('%Y-%m-%d') diff --git a/ietf/utils/test_runner.py b/ietf/utils/test_runner.py index a9b2e5d572..1a3d4e5c3d 100644 --- a/ietf/utils/test_runner.py +++ b/ietf/utils/test_runner.py @@ -48,6 +48,8 @@ import subprocess import tempfile import copy +from contextlib import contextmanager 
+ import boto3 import botocore.config import factory.random @@ -57,10 +59,6 @@ from typing import Callable, Optional from urllib.parse import urlencode -from coverage.report import Reporter -from coverage.results import Numbers -from coverage.misc import NotPython - import django from django.conf import settings from django.contrib.staticfiles.testing import StaticLiveServerTestCase @@ -84,7 +82,7 @@ import ietf import ietf.utils.mail from ietf.utils.management.commands import pyflakes -from ietf.utils.test_smtpserver import SMTPTestServerDriver +from ietf.utils.aiosmtpd import SMTPTestServerDriver from ietf.utils.test_utils import TestCase from mypy_boto3_s3.service_resource import Bucket @@ -96,11 +94,11 @@ old_destroy: Optional[Callable] = None old_create: Optional[Callable] = None -template_coverage_collection = None -code_coverage_collection = None -url_coverage_collection = None +template_coverage_collection = False +url_coverage_collection = False validation_settings = {"validate_html": None, "validate_html_harder": None, "show_logging": False} + def start_vnu_server(port=8888): "Start a vnu validation server on the indicated port" vnu = subprocess.Popen( @@ -463,50 +461,29 @@ def save_test_results(failures, test_labels): tfile.write("%s OK\n" % (timestr, )) tfile.close() -def set_coverage_checking(flag=True): + +def set_template_coverage(flag): global template_coverage_collection - global code_coverage_collection + orig = template_coverage_collection + template_coverage_collection = flag + return orig + + +def set_url_coverage(flag): global url_coverage_collection - if settings.SERVER_MODE == 'test': - if flag: - settings.TEST_CODE_COVERAGE_CHECKER.collector.resume() - template_coverage_collection = True - code_coverage_collection = True - url_coverage_collection = True - else: - settings.TEST_CODE_COVERAGE_CHECKER.collector.pause() - template_coverage_collection = False - code_coverage_collection = False - url_coverage_collection = False - -class 
CoverageReporter(Reporter): - def report(self): - self.find_file_reporters(None) - - total = Numbers() - result = {"coverage": 0.0, "covered": {}, "format": 5, } - for fr in self.file_reporters: - try: - analysis = self.coverage._analyze(fr) - nums = analysis.numbers - missing_nums = sorted(analysis.missing) - with io.open(analysis.filename, encoding='utf-8') as file: - lines = file.read().splitlines() - missing_lines = [ lines[l-1] for l in missing_nums ] - result["covered"][fr.relative_filename()] = (nums.n_statements, nums.pc_covered/100.0, missing_nums, missing_lines) - total += nums - except KeyboardInterrupt: # pragma: not covered - raise - except Exception: - report_it = not self.config.ignore_errors - if report_it: - typ, msg = sys.exc_info()[:2] - if typ is NotPython and not fr.should_be_python(): - report_it = False - if report_it: - raise - result["coverage"] = total.pc_covered/100.0 - return result + orig = url_coverage_collection + url_coverage_collection = flag + return orig + + +@contextmanager +def disable_coverage(): + """Context manager/decorator that disables template/url coverage""" + orig_template = set_template_coverage(False) + orig_url = set_url_coverage(False) + yield + set_template_coverage(orig_template) + set_url_coverage(orig_url) class CoverageTest(unittest.TestCase): @@ -594,23 +571,24 @@ def ignore_pattern(regex, pattern): self.skipTest("Coverage switched off with --skip-coverage") def code_coverage_test(self): - if self.runner.check_coverage: - include = [ os.path.join(path, '*') for path in self.runner.test_paths ] - checker = self.runner.code_coverage_checker - checker.stop() + if ( + self.runner.check_coverage + and settings.TEST_CODE_COVERAGE_CHECKER is not None + ): + coverage_manager = settings.TEST_CODE_COVERAGE_CHECKER + coverage_manager.stop() # Save to the .coverage file - checker.save() + coverage_manager.save() # Apply the configured and requested omit and include data - checker.config.from_args(ignore_errors=None, 
omit=settings.TEST_CODE_COVERAGE_EXCLUDE_FILES, - include=include, file=None) - for pattern in settings.TEST_CODE_COVERAGE_EXCLUDE_LINES: - checker.exclude(pattern) # Maybe output an HTML report if self.runner.run_full_test_suite and self.runner.html_report: - checker.html_report(directory=settings.TEST_CODE_COVERAGE_REPORT_DIR) - # In any case, build a dictionary with per-file data for this run - reporter = CoverageReporter(checker, checker.config) - self.runner.coverage_data["code"] = reporter.report() + coverage_manager.checker.html_report( + directory=settings.TEST_CODE_COVERAGE_REPORT_DIR + ) + # Generate the output report data + self.runner.coverage_data["code"] = coverage_manager.report( + include=[str(pathlib.Path(p) / "*") for p in self.runner.test_paths] + ) self.report_test_result("code") else: self.skipTest("Coverage switched off with --skip-coverage") @@ -824,23 +802,12 @@ def setup_test_environment(self, **kwargs): "covered": {}, "format": 1, }, - "migration": { - "present": {}, - "format": 3, - } } settings.TEMPLATES[0]['OPTIONS']['loaders'] = ('ietf.utils.test_runner.TemplateCoverageLoader',) + settings.TEMPLATES[0]['OPTIONS']['loaders'] settings.MIDDLEWARE = ('ietf.utils.test_runner.record_urls_middleware',) + tuple(settings.MIDDLEWARE) - self.code_coverage_checker = settings.TEST_CODE_COVERAGE_CHECKER - if not self.code_coverage_checker._started: - sys.stderr.write(" ** Warning: In %s: Expected the coverage checker to have\n" - " been started already, but it wasn't. Doing so now. 
Coverage numbers\n" - " will be off, though.\n" % __name__) - self.code_coverage_checker.start() - if settings.SITE_ID != 1: print(" Changing SITE_ID to '1' during testing.") settings.SITE_ID = 1 @@ -1140,9 +1107,8 @@ def _extra_tests(self): ), ] if self.check_coverage: - global template_coverage_collection, code_coverage_collection, url_coverage_collection + global template_coverage_collection, url_coverage_collection template_coverage_collection = True - code_coverage_collection = True url_coverage_collection = True tests += [ PyFlakesTestCase(test_runner=self, methodName='pyflakes_test'), @@ -1226,37 +1192,6 @@ def run_tests(self, test_labels, extra_tests=None, **kwargs): return failures -class IetfLiveServerTestCase(StaticLiveServerTestCase): - @classmethod - def setUpClass(cls): - set_coverage_checking(False) - super(IetfLiveServerTestCase, cls).setUpClass() - - def setUp(self): - super(IetfLiveServerTestCase, self).setUp() - # LiveServerTestCase uses TransactionTestCase which seems to - # somehow interfere with the fixture loading process in - # IetfTestRunner when running multiple tests (the first test - # is fine, in the next ones the fixtures have been wiped) - - # this is no doubt solvable somehow, but until then we simply - # recreate them here - from ietf.person.models import Person - if not Person.objects.exists(): - load_and_run_fixtures(verbosity=0) - self.replaced_settings = dict() - if hasattr(settings, 'IDTRACKER_BASE_URL'): - self.replaced_settings['IDTRACKER_BASE_URL'] = settings.IDTRACKER_BASE_URL - settings.IDTRACKER_BASE_URL = self.live_server_url - - @classmethod - def tearDownClass(cls): - super(IetfLiveServerTestCase, cls).tearDownClass() - set_coverage_checking(True) - - def tearDown(self): - for k, v in self.replaced_settings.items(): - setattr(settings, k, v) - super().tearDown() class TestBlobstoreManager(): # N.B. 
buckets and blobstore are intentional Class-level attributes @@ -1267,7 +1202,11 @@ class TestBlobstoreManager(): aws_access_key_id="minio_root", aws_secret_access_key="minio_pass", aws_session_token=None, - config = botocore.config.Config(signature_version="s3v4"), + config = botocore.config.Config( + request_checksum_calculation="when_required", + response_checksum_validation="when_required", + signature_version="s3v4", + ), #config=botocore.config.Config(signature_version=botocore.UNSIGNED), verify=False ) diff --git a/ietf/utils/tests.py b/ietf/utils/tests.py index 01433888fe..3288309095 100644 --- a/ietf/utils/tests.py +++ b/ietf/utils/tests.py @@ -54,7 +54,11 @@ decode_header_value, show_that_mail_was_sent, ) -from ietf.utils.test_runner import get_template_paths, set_coverage_checking +from ietf.utils.test_runner import ( + get_template_paths, + set_template_coverage, + set_url_coverage, +) from ietf.utils.test_utils import TestCase, unicontent from ietf.utils.text import parse_unicode from ietf.utils.timezone import timezone_not_near_midnight @@ -311,14 +315,15 @@ def qualified(name): return list(callbacks) -class TemplateChecksTestCase(TestCase): +class TemplateChecksTestCase(TestCase): # pragma: no cover paths = [] # type: List[str] templates = {} # type: Dict[str, Template] def setUp(self): super().setUp() - set_coverage_checking(False) + set_template_coverage(False) + set_url_coverage(False) self.paths = get_template_paths() # already filtered ignores self.paths.sort() for path in self.paths: @@ -328,7 +333,8 @@ def setUp(self): pass def tearDown(self): - set_coverage_checking(True) + set_template_coverage(True) + set_url_coverage(True) super().tearDown() def test_parse_templates(self): diff --git a/ietf/utils/tests_coverage.py b/ietf/utils/tests_coverage.py new file mode 100644 index 0000000000..68795994a7 --- /dev/null +++ b/ietf/utils/tests_coverage.py @@ -0,0 +1,56 @@ +# Copyright The IETF Trust 2025, All Rights Reserved +"""Tests of the coverage.py 
module""" + +from unittest import mock + +from django.test import override_settings + +from .coverage import CoverageManager +from .test_utils import TestCase + + +class CoverageManagerTests(TestCase): + @override_settings( + BASE_DIR="/path/to/project/ietf", + TEST_CODE_COVERAGE_EXCLUDE_FILES=["a.py"], + TEST_CODE_COVERAGE_EXCLUDE_LINES=["some-regex"], + ) + @mock.patch("ietf.utils.coverage.Coverage") + def test_coverage_manager(self, mock_coverage): + """CoverageManager managed coverage correctly in non-production mode + + Presumes we're not running tests in production mode. + """ + cm = CoverageManager() + self.assertFalse(cm.started) + + cm.start() + self.assertTrue(cm.started) + self.assertEqual(cm.checker, mock_coverage.return_value) + self.assertTrue(mock_coverage.called) + coverage_kwargs = mock_coverage.call_args.kwargs + self.assertEqual(coverage_kwargs["source"], ["/path/to/project/ietf"]) + self.assertEqual(coverage_kwargs["omit"], ["a.py"]) + self.assertTrue(isinstance(cm.checker.exclude, mock.Mock)) + assert isinstance(cm.checker.exclude, mock.Mock) # for type checker + self.assertEqual(cm.checker.exclude.call_count, 1) + cm.checker.exclude.assert_called_with("some-regex") + + @mock.patch("ietf.utils.coverage.Coverage") + def test_coverage_manager_is_defanged_in_production(self, mock_coverage): + """CoverageManager is a no-op in production mode""" + # Be careful faking settings.SERVER_MODE, but there's really no other way to + # test this. 
+ with override_settings(SERVER_MODE="production"): + cm = CoverageManager() + cm.start() + + # Check that nothing actually happened + self.assertFalse(mock_coverage.called) + self.assertIsNone(cm.checker) + self.assertFalse(cm.started) + + # Check that other methods are guarded appropriately + cm.stop() + cm.save() + self.assertIsNone(cm.report()) diff --git a/ietf/utils/tests_meetecho.py b/ietf/utils/tests_meetecho.py index a10ac68c27..502e936483 100644 --- a/ietf/utils/tests_meetecho.py +++ b/ietf/utils/tests_meetecho.py @@ -98,7 +98,7 @@ def test_schedule_meeting(self): api_response = api.schedule_meeting( wg_token='my-token', room_id=18, - start_time=datetime.datetime(2021, 9, 14, 10, 0, 0, tzinfo=datetime.timezone.utc), + start_time=datetime.datetime(2021, 9, 14, 10, 0, 0, tzinfo=datetime.UTC), duration=datetime.timedelta(minutes=130), description='interim-2021-wgname-01', extrainfo='message for staff', @@ -127,7 +127,7 @@ def test_schedule_meeting(self): ) # same time in different time zones for start_time in [ - datetime.datetime(2021, 9, 14, 10, 0, 0, tzinfo=datetime.timezone.utc), + datetime.datetime(2021, 9, 14, 10, 0, 0, tzinfo=datetime.UTC), datetime.datetime(2021, 9, 14, 7, 0, 0, tzinfo=ZoneInfo('America/Halifax')), datetime.datetime(2021, 9, 14, 13, 0, 0, tzinfo=ZoneInfo('Europe/Kiev')), datetime.datetime(2021, 9, 14, 5, 0, 0, tzinfo=ZoneInfo('Pacific/Easter')), @@ -198,7 +198,7 @@ def test_fetch_meetings(self): '3d55bce0-535e-4ba8-bb8e-734911cf3c32': { 'room': { 'id': 18, - 'start_time': datetime.datetime(2021, 9, 14, 10, 0, 0, tzinfo=datetime.timezone.utc), + 'start_time': datetime.datetime(2021, 9, 14, 10, 0, 0, tzinfo=datetime.UTC), 'duration': datetime.timedelta(minutes=130), 'description': 'interim-2021-wgname-01', }, @@ -208,7 +208,7 @@ def test_fetch_meetings(self): 'e68e96d4-d38f-475b-9073-ecab46ca96a5': { 'room': { 'id': 23, - 'start_time': datetime.datetime(2021, 9, 15, 14, 30, 0, tzinfo=datetime.timezone.utc), + 'start_time': 
datetime.datetime(2021, 9, 15, 14, 30, 0, tzinfo=datetime.UTC), 'duration': datetime.timedelta(minutes=30), 'description': 'interim-2021-wgname-02', }, @@ -386,7 +386,7 @@ def test_request_helper_exception(self): def test_time_serialization(self): """Time de/serialization should be consistent""" - time = timezone.now().astimezone(datetime.timezone.utc).replace(microsecond=0) # cut off to 0 microseconds + time = timezone.now().astimezone(datetime.UTC).replace(microsecond=0) # cut off to 0 microseconds api = MeetechoAPI(API_BASE, CLIENT_ID, CLIENT_SECRET) self.assertEqual(api._deserialize_time(api._serialize_time(time)), time) @@ -400,7 +400,7 @@ def test_conference_from_api_dict(self): 'session-1-uuid': { 'room': { 'id': 1, - 'start_time': datetime.datetime(2022,2,4,1,2,3, tzinfo=datetime.timezone.utc), + 'start_time': datetime.datetime(2022,2,4,1,2,3, tzinfo=datetime.UTC), 'duration': datetime.timedelta(minutes=45), 'description': 'some-description', }, @@ -410,7 +410,7 @@ def test_conference_from_api_dict(self): 'session-2-uuid': { 'room': { 'id': 2, - 'start_time': datetime.datetime(2022,2,5,4,5,6, tzinfo=datetime.timezone.utc), + 'start_time': datetime.datetime(2022,2,5,4,5,6, tzinfo=datetime.UTC), 'duration': datetime.timedelta(minutes=90), 'description': 'another-description', }, @@ -427,7 +427,7 @@ def test_conference_from_api_dict(self): id=1, public_id='session-1-uuid', description='some-description', - start_time=datetime.datetime(2022, 2, 4, 1, 2, 3, tzinfo=datetime.timezone.utc), + start_time=datetime.datetime(2022, 2, 4, 1, 2, 3, tzinfo=datetime.UTC), duration=datetime.timedelta(minutes=45), url='https://example.com/some/url', deletion_token='delete-me', @@ -437,7 +437,7 @@ def test_conference_from_api_dict(self): id=2, public_id='session-2-uuid', description='another-description', - start_time=datetime.datetime(2022, 2, 5, 4, 5, 6, tzinfo=datetime.timezone.utc), + start_time=datetime.datetime(2022, 2, 5, 4, 5, 6, tzinfo=datetime.UTC), 
duration=datetime.timedelta(minutes=90), url='https://example.com/another/url', deletion_token='delete-me-too', @@ -453,7 +453,7 @@ def test_fetch(self, mock_fetch, _): 'session-1-uuid': { 'room': { 'id': 1, - 'start_time': datetime.datetime(2022,2,4,1,2,3, tzinfo=datetime.timezone.utc), + 'start_time': datetime.datetime(2022,2,4,1,2,3, tzinfo=datetime.UTC), 'duration': datetime.timedelta(minutes=45), 'description': 'some-description', }, @@ -472,7 +472,7 @@ def test_fetch(self, mock_fetch, _): id=1, public_id='session-1-uuid', description='some-description', - start_time=datetime.datetime(2022,2,4,1,2,3, tzinfo=datetime.timezone.utc), + start_time=datetime.datetime(2022,2,4,1,2,3, tzinfo=datetime.UTC), duration=datetime.timedelta(minutes=45), url='https://example.com/some/url', deletion_token='delete-me', @@ -488,7 +488,7 @@ def test_create(self, mock_schedule, _): 'session-1-uuid': { 'room': { 'id': 1, # value should match session_id param to cm.create() below - 'start_time': datetime.datetime(2022,2,4,1,2,3, tzinfo=datetime.timezone.utc), + 'start_time': datetime.datetime(2022,2,4,1,2,3, tzinfo=datetime.UTC), 'duration': datetime.timedelta(minutes=45), 'description': 'some-description', }, @@ -506,7 +506,7 @@ def test_create(self, mock_schedule, _): id=1, public_id='session-1-uuid', description='some-description', - start_time=datetime.datetime(2022,2,4,1,2,3, tzinfo=datetime.timezone.utc), + start_time=datetime.datetime(2022,2,4,1,2,3, tzinfo=datetime.UTC), duration=datetime.timedelta(minutes=45), url='https://example.com/some/url', deletion_token='delete-me', diff --git a/ietf/utils/timezone.py b/ietf/utils/timezone.py index a396b5e82d..e08dfa02f2 100644 --- a/ietf/utils/timezone.py +++ b/ietf/utils/timezone.py @@ -26,7 +26,7 @@ def _tzinfo(tz: Union[str, datetime.tzinfo, None]): Accepts a tzinfo or string containing a timezone name. Defaults to UTC if tz is None. 
""" if tz is None: - return datetime.timezone.utc + return datetime.UTC elif isinstance(tz, datetime.tzinfo): return tz else: diff --git a/k8s/settings_local.py b/k8s/settings_local.py index 482a4b110a..c1436e158b 100644 --- a/k8s/settings_local.py +++ b/k8s/settings_local.py @@ -280,7 +280,9 @@ def _multiline_to_list(s): PHOTOS_DIR = MEDIA_ROOT + PHOTOS_DIRNAME # Normally only set for debug, but needed until we have a real FS -DJANGO_VITE_MANIFEST_PATH = os.path.join(BASE_DIR, "static/dist-neue/manifest.json") +DJANGO_VITE["default"]["manifest_path"] = os.path.join( + BASE_DIR, "static/dist-neue/manifest.json" +) # Binaries that are different in the docker image DE_GFM_BINARY = "/usr/local/bin/de-gfm" @@ -379,6 +381,8 @@ def _multiline_to_list(s): secret_key=_blob_store_secret_key, security_token=None, client_config=botocore.config.Config( + request_checksum_calculation="when_required", + response_checksum_validation="when_required", signature_version="s3v4", connect_timeout=_blob_store_connect_timeout, read_timeout=_blob_store_read_timeout, diff --git a/patch/tastypie-django22-fielderror-response.patch b/patch/tastypie-django22-fielderror-response.patch index ffb152d319..3b4418fc66 100644 --- a/patch/tastypie-django22-fielderror-response.patch +++ b/patch/tastypie-django22-fielderror-response.patch @@ -1,5 +1,5 @@ ---- tastypie/resources.py.orig 2020-08-24 13:14:25.463166100 +0200 -+++ tastypie/resources.py 2020-08-24 13:15:55.133759224 +0200 +--- tastypie/resources.py.orig 2025-07-29 19:00:01.526948002 +0000 ++++ tastypie/resources.py 2025-07-29 19:07:15.324127008 +0000 @@ -12,7 +12,7 @@ ObjectDoesNotExist, MultipleObjectsReturned, ValidationError, FieldDoesNotExist ) @@ -9,13 +9,13 @@ from django.db.models.fields.related import ForeignKey from django.urls.conf import re_path from tastypie.utils.timezone import make_naive_utc -@@ -2198,6 +2198,8 @@ +@@ -2216,6 +2216,8 @@ return self.authorized_read_list(objects, bundle) except ValueError: raise 
BadRequest("Invalid resource lookup data provided (mismatched type).") + except FieldError as e: + raise BadRequest("Invalid resource lookup: %s." % e) - + def obj_get(self, bundle, **kwargs): """ --- tastypie/paginator.py.orig 2020-08-25 15:24:46.391588425 +0200 diff --git a/requirements.txt b/requirements.txt index 60d3d8152e..cf7c920fa3 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,86 +1,85 @@ # -*- conf-mode -*- -setuptools>=51.1.0 # Require this first, to prevent later errors +setuptools>=80.9.0 # Require this first, to prevent later errors # aiosmtpd>=1.4.6 -argon2-cffi>=21.3.0 # For the Argon2 password hasher option -beautifulsoup4>=4.11.1 # Only used in tests -bibtexparser>=1.2.0 # Only used in tests -bleach>=6 -types-bleach>=6 -boto3>=1.35,<1.36 -boto3-stubs[s3]>=1.35,<1.36 -botocore>=1.35,<1.36 -celery>=5.2.6 -coverage>=4.5.4,<5.0 # Coverage 5.x moves from a json database to SQLite. Moving to 5.x will require substantial rewrites in ietf.utils.test_runner and ietf.release.views +argon2-cffi>=25.1.0 # For the Argon2 password hasher option +beautifulsoup4>=4.13.4 # Only used in tests +bibtexparser>=1.4.3 # Only used in tests +bleach>=6.2.0 # project is deprecated but supported +types-bleach>=6.2.0 +boto3>=1.39.15 +boto3-stubs[s3]>=1.39.15 +botocore>=1.39.15 +celery>=5.5.3 +coverage>=7.9.2 defusedxml>=0.7.1 # for TastyPie when using xml; not a declared dependency Django>4.2,<5 -django-admin-rangefilter>=0.13.2 -django-analytical>=3.1.0 -django-bootstrap5>=21.3 -django-celery-beat>=2.3.0,<2.8.0 # pin until https://github.com/celery/django-celery-beat/issues/875 is resolved, then revisit -django-celery-results>=2.5.1 -django-csp>=3.7 -django-cors-headers>=3.11.0 -django-debug-toolbar>=3.2.4 -django-markup>=1.5 # Limited use - need to reconcile against direct use of markdown +django-admin-rangefilter>=0.13.3 +django-analytical>=3.2.0 +django-bootstrap5>=25.1 +django-celery-beat>=2.7.0,<2.8.0 # pin until 
https://github.com/celery/django-celery-beat/issues/875 is resolved, then revisit +django-celery-results>=2.6.0 +django-cors-headers>=4.7.0 +django-debug-toolbar>=6.0.0 +django-markup>=1.10 # Limited use - need to reconcile against direct use of markdown django-oidc-provider==0.8.2 # 0.8.3 changes logout flow and claim return -django-referrer-policy>=1.0 -django-simple-history>=3.0.0 -django-storages>=1.14.4 +django-simple-history>=3.10.1 +django-storages>=1.14.6 django-stubs>=4.2.7,<5 # The django-stubs version used determines the the mypy version indicated below -django-tastypie>=0.14.7,<0.15.0 # Version must be locked in sync with version of Django -django-vite>=2.0.2,<3 +django-tastypie>=0.15.1 # Version must be kept in sync with Django +django-vite>=3.1.0 django-widget-tweaks>=1.4.12 -djangorestframework>=3.15,<4 -djlint>=1.0.0 # To auto-indent templates via "djlint --profile django --reformat" -docutils>=0.18.1 # Used only by dbtemplates for RestructuredText +djangorestframework>=3.16.0 +docutils>=0.22.0 # Used only by dbtemplates for RestructuredText +types-docutils>=0.21.0 # should match docutils (0.22.0 not out yet) drf-spectacular>=0.27 -drf-standardized-errors[openapi] >= 0.14 -types-docutils>=0.18.1 -factory-boy>=3.3 -gunicorn>=20.1.0 +drf-standardized-errors[openapi] >= 0.15.0 +factory-boy>=3.3.3 +gunicorn>=23.0.0 hashids>=1.3.1 -html2text>=2020.1.16 # Used only to clean comment field of secr/sreq +html2text>=2025.4.15 # Used only to clean comment field of secr/sreq html5lib>=1.1 # Only used in tests icalendar>=5.0.0 -inflect>= 6.0.2 -jsonfield>=3.1.0,<3.2.0 # 3.2.0 needs py3.10; deprecated-replace with Django JSONField -jsonschema[format]>=4.2.1 -jwcrypto>=1.2 # for signed notifications - this is aspirational, and is not really used. -logging_tree>=1.9 # Used only by the showloggers management command -lxml>=5.3.0 -markdown>=3.3.6 -types-markdown>=3.3.6 -mypy~=1.7.0 # Version requirements determined by django-stubs. 
-oic>=1.3 # Used only by tests -Pillow>=9.1.0 -psycopg2>=2.9.6 -pyang>=2.5.3 -pydyf>0.8.0 -pyflakes>=2.4.0 -pyopenssl>=22.0.0 # Used by urllib3.contrib, which is used by PyQuery but not marked as a dependency -pyquery>=1.4.3 -python-dateutil>=2.8.2 -types-python-dateutil>=2.8.2 -python-json-logger>=3.1.0 +inflect>= 7.5.0 +jsonfield>=3.2.0 # deprecated - need to replace with Django's JSONField +jsonschema[format]>=4.25.0 +jwcrypto>=1.5.6 # for signed notifications - this is aspirational, and is not really used. +logging_tree>=1.10 # Used only by the showloggers management command +lxml>=6.0.0 +markdown>=3.8.0 +types-markdown>=3.8.0 +mock>=5.2.0 # should replace with unittest.mock and remove dependency +types-mock>=5.2.0 +mypy~=1.7.0 # Version requirements determined by django-stubs. +oic>=1.7.0 # Used only by tests +pillow>=11.3.0 +psycopg2>=2.9.10 +pyang>=2.6.1 +pydyf>=0.11.0 +pyflakes>=3.4.0 +pyopenssl>=25.1.0 # Used by urllib3.contrib, which is used by PyQuery but not marked as a dependency +pyquery>=2.0.1 +python-dateutil>=2.9.0 +types-python-dateutil>=2.9.0 +python-json-logger>=3.3.0 python-magic==0.4.18 # Versions beyond the yanked .19 and .20 introduce form failures pymemcache>=4.0.0 # for django.core.cache.backends.memcached.PyMemcacheCache -python-mimeparse>=1.6 # from TastyPie +python-mimeparse>=2.0.0 # from TastyPie pytz==2025.2 # Pinned as changes need to be vetted for their effect on Meeting fields -types-pytz==2025.2.0.20250809 # match pytz versionrequests>=2.31.0 -requests>=2.31.0 -types-requests>=2.27.1 -requests-mock>=1.9.3 +types-pytz==2025.2.0.20250809 # match pytz version +requests>=2.32.4 +types-requests>=2.32.4 +requests-mock>=1.12.1 rfc2html>=2.0.3 -scout-apm>=2.24.2 -selenium>=4.0 -tblib>=1.7.0 # So that the django test runner provides tracebacks -tlds>=2022042700 # Used to teach bleach about which TLDs currently exist -tqdm>=4.64.0 -types-zxcvbn~=4.5.0.20250223 # match zxcvbn version -Unidecode>=1.3.4 -urllib3>=1.26,<2 -weasyprint>=64.1 
-xml2rfc>=3.23.0 +scout-apm>=3.4.0 +selenium>=4.34.2 +tblib>=3.1.0 # So that the django test runner provides tracebacks +tlds>=2022042700 # Used to teach bleach about which TLDs currently exist +tqdm>=4.67.1 +unidecode>=1.4.0 +urllib3>=2.5.0 +weasyprint>=66.0 +xml2rfc>=3.30.0 xym>=0.6,<1.0 zxcvbn>=4.5.0 +types-zxcvbn~=4.5.0.20250223 # match zxcvbn version From b14512e840d8dfccf4e418ac184c77321595278b Mon Sep 17 00:00:00 2001 From: rjsparks <10996692+rjsparks@users.noreply.github.com> Date: Wed, 3 Sep 2025 22:29:19 +0000 Subject: [PATCH 197/405] ci: update base image target version to 20250903T2216 --- dev/build/Dockerfile | 2 +- dev/build/TARGET_BASE | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/dev/build/Dockerfile b/dev/build/Dockerfile index 658f1e5695..d3b186e1f5 100644 --- a/dev/build/Dockerfile +++ b/dev/build/Dockerfile @@ -1,4 +1,4 @@ -FROM ghcr.io/ietf-tools/datatracker-app-base:20250819T1645 +FROM ghcr.io/ietf-tools/datatracker-app-base:20250903T2216 LABEL maintainer="IETF Tools Team " ENV DEBIAN_FRONTEND=noninteractive diff --git a/dev/build/TARGET_BASE b/dev/build/TARGET_BASE index 9e510ad8db..9d8427efdb 100644 --- a/dev/build/TARGET_BASE +++ b/dev/build/TARGET_BASE @@ -1 +1 @@ -20250819T1645 +20250903T2216 From e444d9e73c78a1100ad5b909f2b15012be287889 Mon Sep 17 00:00:00 2001 From: Jennifer Richards Date: Wed, 3 Sep 2025 20:55:17 -0300 Subject: [PATCH 198/405] chore: use :latest instead of :py312 (#9460) --- .github/workflows/tests-az.yml | 2 +- dev/deploy-to-container/cli.js | 6 +++--- dev/diff/cli.js | 6 +++--- dev/tests/debug.sh | 2 +- dev/tests/docker-compose.debug.yml | 2 +- docker/app.Dockerfile | 2 +- docker/celery.Dockerfile | 2 +- 7 files changed, 11 insertions(+), 11 deletions(-) diff --git a/.github/workflows/tests-az.yml b/.github/workflows/tests-az.yml index d1fe0cdf62..8553563a19 100644 --- a/.github/workflows/tests-az.yml +++ b/.github/workflows/tests-az.yml @@ -62,7 +62,7 @@ jobs: echo "Starting Containers..." 
sudo docker network create dtnet sudo docker run -d --name db --network=dtnet ghcr.io/ietf-tools/datatracker-db:latest & - sudo docker run -d --name app --network=dtnet ghcr.io/ietf-tools/datatracker-app-base:py312 sleep infinity & + sudo docker run -d --name app --network=dtnet ghcr.io/ietf-tools/datatracker-app-base:latest sleep infinity & wait echo "Cloning datatracker repo..." diff --git a/dev/deploy-to-container/cli.js b/dev/deploy-to-container/cli.js index 2f0faad151..1a2d993ac4 100644 --- a/dev/deploy-to-container/cli.js +++ b/dev/deploy-to-container/cli.js @@ -85,7 +85,7 @@ async function main () { // Pull latest Datatracker Base image console.info('Pulling latest Datatracker base docker image...') - const appImagePullStream = await dock.pull('ghcr.io/ietf-tools/datatracker-app-base:py312') + const appImagePullStream = await dock.pull('ghcr.io/ietf-tools/datatracker-app-base:latest') await new Promise((resolve, reject) => { dock.modem.followProgress(appImagePullStream, (err, res) => err ? reject(err) : resolve(res)) }) @@ -214,7 +214,7 @@ async function main () { const celeryContainers = {} for (const conConf of conConfs) { celeryContainers[conConf.name] = await dock.createContainer({ - Image: 'ghcr.io/ietf-tools/datatracker-app-base:py312', + Image: 'ghcr.io/ietf-tools/datatracker-app-base:latest', name: `dt-${conConf.name}-${branch}`, Hostname: `dt-${conConf.name}-${branch}`, Env: [ @@ -244,7 +244,7 @@ async function main () { // Create Datatracker container console.info(`Creating Datatracker docker container... 
[dt-app-${branch}]`) const appContainer = await dock.createContainer({ - Image: 'ghcr.io/ietf-tools/datatracker-app-base:py312', + Image: 'ghcr.io/ietf-tools/datatracker-app-base:latest', name: `dt-app-${branch}`, Hostname: `dt-app-${branch}`, Env: [ diff --git a/dev/diff/cli.js b/dev/diff/cli.js index 0cf353cc65..461b0c37a0 100644 --- a/dev/diff/cli.js +++ b/dev/diff/cli.js @@ -567,7 +567,7 @@ async function main () { { title: 'Pulling latest Datatracker base docker image...', task: async (subctx, subtask) => { - const appImagePullStream = await dock.pull('ghcr.io/ietf-tools/datatracker-app-base:py312') + const appImagePullStream = await dock.pull('ghcr.io/ietf-tools/datatracker-app-base:latest') await new Promise((resolve, reject) => { dock.modem.followProgress(appImagePullStream, (err, res) => err ? reject(err) : resolve(res)) }) @@ -648,7 +648,7 @@ async function main () { title: 'Creating source Datatracker docker container...', task: async (subctx, subtask) => { containers.appSource = await dock.createContainer({ - Image: 'ghcr.io/ietf-tools/datatracker-app-base:py312', + Image: 'ghcr.io/ietf-tools/datatracker-app-base:latest', name: 'dt-diff-app-source', Tty: true, Hostname: 'appsource', @@ -664,7 +664,7 @@ async function main () { title: 'Creating target Datatracker docker container...', task: async (subctx, subtask) => { containers.appTarget = await dock.createContainer({ - Image: 'ghcr.io/ietf-tools/datatracker-app-base:py312', + Image: 'ghcr.io/ietf-tools/datatracker-app-base:latest', name: 'dt-diff-app-target', Tty: true, Hostname: 'apptarget', diff --git a/dev/tests/debug.sh b/dev/tests/debug.sh index e92e6d9b2a..d87c504bb9 100644 --- a/dev/tests/debug.sh +++ b/dev/tests/debug.sh @@ -9,7 +9,7 @@ # Simply type "exit" + ENTER to exit and shutdown this test environment. echo "Fetching latest images..." 
-docker pull ghcr.io/ietf-tools/datatracker-app-base:py312 +docker pull ghcr.io/ietf-tools/datatracker-app-base:latest docker pull ghcr.io/ietf-tools/datatracker-db:latest echo "Starting containers..." docker compose -f docker-compose.debug.yml -p dtdebug --compatibility up -d diff --git a/dev/tests/docker-compose.debug.yml b/dev/tests/docker-compose.debug.yml index 168bbd4e92..8117b92375 100644 --- a/dev/tests/docker-compose.debug.yml +++ b/dev/tests/docker-compose.debug.yml @@ -5,7 +5,7 @@ version: '3.8' services: app: - image: ghcr.io/ietf-tools/datatracker-app-base:py312 + image: ghcr.io/ietf-tools/datatracker-app-base:latest command: -f /dev/null working_dir: /__w/datatracker/datatracker entrypoint: tail diff --git a/docker/app.Dockerfile b/docker/app.Dockerfile index e3df9bd4b4..fee3833733 100644 --- a/docker/app.Dockerfile +++ b/docker/app.Dockerfile @@ -1,4 +1,4 @@ -FROM ghcr.io/ietf-tools/datatracker-app-base:py312 +FROM ghcr.io/ietf-tools/datatracker-app-base:latest LABEL maintainer="IETF Tools Team " ENV DEBIAN_FRONTEND=noninteractive diff --git a/docker/celery.Dockerfile b/docker/celery.Dockerfile index 279d5c7550..e7c7b9cc3f 100644 --- a/docker/celery.Dockerfile +++ b/docker/celery.Dockerfile @@ -1,4 +1,4 @@ -FROM ghcr.io/ietf-tools/datatracker-app-base:py312 +FROM ghcr.io/ietf-tools/datatracker-app-base:latest LABEL maintainer="IETF Tools Team " ENV DEBIAN_FRONTEND=noninteractive From c4d69d0118a068c873dc066fe9adde829e86f14e Mon Sep 17 00:00:00 2001 From: Robert Sparks Date: Fri, 5 Sep 2025 17:22:52 -0500 Subject: [PATCH 199/405] feat: links to postorious (#9470) * feat: links to postorious * fix: remove redundant divider * chore: better use of whitespace * chore: remove what the cat typed in * chore: more stray removal --- ietf/templates/base/menu_user.html | 31 ++++++++++++++++++++++++++++++ 1 file changed, 31 insertions(+) diff --git a/ietf/templates/base/menu_user.html b/ietf/templates/base/menu_user.html index 9a0bf56838..fd921638a4 100644 --- 
a/ietf/templates/base/menu_user.html +++ b/ietf/templates/base/menu_user.html @@ -115,6 +115,37 @@ {% endif %} +
  • + + List subscriptions + + +
  • {% if user|has_role:"Reviewer" %}
  • Date: Tue, 16 Sep 2025 17:55:07 -0300 Subject: [PATCH 200/405] chore: hide weasyprint internal deprecation warning (#9544) --- ietf/settings.py | 1 + 1 file changed, 1 insertion(+) diff --git a/ietf/settings.py b/ietf/settings.py index 753508dc99..d6be1d1e0f 100644 --- a/ietf/settings.py +++ b/ietf/settings.py @@ -34,6 +34,7 @@ warnings.filterwarnings("ignore", message="datetime.datetime.utcnow\\(\\) is deprecated", module="oic.utils.time_util") warnings.filterwarnings("ignore", message="datetime.datetime.utcfromtimestamp\\(\\) is deprecated", module="oic.utils.time_util") warnings.filterwarnings("ignore", message="datetime.datetime.utcfromtimestamp\\(\\) is deprecated", module="pytz.tzinfo") +warnings.filterwarnings("ignore", message="'instantiateVariableFont' is deprecated", module="weasyprint") base_path = pathlib.Path(__file__).resolve().parent From c71871855769d9c2980cad853cf92a9ec25cb50a Mon Sep 17 00:00:00 2001 From: Robert Sparks Date: Tue, 16 Sep 2025 15:55:45 -0500 Subject: [PATCH 201/405] fix: normalize 3gpp groups and resolve duplication (#9505) * fix: don't bother the rfc-editor with group type sdo name changes * fix: normalize 3gpp groups and resolve duplication * fix: improve guard, update t2 * fix: exclude the task from test coverage * fix: exclude harder * fix: tweak the pragma --- ietf/group/models.py | 2 + ietf/group/tasks.py | 121 ++++++++++++++++++++++++++++++++++++++++++- 2 files changed, 121 insertions(+), 2 deletions(-) diff --git a/ietf/group/models.py b/ietf/group/models.py index 608dcc86b9..2d5e7c4e6f 100644 --- a/ietf/group/models.py +++ b/ietf/group/models.py @@ -491,6 +491,8 @@ def notify_rfceditor_of_group_name_change(sender, instance=None, **kwargs): current = Group.objects.get(pk=instance.pk) except Group.DoesNotExist: return + if current.type_id == "sdo": + return addr = settings.RFC_EDITOR_GROUP_NOTIFICATION_EMAIL if addr and instance.name != current.name: msg = """ diff --git a/ietf/group/tasks.py b/ietf/group/tasks.py index 
693aafb385..ada83e80e2 100644 --- a/ietf/group/tasks.py +++ b/ietf/group/tasks.py @@ -9,12 +9,15 @@ from django.conf import settings from django.template.loader import render_to_string +from django.utils import timezone from ietf.doc.storage_utils import store_file +from ietf.liaisons.models import LiaisonStatement from ietf.utils import log +from ietf.utils.test_runner import disable_coverage -from .models import Group -from .utils import fill_in_charter_info, fill_in_wg_drafts, fill_in_wg_roles +from .models import Group, GroupHistory +from .utils import fill_in_charter_info, fill_in_wg_drafts, fill_in_wg_roles, save_group_in_history from .views import extract_last_name, roles @@ -113,3 +116,117 @@ def generate_wg_summary_files_task(): store_file("indexes", "1wg-summary.txt", f, allow_overwrite=True) with summary_by_acronym_file.open("rb") as f: store_file("indexes", "1wg-summary-by-acronym.txt", f, allow_overwrite=True) + +@shared_task +@disable_coverage() +def run_once_adjust_liaison_groups(): # pragma: no cover + log.log("Starting run_once_adjust_liaison_groups") + if all( + [ + Group.objects.filter( + acronym__in=[ + "3gpp-tsg-ct", + "3gpp-tsg-ran-wg1", + "3gpp-tsg-ran-wg4", + "3gpp-tsg-sa", + "3gpp-tsg-sa-wg5", + "3gpp-tsgct", # duplicates 3gpp-tsg-ct above already + "3gpp-tsgct-ct1", # will normalize all acronyms to hyphenated form + "3gpp-tsgct-ct3", # and consistently match the name + "3gpp-tsgct-ct4", # (particularly use of WG) + "3gpp-tsgran", + "3gpp-tsgran-ran2", + "3gpp-tsgsa", # duplicates 3gpp-tsg-sa above + "3gpp-tsgsa-sa2", # will normalize + "3gpp-tsgsa-sa3", + "3gpp-tsgsa-sa4", + "3gpp-tsgt-wg2", + ] + ).count() + == 16, + not Group.objects.filter( + acronym__in=[ + "3gpp-tsg-ran-wg3", + "3gpp-tsg-ct-wg1", + "3gpp-tsg-ct-wg3", + "3gpp-tsg-ct-wg4", + "3gpp-tsg-ran", + "3gpp-tsg-ran-wg2", + "3gpp-tsg-sa-wg2", + "3gpp-tsg-sa-wg3", + "3gpp-tsg-sa-wg4", + "3gpp-tsg-t-wg2", + ] + ).exists(), + Group.objects.filter(acronym="o3gpptsgran3").exists(), + 
not LiaisonStatement.objects.filter( + to_groups__acronym__in=["3gpp-tsgct", "3gpp-tsgsa"] + ).exists(), + not LiaisonStatement.objects.filter( + from_groups__acronym="3gpp-tsgct" + ).exists(), + LiaisonStatement.objects.filter(from_groups__acronym="3gpp-tsgsa").count() + == 1, + LiaisonStatement.objects.get(from_groups__acronym="3gpp-tsgsa").pk == 1448, + ] + ): + for old_acronym, new_acronym, new_name in ( + ("o3gpptsgran3", "3gpp-tsg-ran-wg3", "3GPP TSG RAN WG3"), + ("3gpp-tsgct-ct1", "3gpp-tsg-ct-wg1", "3GPP TSG CT WG1"), + ("3gpp-tsgct-ct3", "3gpp-tsg-ct-wg3", "3GPP TSG CT WG3"), + ("3gpp-tsgct-ct4", "3gpp-tsg-ct-wg4", "3GPP TSG CT WG4"), + ("3gpp-tsgran", "3gpp-tsg-ran", "3GPP TSG RAN"), + ("3gpp-tsgran-ran2", "3gpp-tsg-ran-wg2", "3GPP TSG RAN WG2"), + ("3gpp-tsgsa-sa2", "3gpp-tsg-sa-wg2", "3GPP TSG SA WG2"), + ("3gpp-tsgsa-sa3", "3gpp-tsg-sa-wg3", "3GPP TSG SA WG3"), + ("3gpp-tsgsa-sa4", "3gpp-tsg-sa-wg4", "3GPP TSG SA WG4"), + ("3gpp-tsgt-wg2", "3gpp-tsg-t-wg2", "3GPP TSG T WG2"), + ): + group = Group.objects.get(acronym=old_acronym) + save_group_in_history(group) + group.time = timezone.now() + group.acronym = new_acronym + group.name = new_name + if old_acronym.startswith("3gpp-tsgct-"): + group.parent = Group.objects.get(acronym="3gpp-tsg-ct") + elif old_acronym.startswith("3gpp-tsgsa-"): + group.parent = Group.objects.get(acronym="3gpp-tsg-sa") + group.save() + group.groupevent_set.create( + time=group.time, + by_id=1, # (System) + type="info_changed", + desc=f"acronym changed from {old_acronym} to {new_acronym}, name set to {new_name}", + ) + + for acronym, new_name in (("3gpp-tsg-ct", "3GPP TSG CT"),): + group = Group.objects.get(acronym=acronym) + save_group_in_history(group) + group.time = timezone.now() + group.name = new_name + group.save() + group.groupevent_set.create( + time=group.time, + by_id=1, # (System) + type="info_changed", + desc=f"name set to {new_name}", + ) + + ls = LiaisonStatement.objects.get(pk=1448) + 
ls.from_groups.remove(Group.objects.get(acronym="3gpp-tsgsa")) + ls.from_groups.add(Group.objects.get(acronym="3gpp-tsg-sa")) + + # Rewriting history to effectively merge the histories of the duplicate groups + GroupHistory.objects.filter(parent__acronym="3gpp-tsgsa").update( + parent=Group.objects.get(acronym="3gpp-tsg-sa") + ) + GroupHistory.objects.filter(parent__acronym="3gpp-tsgct").update( + parent=Group.objects.get(acronym="3gpp-tsg-ct") + ) + + deleted = Group.objects.filter( + acronym__in=["3gpp-tsgsa", "3gpp-tsgct"] + ).delete() + log.log(f"Deleted Groups: {deleted}") + else: + log.log("* Refusing to continue as preconditions have changed") From 0a1705193dfde6695921191540049b88d91d9ec9 Mon Sep 17 00:00:00 2001 From: Robert Sparks Date: Wed, 17 Sep 2025 12:45:32 -0500 Subject: [PATCH 202/405] fix: update draft-stream-ietf state descriptions (#9543) --- .../0026_change_wg_state_descriptions.py | 117 ++++++++++++++++++ 1 file changed, 117 insertions(+) create mode 100644 ietf/doc/migrations/0026_change_wg_state_descriptions.py diff --git a/ietf/doc/migrations/0026_change_wg_state_descriptions.py b/ietf/doc/migrations/0026_change_wg_state_descriptions.py new file mode 100644 index 0000000000..b02b12c97e --- /dev/null +++ b/ietf/doc/migrations/0026_change_wg_state_descriptions.py @@ -0,0 +1,117 @@ +# Copyright The IETF Trust 2025, All Rights Reserved + +from django.db import migrations + +def forward(apps, schema_editor): + State = apps.get_model("doc","State") + for name, desc in [ + ("WG Document","The document has been adopted by the Working Group (WG) and is under development. A document can only be adopted by one WG at a time. However, a document may be transferred between WGs."), + ("Parked WG Document","The Working Group (WG) document is in a temporary state where it will not be actively developed. 
The reason for the pause is explained via a datatracker comments section."), + ("Dead WG Document","The Working Group (WG) document has been abandoned by the WG. No further development is planned in this WG. A decision to resume work on this document and move it out of this state is possible."), + ("In WG Last Call","The Working Group (WG) document is currently subject to an active WG Last Call (WGLC) review per Section 7.4 of RFC2418."), + ("Waiting for Implementation","The progression of this Working Group (WG) document towards publication is paused as it awaits implementation. The process governing the approach to implementations is WG-specific."), + ("Held by WG","Held by Working Group (WG) chairs for administrative reasons. See document history for details."), + ("Waiting for WG Chair Go-Ahead","The Working Group (WG) document has completed Working Group Last Call (WGLC), but the WG chair(s) are not yet ready to call consensus on the document. The reasons for this may include comments from the WGLC need to be responded to, or a revision to the document is needed"), + ("WG Consensus: Waiting for Write-Up","The Working Group (WG) document has consensus to proceed to publication. However, the document is waiting for a document shepherd write-up per RFC4858."), + ("Submitted to IESG for Publication","The Working Group (WG) document has left the WG and been submitted to the Internet Engineering Steering Group (IESG) for evaluation and publication. See the “IESG State” or “RFC Editor State” for further details on the state of the document."), + ("Candidate for WG Adoption","The individual submission document has been marked by the Working Group (WG) chairs as a candidate for adoption by the WG, but no adoption call has been started."), + ("Call For Adoption By WG Issued","A call for adoption of the individual submission document has been issued by the Working Group (WG) chairs. 
This call is still running but the WG has not yet reached consensus for adoption."), + ("Adopted by a WG","The individual submission document has been adopted by the Working Group (WG), but a WG document replacing this document with the typical naming convention of 'draft- ietf-wgname-topic-nn' has not yet been submitted."), + ("Adopted for WG Info Only","The document is adopted by the Working Group (WG) for its internal use. The WG has decided that it will not pursue publication of it as an RFC."), + ]: + State.objects.filter(name=name).update(desc=desc) + +def reverse(apps, schema_editor): + State = apps.get_model("doc","State") + for name, desc in [ + ("WG Document","""4.2.4. WG Document + + The "WG Document" state describes an I-D that has been adopted by an IETF WG and is being actively developed. + + A WG Chair may transition an I-D into the "WG Document" state at any time as long as the I-D is not being considered or developed in any other WG. + + Alternatively, WG Chairs may rely upon new functionality to be added to the Datatracker to automatically move version-00 drafts into the "WG Document" state as described in Section 4.1. + + Under normal conditions, it should not be possible for an I-D to be in the "WG Document" state in more than one WG at a time. This said, I-Ds may be transferred from one WG to another with the consent of the WG Chairs and the responsible ADs."""), + ("Parked WG Document","""4.2.5. Parked WG Document + + A "Parked WG Document" is an I-D that has lost its author or editor, is waiting for another document to be written or for a review to be completed, or cannot be progressed by the working group for some other reason. + + Some of the annotation tags described in Section 4.3 may be used in conjunction with this state to indicate why an I-D has been parked, and/or what may need to happen for the I-D to be un-parked. 
+ + Parking a WG draft will not prevent it from expiring; however, this state can be used to indicate why the I-D has stopped progressing in the WG. + + A "Parked WG Document" that is not expired may be transferred from one WG to another with the consent of the WG Chairs and the responsible ADs."""), + ("Dead WG Document","""4.2.6. Dead WG Document + + A "Dead WG Document" is an I-D that has been abandoned. Note that 'Dead' is not always a final state for a WG I-D. If consensus is subsequently achieved, a "Dead WG Document" may be resurrected. A "Dead WG Document" that is not resurrected will eventually expire. + + Note that an I-D that is declared to be "Dead" in one WG and that is not expired may be transferred to a non-dead state in another WG with the consent of the WG Chairs and the responsible ADs."""), + ("In WG Last Call","""4.2.7. In WG Last Call + + A document "In WG Last Call" is an I-D for which a WG Last Call (WGLC) has been issued and is in progress. + + Note that conducting a WGLC is an optional part of the IETF WG process, per Section 7.4 of RFC 2418 [RFC2418]. + + If a WG Chair decides to conduct a WGLC on an I-D, the "In WG Last Call" state can be used to track the progress of the WGLC. The Chair may configure the Datatracker to send a WGLC message to one or more mailing lists when the Chair moves the I-D into this state. The WG Chair may also be able to select a different set of mailing lists for a different document undergoing a WGLC; some documents may deserve coordination with other WGs. + + A WG I-D in this state should remain "In WG Last Call" until the WG Chair moves it to another state. The WG Chair may configure the Datatracker to send an e-mail after a specified period of time to remind or 'nudge' the Chair to conclude the WGLC and to determine the next state for the document. + + It is possible for one WGLC to lead into another WGLC for the same document. 
For example, an I-D that completed a WGLC as an "Informational" document may need another WGLC if a decision is taken to convert the I-D into a Standards Track document."""), + ("Waiting for Implementation","""In some areas, it can be desirable to wait for multiple interoperable implementations before progressing a draft to be an RFC, and in some WGs this is required. This state should be entered after WG Last Call has completed."""), + ("Held by WG","""Held by WG, see document history for details."""), + ("Waiting for WG Chair Go-Ahead","""4.2.8. Waiting for WG Chair Go-Ahead + + A WG Chair may wish to place an I-D that receives a lot of comments during a WGLC into the "Waiting for WG Chair Go-Ahead" state. This state describes an I-D that has undergone a WGLC; however, the Chair is not yet ready to call consensus on the document. + + If comments from the WGLC need to be responded to, or a revision to the I-D is needed, the Chair may place an I-D into this state until all of the WGLC comments are adequately addressed and the (possibly revised) document is in the I-D repository."""), + ("WG Consensus: Waiting for Write-Up","""4.2.9. WG Consensus: Waiting for Writeup + + A document in the "WG Consensus: Waiting for Writeup" state has essentially completed its development within the working group, and is nearly ready to be sent to the IESG for publication. The last thing to be done is the preparation of a protocol writeup by a Document Shepherd. The IESG requires that a document shepherd writeup be completed before publication of the I-D is requested. The IETF document shepherding process and the role of a WG Document Shepherd is described in RFC 4858 [RFC4858] + + A WG Chair may call consensus on an I-D without a formal WGLC and transition an I-D that was in the "WG Document" state directly into this state. 
+ + The name of this state includes the words "Waiting for Writeup" because a good document shepherd writeup takes time to prepare."""), + ("Submitted to IESG for Publication","""4.2.10. Submitted to IESG for Publication + + This state describes a WG document that has been submitted to the IESG for publication and that has not been sent back to the working group for revision. + + An I-D in this state may be under review by the IESG, it may have been approved and be in the RFC Editor's queue, or it may have been published as an RFC. Other possibilities exist too. The document may be "Dead" (in the IESG state machine) or in a "Do Not Publish" state."""), + ("Candidate for WG Adoption","""The document has been marked as a candidate for WG adoption by the WG Chair. This state can be used before a call for adoption is issued (and the document is put in the "Call For Adoption By WG Issued" state), to indicate that the document is in the queue for a call for adoption, even if none has been issued yet."""), + ("Call For Adoption By WG Issued","""4.2.1. Call for Adoption by WG Issued + + The "Call for Adoption by WG Issued" state should be used to indicate when an I-D is being considered for adoption by an IETF WG. An I-D that is in this state is actively being considered for adoption and has not yet achieved consensus, preference, or selection in the WG. + + This state may be used to describe an I-D that someone has asked a WG to consider for adoption, if the WG Chair has agreed with the request. This state may also be used to identify an I-D that a WG Chair asked an author to write specifically for consideration as a candidate WG item [WGDTSPEC], and/or an I-D that is listed as a 'candidate draft' in the WG's charter. + + Under normal conditions, it should not be possible for an I-D to be in the "Call for Adoption by WG Issued" state in more than one working group at the same time. 
This said, it is not uncommon for authors to "shop" their I-Ds to more than one WG at a time, with the hope of getting their documents adopted somewhere. + + After this state is implemented in the Datatracker, an I-D that is in the "Call for Adoption by WG Issued" state will not be able to be "shopped" to any other WG without the consent of the WG Chairs and the responsible ADs impacted by the shopping. + + Note that Figure 1 includes an arc leading from this state to outside of the WG state machine. This illustrates that some I-Ds that are considered do not get adopted as WG drafts. An I-D that is not adopted as a WG draft will transition out of the WG state machine and revert back to having no stream-specific state; however, the status change history log of the I-D will record that the I-D was previously in the "Call for Adoption by WG Issued" state."""), + ("Adopted by a WG","""4.2.2. Adopted by a WG + + The "Adopted by a WG" state describes an individual submission I-D that an IETF WG has agreed to adopt as one of its WG drafts. + + WG Chairs who use this state will be able to clearly indicate when their WGs adopt individual submission I-Ds. This will facilitate the Datatracker's ability to correctly capture "Replaces" information for WG drafts and correct "Replaced by" information for individual submission I-Ds that have been replaced by WG drafts. + + This state is needed because the Datatracker uses the filename of an I-D as a key to search its database for status information about the I-D, and because the filename of a WG I-D is supposed to be different from the filename of an individual submission I-D. The filename of an individual submission I-D will typically be formatted as 'draft-author-wgname-topic-nn'. + + The filename of a WG document is supposed to be formatted as 'draft- ietf-wgname-topic-nn'. + + An individual I-D that is adopted by a WG may take weeks or months to be resubmitted by the author as a new (version-00) WG draft. 
If the "Adopted by a WG" state is not used, the Datatracker has no way to determine that an I-D has been adopted until a new version of the I-D is submitted to the WG by the author and until the I-D is approved for posting by a WG Chair."""), + ("Adopted for WG Info Only","""4.2.3. Adopted for WG Info Only + + The "Adopted for WG Info Only" state describes a document that contains useful information for the WG that adopted it, but the document is not intended to be published as an RFC. The WG will not actively develop the contents of the I-D or progress it for publication as an RFC. The only purpose of the I-D is to provide information for internal use by the WG."""), + ]: + State.objects.filter(name=name).update(desc=desc) + +class Migration(migrations.Migration): + + dependencies = [ + ("doc", "0025_storedobject_storedobject_unique_name_per_store"), + ] + + operations = [ + migrations.RunPython(forward, reverse) + ] From 327447f91fa21ef7620d958b5f8fc1f00d4f85a5 Mon Sep 17 00:00:00 2001 From: Robert Sparks Date: Wed, 17 Sep 2025 13:42:09 -0500 Subject: [PATCH 203/405] feat: iesg dashboard of wg documents (#9363) * feat: iesg dashboard of wg documents (#8999) * fix: removed template html cruft * fix: avoid triggering a Ghostery false positive * fix: remove related-id, milestone, and last meeting columns * fix: make wgs with no docs show in last table * fix: remove wg w/wo docs columns from first three thables * fix: Make table names closer to original request * chore: ruff format ietf.iesg.utils * feat: refactor, test, cleanup * chore: added comment about the test wg acronyms --------- Co-authored-by: Jennifer Richards --- ietf/iesg/tests.py | 1583 +++++++++++++++++++++++ ietf/iesg/urls.py | 1 + ietf/iesg/utils.py | 296 ++++- ietf/iesg/views.py | 12 +- ietf/templates/iesg/working_groups.html | 159 +++ 5 files changed, 2028 insertions(+), 23 deletions(-) create mode 100644 ietf/templates/iesg/working_groups.html diff --git a/ietf/iesg/tests.py b/ietf/iesg/tests.py 
index 746ea3f56f..f3778d1ded 100644 --- a/ietf/iesg/tests.py +++ b/ietf/iesg/tests.py @@ -2,6 +2,7 @@ # -*- coding: utf-8 -*- +from collections import Counter import datetime import io import tarfile @@ -24,7 +25,9 @@ from ietf.group.models import Group, GroupMilestone, Role from ietf.iesg.agenda import get_agenda_date, agenda_data, fill_in_agenda_administrivia, agenda_sections from ietf.iesg.models import TelechatDate, TelechatAgendaContent +from ietf.iesg.utils import get_wg_dashboard_info from ietf.name.models import StreamName, TelechatAgendaSectionName +from ietf.person.factories import PersonFactory from ietf.person.models import Person from ietf.utils.test_utils import TestCase, login_testing_unauthorized, unicontent from ietf.iesg.factories import IESGMgmtItemFactory, TelechatAgendaContentFactory @@ -182,6 +185,1586 @@ def test_ietf_activity(self): r = self.client.get(url) self.assertEqual(r.status_code, 200) + def test_working_groups(self): + # Clean away the wasted built-for-every-test noise + Group.objects.filter(type__in=["wg", "area"]).delete() + + ( + area_summary, + area_totals, + ad_summary, + noad_summary, + ad_totals, + noad_totals, + totals, + wg_summary, + ) = get_wg_dashboard_info() + self.assertEqual(area_summary, []) + self.assertEqual( + area_totals, {"group_count": 0, "doc_count": 0, "page_count": 0} + ) + self.assertEqual(ad_summary, []) + self.assertEqual(noad_summary, []) + self.assertEqual( + ad_totals, + { + "ad_group_count": 0, + "doc_group_count": 0, + "doc_count": 0, + "page_count": 0, + }, + ) + self.assertEqual( + noad_totals, + { + "ad_group_count": 0, + "doc_group_count": 0, + "doc_count": 0, + "page_count": 0, + }, + ) + self.assertEqual( + totals, + { + "group_count": 0, + "doc_count": 0, + "page_count": 0, + "groups_with_docs_count": 0, + }, + ) + self.assertEqual(wg_summary, []) + + # Construct Areas with WGs similar in shape to a real moment of the IETF + + # Note that this test construciton uses the first letter of the wg 
acronyms + # for convenience to switch on whether groups have documents with assigned ADs. + # (Search for ` if wg_acronym[0] > "g"`) + # There's no other significance to the names of the area directors or the + # acronyms of the areas and groups other than being distinct. Taking the + # values from sets of similar things hopefully helps with debugging the tests. + + areas = {} + for area_acronym in ["red", "orange", "yellow", "green", "blue", "violet"]: + areas[area_acronym] = GroupFactory(type_id="area", acronym=area_acronym) + for ad, area, wgs in [ + ("Alpha", "red", ["bassoon"]), + ("Bravo", "orange", ["celesta"]), + ("Charlie", "orange", ["clarinet", "cymbals"]), + ("Delta", "yellow", ["flute"]), + ("Echo", "yellow", ["glockenspiel"]), + ("Foxtrot", "green", ["gong", "guitar"]), + ("Golf", "green", ["harp"]), + ("Hotel", "blue", ["harpsichord"]), + ("Indigo", "blue", ["oboe", "organ"]), + ("Juliet", "violet", ["piano"]), + ("Kilo", "violet", ["piccolo"]), + ("Lima", "violet", ["saxophone", "tambourine"]), + ]: + p = Person.objects.filter(name=ad).first() or PersonFactory(name=ad) + RoleFactory(group=areas[area], person=p, name_id="ad") + for wg in wgs: + g = GroupFactory(acronym=wg, type_id="wg", parent=areas[area]) + RoleFactory(group=g, person=p, name_id="ad") + + # Some ADs have out of area groups + g = GroupFactory(acronym="timpani", parent=areas["orange"]) + RoleFactory(group=g, person=Person.objects.get(name="Juliet"), name_id="ad") + + ( + area_summary, + area_totals, + ad_summary, + noad_summary, + ad_totals, + noad_totals, + totals, + wg_summary, + ) = get_wg_dashboard_info() + + self.assertEqual( + area_summary, + [ + { + "area": "red", + "groups_in_area": 1, + "groups_with_docs": 0, + "doc_count": 0, + "page_count": 0, + "group_percent": 0, + "doc_percent": 0, + "page_percent": 0, + }, + { + "area": "orange", + "groups_in_area": 4, + "groups_with_docs": 0, + "doc_count": 0, + "page_count": 0, + "group_percent": 0, + "doc_percent": 0, + 
"page_percent": 0, + }, + { + "area": "yellow", + "groups_in_area": 2, + "groups_with_docs": 0, + "doc_count": 0, + "page_count": 0, + "group_percent": 0, + "doc_percent": 0, + "page_percent": 0, + }, + { + "area": "green", + "groups_in_area": 3, + "groups_with_docs": 0, + "doc_count": 0, + "page_count": 0, + "group_percent": 0, + "doc_percent": 0, + "page_percent": 0, + }, + { + "area": "blue", + "groups_in_area": 3, + "groups_with_docs": 0, + "doc_count": 0, + "page_count": 0, + "group_percent": 0, + "doc_percent": 0, + "page_percent": 0, + }, + { + "area": "violet", + "groups_in_area": 4, + "groups_with_docs": 0, + "doc_count": 0, + "page_count": 0, + "group_percent": 0, + "doc_percent": 0, + "page_percent": 0, + }, + ], + ) + self.assertEqual( + area_totals, {"group_count": 0, "doc_count": 0, "page_count": 0} + ) + self.assertEqual( + ad_summary, + [ + { + "ad": "Alpha", + "area": "red", + "ad_group_count": 1, + "doc_group_count": 0, + "doc_count": 0, + "page_count": 0, + "group_percent": 0, + "doc_percent": 0, + "page_percent": 0, + }, + { + "ad": "Bravo", + "area": "orange", + "ad_group_count": 1, + "doc_group_count": 0, + "doc_count": 0, + "page_count": 0, + "group_percent": 0, + "doc_percent": 0, + "page_percent": 0, + }, + { + "ad": "Charlie", + "area": "orange", + "ad_group_count": 2, + "doc_group_count": 0, + "doc_count": 0, + "page_count": 0, + "group_percent": 0, + "doc_percent": 0, + "page_percent": 0, + }, + { + "ad": "Delta", + "area": "yellow", + "ad_group_count": 1, + "doc_group_count": 0, + "doc_count": 0, + "page_count": 0, + "group_percent": 0, + "doc_percent": 0, + "page_percent": 0, + }, + { + "ad": "Echo", + "area": "yellow", + "ad_group_count": 1, + "doc_group_count": 0, + "doc_count": 0, + "page_count": 0, + "group_percent": 0, + "doc_percent": 0, + "page_percent": 0, + }, + { + "ad": "Foxtrot", + "area": "green", + "ad_group_count": 2, + "doc_group_count": 0, + "doc_count": 0, + "page_count": 0, + "group_percent": 0, + "doc_percent": 0, + 
"page_percent": 0, + }, + { + "ad": "Golf", + "area": "green", + "ad_group_count": 1, + "doc_group_count": 0, + "doc_count": 0, + "page_count": 0, + "group_percent": 0, + "doc_percent": 0, + "page_percent": 0, + }, + { + "ad": "Hotel", + "area": "blue", + "ad_group_count": 1, + "doc_group_count": 0, + "doc_count": 0, + "page_count": 0, + "group_percent": 0, + "doc_percent": 0, + "page_percent": 0, + }, + { + "ad": "Indigo", + "area": "blue", + "ad_group_count": 2, + "doc_group_count": 0, + "doc_count": 0, + "page_count": 0, + "group_percent": 0, + "doc_percent": 0, + "page_percent": 0, + }, + { + "ad": "Juliet", + "area": "orange", + "ad_group_count": 1, + "doc_group_count": 0, + "doc_count": 0, + "page_count": 0, + "group_percent": 0, + "doc_percent": 0, + "page_percent": 0, + }, + { + "ad": "Juliet", + "area": "violet", + "ad_group_count": 1, + "doc_group_count": 0, + "doc_count": 0, + "page_count": 0, + "group_percent": 0, + "doc_percent": 0, + "page_percent": 0, + }, + { + "ad": "Kilo", + "area": "violet", + "ad_group_count": 1, + "doc_group_count": 0, + "doc_count": 0, + "page_count": 0, + "group_percent": 0, + "doc_percent": 0, + "page_percent": 0, + }, + { + "ad": "Lima", + "area": "violet", + "ad_group_count": 2, + "doc_group_count": 0, + "doc_count": 0, + "page_count": 0, + "group_percent": 0, + "doc_percent": 0, + "page_percent": 0, + }, + ], + ) + self.assertEqual( + noad_summary, + [ + { + "ad": "Alpha", + "area": "red", + "ad_group_count": 1, + "doc_group_count": 0, + "doc_count": 0, + "page_count": 0, + "group_percent": 0, + "doc_percent": 0, + "page_percent": 0, + }, + { + "ad": "Bravo", + "area": "orange", + "ad_group_count": 1, + "doc_group_count": 0, + "doc_count": 0, + "page_count": 0, + "group_percent": 0, + "doc_percent": 0, + "page_percent": 0, + }, + { + "ad": "Charlie", + "area": "orange", + "ad_group_count": 2, + "doc_group_count": 0, + "doc_count": 0, + "page_count": 0, + "group_percent": 0, + "doc_percent": 0, + "page_percent": 0, + }, + 
{ + "ad": "Delta", + "area": "yellow", + "ad_group_count": 1, + "doc_group_count": 0, + "doc_count": 0, + "page_count": 0, + "group_percent": 0, + "doc_percent": 0, + "page_percent": 0, + }, + { + "ad": "Echo", + "area": "yellow", + "ad_group_count": 1, + "doc_group_count": 0, + "doc_count": 0, + "page_count": 0, + "group_percent": 0, + "doc_percent": 0, + "page_percent": 0, + }, + { + "ad": "Foxtrot", + "area": "green", + "ad_group_count": 2, + "doc_group_count": 0, + "doc_count": 0, + "page_count": 0, + "group_percent": 0, + "doc_percent": 0, + "page_percent": 0, + }, + { + "ad": "Golf", + "area": "green", + "ad_group_count": 1, + "doc_group_count": 0, + "doc_count": 0, + "page_count": 0, + "group_percent": 0, + "doc_percent": 0, + "page_percent": 0, + }, + { + "ad": "Hotel", + "area": "blue", + "ad_group_count": 1, + "doc_group_count": 0, + "doc_count": 0, + "page_count": 0, + "group_percent": 0, + "doc_percent": 0, + "page_percent": 0, + }, + { + "ad": "Indigo", + "area": "blue", + "ad_group_count": 2, + "doc_group_count": 0, + "doc_count": 0, + "page_count": 0, + "group_percent": 0, + "doc_percent": 0, + "page_percent": 0, + }, + { + "ad": "Juliet", + "area": "orange", + "ad_group_count": 1, + "doc_group_count": 0, + "doc_count": 0, + "page_count": 0, + "group_percent": 0, + "doc_percent": 0, + "page_percent": 0, + }, + { + "ad": "Juliet", + "area": "violet", + "ad_group_count": 1, + "doc_group_count": 0, + "doc_count": 0, + "page_count": 0, + "group_percent": 0, + "doc_percent": 0, + "page_percent": 0, + }, + { + "ad": "Kilo", + "area": "violet", + "ad_group_count": 1, + "doc_group_count": 0, + "doc_count": 0, + "page_count": 0, + "group_percent": 0, + "doc_percent": 0, + "page_percent": 0, + }, + { + "ad": "Lima", + "area": "violet", + "ad_group_count": 2, + "doc_group_count": 0, + "doc_count": 0, + "page_count": 0, + "group_percent": 0, + "doc_percent": 0, + "page_percent": 0, + }, + ], + ) + self.assertEqual( + ad_totals, + { + "ad_group_count": 17, + 
"doc_group_count": 0, + "doc_count": 0, + "page_count": 0, + }, + ) + self.assertEqual( + noad_totals, + { + "ad_group_count": 17, + "doc_group_count": 0, + "doc_count": 0, + "page_count": 0, + }, + ) + self.assertEqual( + totals, + { + "group_count": 17, + "doc_count": 0, + "page_count": 0, + "groups_with_docs_count": 0, + }, + ) + self.assertEqual( + wg_summary, + [ + { + "wg": "bassoon", + "area": "red", + "ad": "Alpha", + "doc_count": 0, + "page_count": 0, + "rfc_count": 0, + "recent_rfc_count": 0, + }, + { + "wg": "celesta", + "area": "orange", + "ad": "Bravo", + "doc_count": 0, + "page_count": 0, + "rfc_count": 0, + "recent_rfc_count": 0, + }, + { + "wg": "clarinet", + "area": "orange", + "ad": "Charlie", + "doc_count": 0, + "page_count": 0, + "rfc_count": 0, + "recent_rfc_count": 0, + }, + { + "wg": "cymbals", + "area": "orange", + "ad": "Charlie", + "doc_count": 0, + "page_count": 0, + "rfc_count": 0, + "recent_rfc_count": 0, + }, + { + "wg": "flute", + "area": "yellow", + "ad": "Delta", + "doc_count": 0, + "page_count": 0, + "rfc_count": 0, + "recent_rfc_count": 0, + }, + { + "wg": "glockenspiel", + "area": "yellow", + "ad": "Echo", + "doc_count": 0, + "page_count": 0, + "rfc_count": 0, + "recent_rfc_count": 0, + }, + { + "wg": "gong", + "area": "green", + "ad": "Foxtrot", + "doc_count": 0, + "page_count": 0, + "rfc_count": 0, + "recent_rfc_count": 0, + }, + { + "wg": "guitar", + "area": "green", + "ad": "Foxtrot", + "doc_count": 0, + "page_count": 0, + "rfc_count": 0, + "recent_rfc_count": 0, + }, + { + "wg": "harp", + "area": "green", + "ad": "Golf", + "doc_count": 0, + "page_count": 0, + "rfc_count": 0, + "recent_rfc_count": 0, + }, + { + "wg": "harpsichord", + "area": "blue", + "ad": "Hotel", + "doc_count": 0, + "page_count": 0, + "rfc_count": 0, + "recent_rfc_count": 0, + }, + { + "wg": "oboe", + "area": "blue", + "ad": "Indigo", + "doc_count": 0, + "page_count": 0, + "rfc_count": 0, + "recent_rfc_count": 0, + }, + { + "wg": "organ", + "area": "blue", 
+ "ad": "Indigo", + "doc_count": 0, + "page_count": 0, + "rfc_count": 0, + "recent_rfc_count": 0, + }, + { + "wg": "piano", + "area": "violet", + "ad": "Juliet", + "doc_count": 0, + "page_count": 0, + "rfc_count": 0, + "recent_rfc_count": 0, + }, + { + "wg": "piccolo", + "area": "violet", + "ad": "Kilo", + "doc_count": 0, + "page_count": 0, + "rfc_count": 0, + "recent_rfc_count": 0, + }, + { + "wg": "saxophone", + "area": "violet", + "ad": "Lima", + "doc_count": 0, + "page_count": 0, + "rfc_count": 0, + "recent_rfc_count": 0, + }, + { + "wg": "tambourine", + "area": "violet", + "ad": "Lima", + "doc_count": 0, + "page_count": 0, + "rfc_count": 0, + "recent_rfc_count": 0, + }, + { + "wg": "timpani", + "area": "orange", + "ad": "Juliet", + "doc_count": 0, + "page_count": 0, + "rfc_count": 0, + "recent_rfc_count": 0, + }, + ], + ) + + # As seen above, all doc and page counts are currently 0 + + # We'll give a group a document but not assign it to its AD + WgDraftFactory( + group=Group.objects.get(acronym="saxophone"), pages=len("saxophone") + ) + ( + area_summary, + area_totals, + ad_summary, + noad_summary, + ad_totals, + noad_totals, + totals, + wg_summary, + ) = get_wg_dashboard_info() + count_violet_dicts = 0 + for d in area_summary: + if d["area"] == "violet": + count_violet_dicts += 1 + self.assertEqual(d["groups_with_docs"], 1) + self.assertEqual(d["doc_count"], 1) + self.assertEqual(d["page_count"], 9) + self.assertEqual(d["group_percent"], 100.0) + self.assertEqual(d["doc_percent"], 100.0) + self.assertEqual(d["page_percent"], 100.0) + else: + self.assertEqual(d["groups_with_docs"], 0) + self.assertEqual(d["doc_count"], 0) + self.assertEqual(d["page_count"], 0) + self.assertEqual(d["group_percent"], 0) + self.assertEqual(d["doc_percent"], 0) + self.assertEqual(d["page_percent"], 0) + self.assertEqual(count_violet_dicts, 1) + + self.assertEqual( + area_totals, {"group_count": 1, "doc_count": 1, "page_count": 9} + ) + + # No AD has this document, even though 
it's in Lima's group + count_lima_dicts = 0 + for d in ad_summary: + if d["ad"] == "Lima": + count_lima_dicts += 1 + self.assertEqual(d["doc_group_count"], 0) + self.assertEqual(d["doc_count"], 0) + self.assertEqual(d["page_count"], 0) + self.assertEqual(d["group_percent"], 0) + self.assertEqual(d["doc_percent"], 0) + self.assertEqual(d["page_percent"], 0) + self.assertEqual(count_lima_dicts, 1) + + # It's in Lima's group, so normally it will eventually land on Lima + count_lima_dicts = 0 + for d in noad_summary: + if d["ad"] == "Lima": + count_lima_dicts += 1 + self.assertEqual(d["doc_group_count"], 1) + self.assertEqual(d["doc_count"], 1) + self.assertEqual(d["page_count"], 9) + self.assertEqual(d["group_percent"], 100.0) + self.assertEqual(d["doc_percent"], 100.0) + self.assertEqual(d["page_percent"], 100.0) + else: + self.assertEqual(d["doc_group_count"], 0) + self.assertEqual(d["doc_count"], 0) + self.assertEqual(d["page_count"], 0) + self.assertEqual(d["group_percent"], 0) + self.assertEqual(d["doc_percent"], 0) + self.assertEqual(d["page_percent"], 0) + self.assertEqual(count_lima_dicts, 1) + + self.assertEqual( + ad_totals, + { + "ad_group_count": 17, + "doc_group_count": 0, + "doc_count": 0, + "page_count": 0, + }, + ) + self.assertEqual( + noad_totals, + { + "ad_group_count": 17, + "doc_group_count": 1, + "doc_count": 1, + "page_count": 9, + }, + ) + self.assertEqual( + totals, + { + "group_count": 17, + "doc_count": 1, + "page_count": 9, + "groups_with_docs_count": 1, + }, + ) + + count_sax_dicts = 0 + for d in wg_summary: + if d["wg"] == "saxophone": + count_sax_dicts += 1 + self.assertEqual(d["doc_count"], 1) + self.assertEqual(d["page_count"], 9) + else: + self.assertEqual(d["doc_count"], 0) + self.assertEqual(d["page_count"], 0) + self.assertEqual(count_sax_dicts, 1) + + # Assign that doc to Lima + self.assertEqual(Document.objects.count(), 1) + Document.objects.all().update(ad=Person.objects.get(name="Lima")) + ( + area_summary, + area_totals, + 
ad_summary, + noad_summary, + ad_totals, + noad_totals, + totals, + wg_summary, + ) = get_wg_dashboard_info() + count_violet_dicts = 0 + for d in area_summary: + if d["area"] == "violet": + count_violet_dicts += 1 + self.assertEqual(d["groups_with_docs"], 1) + self.assertEqual(d["doc_count"], 1) + self.assertEqual(d["page_count"], 9) + self.assertEqual(d["group_percent"], 100.0) + self.assertEqual(d["doc_percent"], 100.0) + self.assertEqual(d["page_percent"], 100.0) + else: + self.assertEqual(d["groups_with_docs"], 0) + self.assertEqual(d["doc_count"], 0) + self.assertEqual(d["page_count"], 0) + self.assertEqual(d["group_percent"], 0) + self.assertEqual(d["doc_percent"], 0) + self.assertEqual(d["page_percent"], 0) + self.assertEqual(count_violet_dicts, 1) + + self.assertEqual( + area_totals, {"group_count": 1, "doc_count": 1, "page_count": 9} + ) + + # This time it will show up as a doc assigned to Lima + count_lima_dicts = 0 + for d in ad_summary: + if d["ad"] == "Lima": + count_lima_dicts += 1 + self.assertEqual(d["doc_group_count"], 1) + self.assertEqual(d["doc_count"], 1) + self.assertEqual(d["page_count"], 9) + self.assertEqual(d["group_percent"], 100.0) + self.assertEqual(d["doc_percent"], 100.0) + self.assertEqual(d["page_percent"], 100.0) + else: + self.assertEqual(d["doc_group_count"], 0) + self.assertEqual(d["doc_count"], 0) + self.assertEqual(d["page_count"], 0) + self.assertEqual(d["group_percent"], 0) + self.assertEqual(d["doc_percent"], 0) + self.assertEqual(d["page_percent"], 0) + self.assertEqual(count_lima_dicts, 1) + + # and there will be no noad documents + count_lima_dicts = 0 + for d in noad_summary: + if d["ad"] == "Lima": + count_lima_dicts += 1 + self.assertEqual(d["doc_group_count"], 0) + self.assertEqual(d["doc_count"], 0) + self.assertEqual(d["page_count"], 0) + self.assertEqual(d["group_percent"], 0) + self.assertEqual(d["doc_percent"], 0) + self.assertEqual(d["page_percent"], 0) + self.assertEqual(count_lima_dicts, 1) + + 
self.assertEqual( + ad_totals, + { + "ad_group_count": 17, + "doc_group_count": 1, + "doc_count": 1, + "page_count": 9, + }, + ) + self.assertEqual( + noad_totals, + { + "ad_group_count": 17, + "doc_group_count": 0, + "doc_count": 0, + "page_count": 0, + }, + ) + self.assertEqual( + totals, + { + "group_count": 17, + "doc_count": 1, + "page_count": 9, + "groups_with_docs_count": 1, + }, + ) + + count_sax_dicts = 0 + for d in wg_summary: + if d["wg"] == "saxophone": + count_sax_dicts += 1 + self.assertEqual(d["doc_count"], 1) + self.assertEqual(d["page_count"], 9) + else: + self.assertEqual(d["doc_count"], 0) + self.assertEqual(d["page_count"], 0) + self.assertEqual(count_sax_dicts, 1) + + # Now give Lima a document in a group that's not in their area: + WgDraftFactory( + group=Group.objects.get(acronym="gong"), + pages=len("gong"), + ad=Person.objects.get(name="Lima"), + ) + ( + area_summary, + area_totals, + ad_summary, + noad_summary, + ad_totals, + noad_totals, + totals, + wg_summary, + ) = get_wg_dashboard_info() + seen_dicts = Counter([d["area"] for d in area_summary]) + for d in areas: + self.assertEqual(seen_dicts[area], 1 if area in ["violet", "green"] else 0) + for d in area_summary: + if d["area"] in ["violet", "green"]: + self.assertEqual(d["doc_count"], 1) + self.assertEqual(d["page_count"], 9 if d["area"] == "violet" else 4) + self.assertEqual(d["group_percent"], 50) + self.assertEqual(d["doc_percent"], 50) + self.assertEqual( + d["page_percent"], + 100 * 9 / 13 if d["area"] == "violet" else 100 * 4 / 13, + ) + else: + self.assertEqual(d["doc_count"], 0) + self.assertEqual(d["page_count"], 0) + self.assertEqual(d["group_percent"], 0) + self.assertEqual(d["doc_percent"], 0) + self.assertEqual(d["page_percent"], 0) + + self.assertEqual( + area_totals, {"group_count": 2, "doc_count": 2, "page_count": 13} + ) + + for d in ad_summary: + if d["ad"] == "Lima": + self.assertEqual(d["doc_group_count"], 1) + self.assertEqual(d["doc_count"], 1) + 
self.assertEqual(d["page_count"], 9 if d["area"] == "violet" else 4) + self.assertEqual(d["group_percent"], 50) + self.assertEqual(d["doc_percent"], 50) + self.assertEqual( + d["page_percent"], + 100 * 9 / 13 if d["area"] == "violet" else 100 * 4 / 13, + ) + else: + self.assertEqual(d["doc_group_count"], 0) + self.assertEqual( + d["doc_count"], 0 + ) # Note in particular this is 0 for Foxtrot + self.assertEqual(d["page_count"], 0) + self.assertEqual(d["group_percent"], 0) + self.assertEqual(d["doc_percent"], 0) + self.assertEqual(d["page_percent"], 0) + + for d in wg_summary: + if d["wg"] == "gong": + # Lima's doc in gong above counts at the dict for gong even though the ad reported there is Foxtrot. + self.assertEqual( + d, + { + "wg": "gong", + "area": "green", + "ad": "Foxtrot", + "doc_count": 1, + "page_count": 4, + "rfc_count": 0, + "recent_rfc_count": 0, + }, + ) + elif d["ad"] == "Lima": + self.assertEqual( + d["area"], "violet" + ) # The out of area assignment is not reflected in the wg_summary at all. 
+ + # Now pile on a lot of documents + for wg_acronym in [ + "bassoon", + "celesta", + "clarinet", + "cymbals", + "flute", + "glockenspiel", + "gong", + "guitar", + "harp", + "harpsichord", + "oboe", + "organ", + "piano", + "piccolo", + "saxophone", + "tambourine", + "timpani", + ]: + if wg_acronym in ["bassoon", "celesta"]: + continue # Those WGs have no docs + # The rest have a doc that's not assigned to any ad + WgDraftFactory( + group=Group.objects.get(acronym=wg_acronym), pages=len(wg_acronym) + ) + if wg_acronym[0] > "g": + # Some have a doc assigned to the responsible ad + WgDraftFactory( + group=Group.objects.get(acronym=wg_acronym), + pages=len(wg_acronym), + ad=Role.objects.get(name_id="ad", group__acronym=wg_acronym).person, + ) + # The other AD for an area might be covering a doc + WgDraftFactory( + group=Group.objects.get(acronym="saxophone"), + pages=len("saxophone"), + ad=Person.objects.get(name="Juliet"), + ) + # An Ad not associated with the group or the area is responsible for a doc + WgDraftFactory( + group=Group.objects.get(acronym="bassoon"), + pages=len("bassoon"), + ad=Person.objects.get(name="Juliet"), + ) + + ( + area_summary, + area_totals, + ad_summary, + noad_summary, + ad_totals, + noad_totals, + totals, + wg_summary, + ) = get_wg_dashboard_info() + + self.assertEqual( + area_summary, + [ + { + "area": "red", + "groups_in_area": 1, + "groups_with_docs": 1, + "doc_count": 1, + "page_count": 7, + "group_percent": 6.25, + "doc_percent": 3.571428571428571, + "page_percent": 3.5897435897435894, + }, + { + "area": "orange", + "groups_in_area": 4, + "groups_with_docs": 3, + "doc_count": 4, + "page_count": 29, + "group_percent": 18.75, + "doc_percent": 14.285714285714285, + "page_percent": 14.871794871794872, + }, + { + "area": "yellow", + "groups_in_area": 2, + "groups_with_docs": 2, + "doc_count": 2, + "page_count": 17, + "group_percent": 12.5, + "doc_percent": 7.142857142857142, + "page_percent": 8.717948717948717, + }, + { + "area": 
"green", + "groups_in_area": 3, + "groups_with_docs": 3, + "doc_count": 5, + "page_count": 22, + "group_percent": 18.75, + "doc_percent": 17.857142857142858, + "page_percent": 11.282051282051283, + }, + { + "area": "blue", + "groups_in_area": 3, + "groups_with_docs": 3, + "doc_count": 6, + "page_count": 40, + "group_percent": 18.75, + "doc_percent": 21.428571428571427, + "page_percent": 20.51282051282051, + }, + { + "area": "violet", + "groups_in_area": 4, + "groups_with_docs": 4, + "doc_count": 10, + "page_count": 80, + "group_percent": 25.0, + "doc_percent": 35.714285714285715, + "page_percent": 41.02564102564102, + }, + ], + ) + self.assertEqual( + area_totals, {"group_count": 16, "doc_count": 28, "page_count": 195} + ) + self.assertEqual( + ad_summary, + [ + { + "ad": "Alpha", + "area": "red", + "ad_group_count": 1, + "doc_group_count": 0, + "doc_count": 0, + "page_count": 0, + "group_percent": 0.0, + "doc_percent": 0.0, + "page_percent": 0.0, + }, + { + "ad": "Bravo", + "area": "orange", + "ad_group_count": 1, + "doc_group_count": 0, + "doc_count": 0, + "page_count": 0, + "group_percent": 0.0, + "doc_percent": 0.0, + "page_percent": 0.0, + }, + { + "ad": "Charlie", + "area": "orange", + "ad_group_count": 2, + "doc_group_count": 0, + "doc_count": 0, + "page_count": 0, + "group_percent": 0.0, + "doc_percent": 0.0, + "page_percent": 0.0, + }, + { + "ad": "Delta", + "area": "yellow", + "ad_group_count": 1, + "doc_group_count": 0, + "doc_count": 0, + "page_count": 0, + "group_percent": 0.0, + "doc_percent": 0.0, + "page_percent": 0.0, + }, + { + "ad": "Echo", + "area": "yellow", + "ad_group_count": 1, + "doc_group_count": 0, + "doc_count": 0, + "page_count": 0, + "group_percent": 0.0, + "doc_percent": 0.0, + "page_percent": 0.0, + }, + { + "ad": "Foxtrot", + "area": "green", + "ad_group_count": 2, + "doc_group_count": 0, + "doc_count": 0, + "page_count": 0, + "group_percent": 0.0, + "doc_percent": 0.0, + "page_percent": 0.0, + }, + { + "ad": "Golf", + "area": 
"green", + "ad_group_count": 1, + "doc_group_count": 1, + "doc_count": 1, + "page_count": 4, + "group_percent": 8.333333333333332, + "doc_percent": 7.6923076923076925, + "page_percent": 4.395604395604396, + }, + { + "ad": "Hotel", + "area": "blue", + "ad_group_count": 1, + "doc_group_count": 1, + "doc_count": 1, + "page_count": 11, + "group_percent": 8.333333333333332, + "doc_percent": 7.6923076923076925, + "page_percent": 12.087912087912088, + }, + { + "ad": "Indigo", + "area": "blue", + "ad_group_count": 2, + "doc_group_count": 2, + "doc_count": 2, + "page_count": 9, + "group_percent": 16.666666666666664, + "doc_percent": 15.384615384615385, + "page_percent": 9.89010989010989, + }, + { + "ad": "Juliet", + "area": "orange", + "ad_group_count": 1, + "doc_group_count": 1, + "doc_count": 1, + "page_count": 7, + "group_percent": 8.333333333333332, + "doc_percent": 7.6923076923076925, + "page_percent": 7.6923076923076925, + }, + { + "ad": "Juliet", + "area": "red", + "ad_group_count": 0, + "doc_group_count": 1, + "doc_count": 1, + "page_count": 7, + "group_percent": 8.333333333333332, + "doc_percent": 7.6923076923076925, + "page_percent": 7.6923076923076925, + }, + { + "ad": "Juliet", + "area": "violet", + "ad_group_count": 1, + "doc_group_count": 2, + "doc_count": 2, + "page_count": 14, + "group_percent": 16.666666666666664, + "doc_percent": 15.384615384615385, + "page_percent": 15.384615384615385, + }, + { + "ad": "Kilo", + "area": "violet", + "ad_group_count": 1, + "doc_group_count": 1, + "doc_count": 1, + "page_count": 7, + "group_percent": 8.333333333333332, + "doc_percent": 7.6923076923076925, + "page_percent": 7.6923076923076925, + }, + { + "ad": "Lima", + "area": "green", + "ad_group_count": 0, + "doc_group_count": 1, + "doc_count": 1, + "page_count": 4, + "group_percent": 8.333333333333332, + "doc_percent": 7.6923076923076925, + "page_percent": 4.395604395604396, + }, + { + "ad": "Lima", + "area": "violet", + "ad_group_count": 2, + "doc_group_count": 2, + 
"doc_count": 3, + "page_count": 28, + "group_percent": 16.666666666666664, + "doc_percent": 23.076923076923077, + "page_percent": 30.76923076923077, + }, + ], + ) + self.assertEqual( + noad_summary, + [ + { + "ad": "Alpha", + "area": "red", + "ad_group_count": 1, + "doc_group_count": 0, + "doc_count": 0, + "page_count": 0, + "group_percent": 0.0, + "doc_percent": 0.0, + "page_percent": 0.0, + }, + { + "ad": "Bravo", + "area": "orange", + "ad_group_count": 1, + "doc_group_count": 0, + "doc_count": 0, + "page_count": 0, + "group_percent": 0.0, + "doc_percent": 0.0, + "page_percent": 0.0, + }, + { + "ad": "Charlie", + "area": "orange", + "ad_group_count": 2, + "doc_group_count": 2, + "doc_count": 2, + "page_count": 15, + "group_percent": 13.333333333333334, + "doc_percent": 13.333333333333334, + "page_percent": 14.423076923076922, + }, + { + "ad": "Delta", + "area": "yellow", + "ad_group_count": 1, + "doc_group_count": 1, + "doc_count": 1, + "page_count": 5, + "group_percent": 6.666666666666667, + "doc_percent": 6.666666666666667, + "page_percent": 4.807692307692308, + }, + { + "ad": "Echo", + "area": "yellow", + "ad_group_count": 1, + "doc_group_count": 1, + "doc_count": 1, + "page_count": 12, + "group_percent": 6.666666666666667, + "doc_percent": 6.666666666666667, + "page_percent": 11.538461538461538, + }, + { + "ad": "Foxtrot", + "area": "green", + "ad_group_count": 2, + "doc_group_count": 2, + "doc_count": 2, + "page_count": 10, + "group_percent": 13.333333333333334, + "doc_percent": 13.333333333333334, + "page_percent": 9.615384615384617, + }, + { + "ad": "Golf", + "area": "green", + "ad_group_count": 1, + "doc_group_count": 1, + "doc_count": 1, + "page_count": 4, + "group_percent": 6.666666666666667, + "doc_percent": 6.666666666666667, + "page_percent": 3.8461538461538463, + }, + { + "ad": "Hotel", + "area": "blue", + "ad_group_count": 1, + "doc_group_count": 1, + "doc_count": 1, + "page_count": 11, + "group_percent": 6.666666666666667, + "doc_percent": 
6.666666666666667, + "page_percent": 10.576923076923077, + }, + { + "ad": "Indigo", + "area": "blue", + "ad_group_count": 2, + "doc_group_count": 2, + "doc_count": 2, + "page_count": 9, + "group_percent": 13.333333333333334, + "doc_percent": 13.333333333333334, + "page_percent": 8.653846153846153, + }, + { + "ad": "Juliet", + "area": "orange", + "ad_group_count": 1, + "doc_group_count": 1, + "doc_count": 1, + "page_count": 7, + "group_percent": 6.666666666666667, + "doc_percent": 6.666666666666667, + "page_percent": 6.730769230769231, + }, + { + "ad": "Juliet", + "area": "violet", + "ad_group_count": 1, + "doc_group_count": 1, + "doc_count": 1, + "page_count": 5, + "group_percent": 6.666666666666667, + "doc_percent": 6.666666666666667, + "page_percent": 4.807692307692308, + }, + { + "ad": "Kilo", + "area": "violet", + "ad_group_count": 1, + "doc_group_count": 1, + "doc_count": 1, + "page_count": 7, + "group_percent": 6.666666666666667, + "doc_percent": 6.666666666666667, + "page_percent": 6.730769230769231, + }, + { + "ad": "Lima", + "area": "violet", + "ad_group_count": 2, + "doc_group_count": 2, + "doc_count": 2, + "page_count": 19, + "group_percent": 13.333333333333334, + "doc_percent": 13.333333333333334, + "page_percent": 18.269230769230766, + }, + ], + ) + self.assertEqual( + ad_totals, + { + "ad_group_count": 17, + "doc_group_count": 12, + "doc_count": 13, + "page_count": 91, + }, + ) + self.assertEqual( + noad_totals, + { + "ad_group_count": 17, + "doc_group_count": 15, + "doc_count": 15, + "page_count": 104, + }, + ) + self.assertEqual( + totals, + { + "group_count": 17, + "doc_count": 28, + "page_count": 195, + "groups_with_docs_count": 16, + }, + ) + self.assertEqual( + wg_summary, + [ + { + "wg": "bassoon", + "area": "red", + "ad": "Alpha", + "doc_count": 1, + "page_count": 7, + "rfc_count": 0, + "recent_rfc_count": 0, + }, + { + "wg": "celesta", + "area": "orange", + "ad": "Bravo", + "doc_count": 0, + "page_count": 0, + "rfc_count": 0, + 
"recent_rfc_count": 0, + }, + { + "wg": "clarinet", + "area": "orange", + "ad": "Charlie", + "doc_count": 1, + "page_count": 8, + "rfc_count": 0, + "recent_rfc_count": 0, + }, + { + "wg": "cymbals", + "area": "orange", + "ad": "Charlie", + "doc_count": 1, + "page_count": 7, + "rfc_count": 0, + "recent_rfc_count": 0, + }, + { + "wg": "flute", + "area": "yellow", + "ad": "Delta", + "doc_count": 1, + "page_count": 5, + "rfc_count": 0, + "recent_rfc_count": 0, + }, + { + "wg": "glockenspiel", + "area": "yellow", + "ad": "Echo", + "doc_count": 1, + "page_count": 12, + "rfc_count": 0, + "recent_rfc_count": 0, + }, + { + "wg": "gong", + "area": "green", + "ad": "Foxtrot", + "doc_count": 2, + "page_count": 8, + "rfc_count": 0, + "recent_rfc_count": 0, + }, + { + "wg": "guitar", + "area": "green", + "ad": "Foxtrot", + "doc_count": 1, + "page_count": 6, + "rfc_count": 0, + "recent_rfc_count": 0, + }, + { + "wg": "harp", + "area": "green", + "ad": "Golf", + "doc_count": 2, + "page_count": 8, + "rfc_count": 0, + "recent_rfc_count": 0, + }, + { + "wg": "harpsichord", + "area": "blue", + "ad": "Hotel", + "doc_count": 2, + "page_count": 22, + "rfc_count": 0, + "recent_rfc_count": 0, + }, + { + "wg": "oboe", + "area": "blue", + "ad": "Indigo", + "doc_count": 2, + "page_count": 8, + "rfc_count": 0, + "recent_rfc_count": 0, + }, + { + "wg": "organ", + "area": "blue", + "ad": "Indigo", + "doc_count": 2, + "page_count": 10, + "rfc_count": 0, + "recent_rfc_count": 0, + }, + { + "wg": "piano", + "area": "violet", + "ad": "Juliet", + "doc_count": 2, + "page_count": 10, + "rfc_count": 0, + "recent_rfc_count": 0, + }, + { + "wg": "piccolo", + "area": "violet", + "ad": "Kilo", + "doc_count": 2, + "page_count": 14, + "rfc_count": 0, + "recent_rfc_count": 0, + }, + { + "wg": "saxophone", + "area": "violet", + "ad": "Lima", + "doc_count": 4, + "page_count": 36, + "rfc_count": 0, + "recent_rfc_count": 0, + }, + { + "wg": "tambourine", + "area": "violet", + "ad": "Lima", + "doc_count": 2, + 
"page_count": 20, + "rfc_count": 0, + "recent_rfc_count": 0, + }, + { + "wg": "timpani", + "area": "orange", + "ad": "Juliet", + "doc_count": 2, + "page_count": 14, + "rfc_count": 0, + "recent_rfc_count": 0, + }, + ], + ) + + # Make sure the view doesn't _crash_ - the template is a dead-simple rendering of the dicts, but this test doesn't prove that + url = urlreverse("ietf.iesg.views.working_groups") + r = self.client.get(url) + self.assertEqual(r.status_code, 200) + class IESGAgendaTests(TestCase): def setUp(self): diff --git a/ietf/iesg/urls.py b/ietf/iesg/urls.py index d8cfec9f90..5fd9dea0cc 100644 --- a/ietf/iesg/urls.py +++ b/ietf/iesg/urls.py @@ -59,6 +59,7 @@ url(r'^agenda/telechat-(?:%(date)s-)?docs.tgz' % settings.URL_REGEXPS, views.telechat_docs_tarfile), url(r'^discusses/$', views.discusses), url(r'^ietf-activity/$', views.ietf_activity), + url(r'^working-groups/$', views.working_groups), url(r'^milestones/$', views.milestones_needing_review), url(r'^photos/$', views.photos), ] diff --git a/ietf/iesg/utils.py b/ietf/iesg/utils.py index 56571dc753..9051cf92b2 100644 --- a/ietf/iesg/utils.py +++ b/ietf/iesg/utils.py @@ -1,32 +1,45 @@ -from collections import namedtuple +from collections import Counter, defaultdict, namedtuple -import debug # pyflakes:ignore +import datetime + +import debug # pyflakes:ignore + +from django.db import models +from django.utils import timezone from ietf.doc.models import Document, STATUSCHANGE_RELATIONS from ietf.doc.utils_search import fill_in_telechat_date +from ietf.group.models import Group from ietf.iesg.agenda import get_doc_section +from ietf.person.utils import get_active_ads + +TelechatPageCount = namedtuple( + "TelechatPageCount", + ["for_approval", "for_action", "related", "ad_pages_left_to_ballot_on"], +) -TelechatPageCount = namedtuple('TelechatPageCount',['for_approval','for_action','related','ad_pages_left_to_ballot_on']) def telechat_page_count(date=None, docs=None, ad=None): if not date and not docs: return 
TelechatPageCount(0, 0, 0, 0) if not docs: - candidates = Document.objects.filter(docevent__telechatdocevent__telechat_date=date).distinct() + candidates = Document.objects.filter( + docevent__telechatdocevent__telechat_date=date + ).distinct() fill_in_telechat_date(candidates) - docs = [ doc for doc in candidates if doc.telechat_date()==date ] + docs = [doc for doc in candidates if doc.telechat_date() == date] - for_action =[d for d in docs if get_doc_section(d).endswith('.3')] + for_action = [d for d in docs if get_doc_section(d).endswith(".3")] - for_approval = set(docs)-set(for_action) + for_approval = set(docs) - set(for_action) - drafts = [d for d in for_approval if d.type_id == 'draft'] + drafts = [d for d in for_approval if d.type_id == "draft"] ad_pages_left_to_ballot_on = 0 pages_for_approval = 0 - + for draft in drafts: pages_for_approval += draft.pages or 0 if ad: @@ -39,30 +52,269 @@ def telechat_page_count(date=None, docs=None, ad=None): pages_for_action = 0 for d in for_action: - if d.type_id == 'draft': + if d.type_id == "draft": pages_for_action += d.pages or 0 - elif d.type_id == 'statchg': + elif d.type_id == "statchg": for rel in d.related_that_doc(STATUSCHANGE_RELATIONS): pages_for_action += rel.pages or 0 - elif d.type_id == 'conflrev': - for rel in d.related_that_doc('conflrev'): + elif d.type_id == "conflrev": + for rel in d.related_that_doc("conflrev"): pages_for_action += rel.pages or 0 else: pass related_pages = 0 - for d in for_approval-set(drafts): - if d.type_id == 'statchg': + for d in for_approval - set(drafts): + if d.type_id == "statchg": for rel in d.related_that_doc(STATUSCHANGE_RELATIONS): related_pages += rel.pages or 0 - elif d.type_id == 'conflrev': - for rel in d.related_that_doc('conflrev'): + elif d.type_id == "conflrev": + for rel in d.related_that_doc("conflrev"): related_pages += rel.pages or 0 else: # There's really nothing to rely on to give a reading load estimate for charters pass - - return 
TelechatPageCount(for_approval=pages_for_approval, - for_action=pages_for_action, - related=related_pages, - ad_pages_left_to_ballot_on=ad_pages_left_to_ballot_on) + + return TelechatPageCount( + for_approval=pages_for_approval, + for_action=pages_for_action, + related=related_pages, + ad_pages_left_to_ballot_on=ad_pages_left_to_ballot_on, + ) + + +def get_wg_dashboard_info(): + docs = ( + Document.objects.filter( + group__type="wg", + group__state="active", + states__type="draft", + states__slug="active", + ) + .filter(models.Q(ad__isnull=True) | models.Q(ad__in=get_active_ads())) + .distinct() + .prefetch_related("group", "group__parent") + .exclude( + states__type="draft-stream-ietf", + states__slug__in=["c-adopt", "wg-cand", "dead", "parked", "info"], + ) + ) + groups = Group.objects.filter(state="active", type="wg") + areas = Group.objects.filter(state="active", type="area") + + total_group_count = groups.count() + total_doc_count = docs.count() + total_page_count = docs.aggregate(models.Sum("pages"))["pages__sum"] or 0 + totals = { + "group_count": total_group_count, + "doc_count": total_doc_count, + "page_count": total_page_count, + } + + # Since this view is primarily about counting subsets of the above docs query and the + # expected number of returned documents is just under 1000 typically - do the totaling + # work in python rather than asking the db to do it. 
+ + groups_for_area = defaultdict(set) + pages_for_area = defaultdict(lambda: 0) + docs_for_area = defaultdict(lambda: 0) + groups_for_ad = defaultdict(lambda: defaultdict(set)) + pages_for_ad = defaultdict(lambda: defaultdict(lambda: 0)) + docs_for_ad = defaultdict(lambda: defaultdict(lambda: 0)) + groups_for_noad = defaultdict(lambda: defaultdict(set)) + pages_for_noad = defaultdict(lambda: defaultdict(lambda: 0)) + docs_for_noad = defaultdict(lambda: defaultdict(lambda: 0)) + docs_for_wg = defaultdict(lambda: 0) + pages_for_wg = defaultdict(lambda: 0) + groups_total = set() + pages_total = 0 + docs_total = 0 + + responsible_for_group = defaultdict(lambda: defaultdict(lambda: "None")) + responsible_count = defaultdict(lambda: defaultdict(lambda: 0)) + for group in groups: + responsible = f"{', '.join([r.person.plain_name() for r in group.role_set.filter(name_id='ad')])}" + docs_for_noad[responsible][group.parent.acronym] = ( + 0 # Ensure these keys are present later + ) + docs_for_ad[responsible][group.parent.acronym] = 0 + responsible_for_group[group.acronym][group.parent.acronym] = responsible + responsible_count[responsible][group.parent.acronym] += 1 + + for doc in docs: + docs_for_wg[doc.group] += 1 + pages_for_wg[doc.group] += doc.pages + groups_for_area[doc.group.area.acronym].add(doc.group.acronym) + pages_for_area[doc.group.area.acronym] += doc.pages + docs_for_area[doc.group.area.acronym] += 1 + + if doc.ad is None: + responsible = responsible_for_group[doc.group.acronym][ + doc.group.parent.acronym + ] + groups_for_noad[responsible][doc.group.parent.acronym].add( + doc.group.acronym + ) + pages_for_noad[responsible][doc.group.parent.acronym] += doc.pages + docs_for_noad[responsible][doc.group.parent.acronym] += 1 + else: + responsible = f"{doc.ad.plain_name()}" + groups_for_ad[responsible][doc.group.parent.acronym].add(doc.group.acronym) + pages_for_ad[responsible][doc.group.parent.acronym] += doc.pages + 
docs_for_ad[responsible][doc.group.parent.acronym] += 1 + + docs_total += 1 + groups_total.add(doc.group.acronym) + pages_total += doc.pages + + groups_total = len(groups_total) + totals["groups_with_docs_count"] = groups_total + + area_summary = [] + + for area in areas: + group_count = len(groups_for_area[area.acronym]) + doc_count = docs_for_area[area.acronym] + page_count = pages_for_area[area.acronym] + area_summary.append( + { + "area": area.acronym, + "groups_in_area": groups.filter(parent=area).count(), + "groups_with_docs": group_count, + "doc_count": doc_count, + "page_count": page_count, + "group_percent": group_count / groups_total * 100 + if groups_total != 0 + else 0, + "doc_percent": doc_count / docs_total * 100 if docs_total != 0 else 0, + "page_percent": page_count / pages_total * 100 + if pages_total != 0 + else 0, + } + ) + area_totals = { + "group_count": groups_total, + "doc_count": docs_total, + "page_count": pages_total, + } + + noad_summary = [] + noad_totals = { + "ad_group_count": 0, + "doc_group_count": 0, + "doc_count": 0, + "page_count": 0, + } + for ad in docs_for_noad: + for area in docs_for_noad[ad]: + noad_totals["ad_group_count"] += responsible_count[ad][area] + noad_totals["doc_group_count"] += len(groups_for_noad[ad][area]) + noad_totals["doc_count"] += docs_for_noad[ad][area] + noad_totals["page_count"] += pages_for_noad[ad][area] + for ad in docs_for_noad: + for area in docs_for_noad[ad]: + noad_summary.append( + { + "ad": ad, + "area": area, + "ad_group_count": responsible_count[ad][area], + "doc_group_count": len(groups_for_noad[ad][area]), + "doc_count": docs_for_noad[ad][area], + "page_count": pages_for_noad[ad][area], + "group_percent": len(groups_for_noad[ad][area]) + / noad_totals["doc_group_count"] + * 100 + if noad_totals["doc_group_count"] != 0 + else 0, + "doc_percent": docs_for_noad[ad][area] + / noad_totals["doc_count"] + * 100 + if noad_totals["doc_count"] != 0 + else 0, + "page_percent": pages_for_noad[ad][area] 
+ / noad_totals["page_count"] + * 100 + if noad_totals["page_count"] != 0 + else 0, + } + ) + noad_summary.sort(key=lambda r: (r["ad"], r["area"])) + + ad_summary = [] + ad_totals = { + "ad_group_count": 0, + "doc_group_count": 0, + "doc_count": 0, + "page_count": 0, + } + for ad in docs_for_ad: + for area in docs_for_ad[ad]: + ad_totals["ad_group_count"] += responsible_count[ad][area] + ad_totals["doc_group_count"] += len(groups_for_ad[ad][area]) + ad_totals["doc_count"] += docs_for_ad[ad][area] + ad_totals["page_count"] += pages_for_ad[ad][area] + for ad in docs_for_ad: + for area in docs_for_ad[ad]: + ad_summary.append( + { + "ad": ad, + "area": area, + "ad_group_count": responsible_count[ad][area], + "doc_group_count": len(groups_for_ad[ad][area]), + "doc_count": docs_for_ad[ad][area], + "page_count": pages_for_ad[ad][area], + "group_percent": len(groups_for_ad[ad][area]) + / ad_totals["doc_group_count"] + * 100 + if ad_totals["doc_group_count"] != 0 + else 0, + "doc_percent": docs_for_ad[ad][area] / ad_totals["doc_count"] * 100 + if ad_totals["doc_count"] != 0 + else 0, + "page_percent": pages_for_ad[ad][area] + / ad_totals["page_count"] + * 100 + if ad_totals["page_count"] != 0 + else 0, + } + ) + ad_summary.sort(key=lambda r: (r["ad"], r["area"])) + + rfc_counter = Counter( + Document.objects.filter(type="rfc").values_list("group__acronym", flat=True) + ) + recent_rfc_counter = Counter( + Document.objects.filter( + type="rfc", + docevent__type="published_rfc", + docevent__time__gte=timezone.now() - datetime.timedelta(weeks=104), + ).values_list("group__acronym", flat=True) + ) + for wg in set(groups) - set(docs_for_wg.keys()): + docs_for_wg[wg] += 0 + pages_for_wg[wg] += 0 + wg_summary = [] + for wg in docs_for_wg: + wg_summary.append( + { + "wg": wg.acronym, + "area": wg.parent.acronym, + "ad": responsible_for_group[wg.acronym][wg.parent.acronym], + "doc_count": docs_for_wg[wg], + "page_count": pages_for_wg[wg], + "rfc_count": rfc_counter[wg.acronym], + 
"recent_rfc_count": recent_rfc_counter[wg.acronym], + } + ) + wg_summary.sort(key=lambda r: (r["wg"], r["area"])) + + return ( + area_summary, + area_totals, + ad_summary, + noad_summary, + ad_totals, + noad_totals, + totals, + wg_summary, + ) diff --git a/ietf/iesg/views.py b/ietf/iesg/views.py index ffd4515c98..014b290425 100644 --- a/ietf/iesg/views.py +++ b/ietf/iesg/views.py @@ -61,7 +61,7 @@ from ietf.group.models import GroupMilestone, Role from ietf.iesg.agenda import agenda_data, agenda_sections, fill_in_agenda_docs, get_agenda_date from ietf.iesg.models import TelechatDate, TelechatAgendaContent -from ietf.iesg.utils import telechat_page_count +from ietf.iesg.utils import get_wg_dashboard_info, telechat_page_count from ietf.ietfauth.utils import has_role, role_required, user_is_person from ietf.name.models import TelechatAgendaSectionName from ietf.person.models import Person @@ -626,3 +626,13 @@ def telechat_agenda_content_view(request, section): content=content.text, content_type=f"text/plain; charset={settings.DEFAULT_CHARSET}", ) + +def working_groups(request): + + area_summary, area_totals, ad_summary, noad_summary, ad_totals, noad_totals, totals, wg_summary = get_wg_dashboard_info() + + return render( + request, + "iesg/working_groups.html", + dict(area_summary=area_summary, area_totals=area_totals, ad_summary=ad_summary, noad_summary=noad_summary, ad_totals=ad_totals, noad_totals=noad_totals, totals=totals, wg_summary=wg_summary), + ) diff --git a/ietf/templates/iesg/working_groups.html b/ietf/templates/iesg/working_groups.html new file mode 100644 index 0000000000..b799636857 --- /dev/null +++ b/ietf/templates/iesg/working_groups.html @@ -0,0 +1,159 @@ +{% extends "base.html" %} +{# Copyright The IETF Trust 2025, All Rights Reserved #} +{% load origin static %} +{% block pagehead %} + +{% endblock %} +{% block title %}IESG view of working groups{% endblock %} +{% block content %} + {% origin %} +

    IESG view of working groups

    +

    Area Size and Load

    + + + + + + + + {# (divider) #} + + + + + + {% for area in area_summary %} + + + + + + + + + {% endfor %} + + + + + + + + + + + +
    Area NameWGsI-DsPages% I-Ds% Pages
    {{area.area}}{{area.groups_in_area}}{{area.doc_count}}{{area.page_count}}{{area.doc_percent|floatformat:1}}{{area.page_percent|floatformat:1}}
    Totals{{totals.group_count}}{{totals.doc_count}}{{totals.page_count}}
    + +

    Area Director Load: Documents not yet directly assigned to AD

    +
    Typically these are pre-pubreq documents
    + + + + + + + + + {# (divider) #} + + + + + + {% for ad in noad_summary %} + + + + + + + + + + {% endfor %} + + + + + + + + + + + + +
    ADArea NameWGs for ADI-DsPages% I-Ds% Pages
    {{ad.ad}}{{ad.area}}{{ad.ad_group_count}}{{ad.doc_count}}{{ad.page_count}}{{ad.doc_percent|floatformat:1}}{{ad.page_percent|floatformat:1}}
    Totals{{noad_totals.ad_group_count}}{{noad_totals.doc_count}}{{noad_totals.page_count}}
    + +

    Area Director Load: Documents directly assigned to AD

    + + + + + + + + + {# (divider) #} + + + + + + {% for ad in ad_summary %} + + + + + + + + + + {% endfor %} + + + + + + + + + + + + +
    ADArea NameWGs for ADI-DsPages% I-Ds% Pages
    {{ad.ad}}{{ad.area}}{{ad.ad_group_count}}{{ad.doc_count}}{{ad.page_count}}{{ad.doc_percent|floatformat:1}}{{ad.page_percent|floatformat:1}}
    Totals{{ad_totals.ad_group_count}}{{ad_totals.doc_count}}{{ad_totals.page_count}}
    + +

    Working Group Summary

    + + + + + + + + + + + + + + {% for wg in wg_summary %} + + + + + + + + + + {% endfor %} + +
    WGAreaADI-DsPagesRFCsRFCs in last 2 years
    {{wg.wg}}{{wg.area}}{{wg.ad}}{{wg.doc_count}}{{wg.page_count}}{{wg.rfc_count}}{{wg.recent_rfc_count}}
    +{% endblock %} +{% block js %} + +{% endblock %} \ No newline at end of file From acffceba0b8f61d6a5c972080df41f9e86743919 Mon Sep 17 00:00:00 2001 From: Phil Whipps Date: Thu, 18 Sep 2025 04:46:20 +1000 Subject: [PATCH 204/405] fix: Rev Fix Option 2 - Htmlized url regex (#9538) * Update Rev Regex in settings.py Removing single value revision numbers as that is against the naming standard (https://authors.ietf.org/naming-your-internet-draft#version) and causes issues with htmlized documents with -1 in the name (eg draft-ietf-oauth-v2-1) * Reverse REGEX Change * Update URLS REgex for REV Directly insert Regex for REV rather than reference settings.URL_REGEXPS. This is to resolve issue https://github.com/ietf-tools/datatracker/issues/9533 --- ietf/doc/urls.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ietf/doc/urls.py b/ietf/doc/urls.py index 60255af856..6f1b698a9f 100644 --- a/ietf/doc/urls.py +++ b/ietf/doc/urls.py @@ -75,7 +75,7 @@ # This block should really all be at the idealized docs.ietf.org service url(r'^html/(?Pbcp[0-9]+?)(\.txt|\.html)?/?$', RedirectView.as_view(url=settings.RFC_EDITOR_INFO_BASE_URL+"%(name)s", permanent=False)), url(r'^html/(?Pstd[0-9]+?)(\.txt|\.html)?/?$', RedirectView.as_view(url=settings.RFC_EDITOR_INFO_BASE_URL+"%(name)s", permanent=False)), - url(r'^html/%(name)s(?:-%(rev)s)?(\.txt|\.html)?/?$' % settings.URL_REGEXPS, views_doc.document_html), + url(r'^html/%(name)s(?:-(?P[0-9]{2}(-[0-9]{2})?))?(\.txt|\.html)?/?$' % settings.URL_REGEXPS, views_doc.document_html), url(r'^id/%(name)s(?:-%(rev)s)?(?:\.(?P(txt|html|xml)))?/?$' % settings.URL_REGEXPS, views_doc.document_raw_id), url(r'^pdf/%(name)s(?:-%(rev)s)?(?:\.(?P[a-z]+))?/?$' % settings.URL_REGEXPS, views_doc.document_pdfized), From 6b58aa4bd47fd5fe84750a0cc66dd38b8e801c72 Mon Sep 17 00:00:00 2001 From: Robert Sparks Date: Thu, 18 Sep 2025 10:20:03 -0500 Subject: [PATCH 205/405] fix: edit only attachments actually attached to this liaison statement 
(#9548) * fix: edit only attachments actually attached to this liaison statement * chore: remove unused import --------- Co-authored-by: Jennifer Richards --- ietf/liaisons/tests.py | 29 +++++++++++++++++++++++------ ietf/liaisons/views.py | 11 +++++++---- 2 files changed, 30 insertions(+), 10 deletions(-) diff --git a/ietf/liaisons/tests.py b/ietf/liaisons/tests.py index a1fbf77841..2f86f38789 100644 --- a/ietf/liaisons/tests.py +++ b/ietf/liaisons/tests.py @@ -939,17 +939,34 @@ def test_liaison_add_attachment(self): ) def test_liaison_edit_attachment(self): - - attachment = LiaisonStatementAttachmentFactory(document__name='liaiatt-1') - url = urlreverse('ietf.liaisons.views.liaison_edit_attachment', kwargs=dict(object_id=attachment.statement_id,doc_id=attachment.document_id)) + attachment = LiaisonStatementAttachmentFactory(document__name="liaiatt-1") + url = urlreverse( + "ietf.liaisons.views.liaison_edit_attachment", + kwargs=dict( + object_id=attachment.statement_id, doc_id=attachment.document_id + ), + ) login_testing_unauthorized(self, "secretary", url) r = self.client.get(url) self.assertEqual(r.status_code, 200) - post_data = dict(title='New Title') - r = self.client.post(url,post_data) + post_data = dict(title="New Title") + r = self.client.post(url, post_data) attachment = LiaisonStatementAttachment.objects.get(pk=attachment.pk) self.assertEqual(r.status_code, 302) - self.assertEqual(attachment.document.title,'New Title') + self.assertEqual(attachment.document.title, "New Title") + + # ensure attempts to edit attachments not attached to this liaison statement fail + other_attachment = LiaisonStatementAttachmentFactory(document__name="liaiatt-2") + url = urlreverse( + "ietf.liaisons.views.liaison_edit_attachment", + kwargs=dict( + object_id=attachment.statement_id, doc_id=other_attachment.document_id + ), + ) + r = self.client.get(url) + self.assertEqual(r.status_code, 404) + r = self.client.post(url, dict(title="New Title")) + 
self.assertEqual(r.status_code, 404) def test_liaison_delete_attachment(self): attachment = LiaisonStatementAttachmentFactory(document__name='liaiatt-1') diff --git a/ietf/liaisons/views.py b/ietf/liaisons/views.py index 9710149c90..f9136a8d14 100644 --- a/ietf/liaisons/views.py +++ b/ietf/liaisons/views.py @@ -7,15 +7,14 @@ from django.contrib import messages from django.urls import reverse as urlreverse -from django.core.exceptions import ValidationError +from django.core.exceptions import ValidationError, ObjectDoesNotExist from django.core.validators import validate_email from django.db.models import Q, Prefetch -from django.http import HttpResponse +from django.http import Http404, HttpResponse from django.shortcuts import render, get_object_or_404, redirect import debug # pyflakes:ignore -from ietf.doc.models import Document from ietf.ietfauth.utils import role_required, has_role from ietf.group.models import Group, Role from ietf.liaisons.models import (LiaisonStatement,LiaisonStatementEvent, @@ -444,7 +443,11 @@ def liaison_edit(request, object_id): def liaison_edit_attachment(request, object_id, doc_id): '''Edit the Liaison Statement attachment title''' liaison = get_object_or_404(LiaisonStatement, pk=object_id) - doc = get_object_or_404(Document, pk=doc_id) + try: + doc = liaison.attachments.get(pk=doc_id) + except ObjectDoesNotExist: + raise Http404 + if not can_edit_liaison(request.user, liaison): permission_denied(request, "You are not authorized for this action.") From 76f56ceabf4a101c7a8f72946778b7bb5b63f570 Mon Sep 17 00:00:00 2001 From: Robert Sparks Date: Thu, 18 Sep 2025 10:20:30 -0500 Subject: [PATCH 206/405] fix: adjust anachronystic urls - doc_ids became numeric years ago. 
(#9549) --- ietf/liaisons/urls.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/ietf/liaisons/urls.py b/ietf/liaisons/urls.py index a4afbfef5d..0fbd29425e 100644 --- a/ietf/liaisons/urls.py +++ b/ietf/liaisons/urls.py @@ -26,8 +26,8 @@ url(r'^(?P\d+)/$', views.liaison_detail), url(r'^(?P\d+)/addcomment/$', views.add_comment), url(r'^(?P\d+)/edit/$', views.liaison_edit), - url(r'^(?P\d+)/edit-attachment/(?P[A-Za-z0-9._+-]+)$', views.liaison_edit_attachment), - url(r'^(?P\d+)/delete-attachment/(?P[A-Za-z0-9._+-]+)$', views.liaison_delete_attachment), + url(r'^(?P\d+)/edit-attachment/(?P[0-9]+)$', views.liaison_edit_attachment), + url(r'^(?P\d+)/delete-attachment/(?P[0-9]+)$', views.liaison_delete_attachment), url(r'^(?P\d+)/history/$', views.liaison_history), url(r'^(?P\d+)/reply/$', views.liaison_reply), url(r'^(?P\d+)/resend/$', views.liaison_resend), From ad5823e0c6ebaa88ae6c949e1bdefeab951cb280 Mon Sep 17 00:00:00 2001 From: Robert Sparks Date: Thu, 18 Sep 2025 10:22:31 -0500 Subject: [PATCH 207/405] fix: properly guard state transitions (#9554) Co-authored-by: Jennifer Richards --- ietf/liaisons/tests.py | 3 +++ ietf/liaisons/views.py | 32 +++++++++++++++++++------------- 2 files changed, 22 insertions(+), 13 deletions(-) diff --git a/ietf/liaisons/tests.py b/ietf/liaisons/tests.py index 2f86f38789..c3ff9dbe94 100644 --- a/ietf/liaisons/tests.py +++ b/ietf/liaisons/tests.py @@ -363,6 +363,9 @@ def test_approval_process(self): self.assertEqual(len(q('form button[name=approved]')), 0) # check the detail page / authorized + r = self.client.post(url, dict(dead="1")) + self.assertEqual(r.status_code, 403) + mailbox_before = len(outbox) self.client.login(username="ulm-liaiman", password="ulm-liaiman+password") r = self.client.get(url) self.assertEqual(r.status_code, 200) diff --git a/ietf/liaisons/views.py b/ietf/liaisons/views.py index f9136a8d14..6a1e6e3def 100644 --- a/ietf/liaisons/views.py +++ b/ietf/liaisons/views.py @@ -7,7 +7,7 @@ from 
django.contrib import messages from django.urls import reverse as urlreverse -from django.core.exceptions import ValidationError, ObjectDoesNotExist +from django.core.exceptions import ValidationError, ObjectDoesNotExist, PermissionDenied from django.core.validators import validate_email from django.db.models import Q, Prefetch from django.http import Http404, HttpResponse @@ -404,22 +404,28 @@ def liaison_detail(request, object_id): if request.method == 'POST': - if request.POST.get('approved'): - liaison.change_state(state_id='approved',person=person) - liaison.change_state(state_id='posted',person=person) - send_liaison_by_email(request, liaison) - messages.success(request,'Liaison Statement Approved and Posted') - elif request.POST.get('dead'): - liaison.change_state(state_id='dead',person=person) - messages.success(request,'Liaison Statement Killed') - elif request.POST.get('resurrect'): - liaison.change_state(state_id='pending',person=person) - messages.success(request,'Liaison Statement Resurrected') - elif request.POST.get('do_action_taken') and can_take_care: + if request.POST.get('do_action_taken') and can_take_care: liaison.tags.remove('required') liaison.tags.add('taken') can_take_care = False messages.success(request,'Action handled') + else: + if can_edit: + if request.POST.get('approved'): + liaison.change_state(state_id='approved',person=person) + liaison.change_state(state_id='posted',person=person) + send_liaison_by_email(request, liaison) + messages.success(request,'Liaison Statement Approved and Posted') + elif request.POST.get('dead'): + liaison.change_state(state_id='dead',person=person) + messages.success(request,'Liaison Statement Killed') + elif request.POST.get('resurrect'): + liaison.change_state(state_id='pending',person=person) + messages.success(request,'Liaison Statement Resurrected') + else: + pass + else: + raise PermissionDenied() relations_by = [i.target for i in liaison.source_of_set.filter(target__state__slug='posted')] 
relations_to = [i.source for i in liaison.target_of_set.filter(source__state__slug='posted')] From e1c75d46161939acaf093bb50cf91af9a2cbb7ea Mon Sep 17 00:00:00 2001 From: Robert Sparks Date: Thu, 18 Sep 2025 10:32:26 -0500 Subject: [PATCH 208/405] fix: disable removing liaison attachments pending reimplementation (#9555) --- ietf/liaisons/tests.py | 16 ++++++++-------- ietf/liaisons/views.py | 39 ++++++++++++++++++++++----------------- 2 files changed, 30 insertions(+), 25 deletions(-) diff --git a/ietf/liaisons/tests.py b/ietf/liaisons/tests.py index c3ff9dbe94..fd1c22be77 100644 --- a/ietf/liaisons/tests.py +++ b/ietf/liaisons/tests.py @@ -971,14 +971,14 @@ def test_liaison_edit_attachment(self): r = self.client.post(url, dict(title="New Title")) self.assertEqual(r.status_code, 404) - def test_liaison_delete_attachment(self): - attachment = LiaisonStatementAttachmentFactory(document__name='liaiatt-1') - liaison = attachment.statement - url = urlreverse('ietf.liaisons.views.liaison_delete_attachment', kwargs=dict(object_id=liaison.pk,attach_id=attachment.pk)) - login_testing_unauthorized(self, "secretary", url) - r = self.client.get(url) - self.assertEqual(r.status_code, 302) - self.assertEqual(liaison.liaisonstatementattachment_set.filter(removed=False).count(),0) + # def test_liaison_delete_attachment(self): + # attachment = LiaisonStatementAttachmentFactory(document__name='liaiatt-1') + # liaison = attachment.statement + # url = urlreverse('ietf.liaisons.views.liaison_delete_attachment', kwargs=dict(object_id=liaison.pk,attach_id=attachment.pk)) + # login_testing_unauthorized(self, "secretary", url) + # r = self.client.get(url) + # self.assertEqual(r.status_code, 302) + # self.assertEqual(liaison.liaisonstatementattachment_set.filter(removed=False).count(),0) def test_in_response(self): '''A statement with purpose=in_response must have related statement specified''' diff --git a/ietf/liaisons/views.py b/ietf/liaisons/views.py index 6a1e6e3def..6a6f579714 100644 
--- a/ietf/liaisons/views.py +++ b/ietf/liaisons/views.py @@ -17,8 +17,7 @@ from ietf.ietfauth.utils import role_required, has_role from ietf.group.models import Group, Role -from ietf.liaisons.models import (LiaisonStatement,LiaisonStatementEvent, - LiaisonStatementAttachment) +from ietf.liaisons.models import LiaisonStatement,LiaisonStatementEvent from ietf.liaisons.utils import (get_person_for_user, can_add_outgoing_liaison, can_add_incoming_liaison, can_edit_liaison,can_submit_liaison_required, can_add_liaison) @@ -377,23 +376,29 @@ def liaison_history(request, object_id): def liaison_delete_attachment(request, object_id, attach_id): liaison = get_object_or_404(LiaisonStatement, pk=object_id) - attach = get_object_or_404(LiaisonStatementAttachment, pk=attach_id) + if not can_edit_liaison(request.user, liaison): permission_denied(request, "You are not authorized for this action.") - - # FIXME: this view should use POST instead of GET when deleting - attach.removed = True - attach.save() - - # create event - LiaisonStatementEvent.objects.create( - type_id='modified', - by=get_person_for_user(request.user), - statement=liaison, - desc='Attachment Removed: {}'.format(attach.document.title) - ) - messages.success(request, 'Attachment Deleted') - return redirect('ietf.liaisons.views.liaison_detail', object_id=liaison.pk) + else: + permission_denied(request, "This operation is temporarily unavailable. 
Ask the secretariat to mark the attachment as removed using the admin.") + + # The following will be replaced with a different approach in the next generation of the liaison tool + # attach = get_object_or_404(LiaisonStatementAttachment, pk=attach_id) + + # # FIXME: this view should use POST instead of GET when deleting + # attach.removed = True + # debug.say("Got here") + # attach.save() + + # # create event + # LiaisonStatementEvent.objects.create( + # type_id='modified', + # by=get_person_for_user(request.user), + # statement=liaison, + # desc='Attachment Removed: {}'.format(attach.document.title) + # ) + # messages.success(request, 'Attachment Deleted') + # return redirect('ietf.liaisons.views.liaison_detail', object_id=liaison.pk) def liaison_detail(request, object_id): liaison = get_object_or_404(LiaisonStatement, pk=object_id) From 87e550c74ffef0f5b64b78a6a487321ebe923f11 Mon Sep 17 00:00:00 2001 From: Jennifer Richards Date: Fri, 19 Sep 2025 13:55:14 -0300 Subject: [PATCH 209/405] refactor: compare tokens using compare_digest (#9562) * refactor: compare tokens using compare_digest * test: test new helper * refactor: const-time for auth_token check also --- ietf/submit/tests.py | 31 +++++++++++++++++++++++++++- ietf/submit/views.py | 49 +++++++++++++++++++++++++++++++++++--------- 2 files changed, 69 insertions(+), 11 deletions(-) diff --git a/ietf/submit/tests.py b/ietf/submit/tests.py index 6b9002502b..ede63d2752 100644 --- a/ietf/submit/tests.py +++ b/ietf/submit/tests.py @@ -51,8 +51,9 @@ process_submission_xml, process_uploaded_submission, process_and_validate_submission, apply_yang_checker_to_draft, run_all_yang_model_checks) +from ietf.submit.views import access_token_is_valid, auth_token_is_valid from ietf.utils import tool_version -from ietf.utils.accesstoken import generate_access_token +from ietf.utils.accesstoken import generate_access_token, generate_random_key from ietf.utils.mail import outbox, get_payload_text from ietf.utils.test_runner 
import TestBlobstoreManager from ietf.utils.test_utils import login_testing_unauthorized, TestCase @@ -3500,3 +3501,31 @@ def test_submissionerror(self, mock_sanitize_message): mock_sanitize_message.call_args_list, [mock.call("hi"), mock.call("there")], ) + + +class HelperTests(TestCase): + def test_access_token_is_valid(self): + submission: Submission = SubmissionFactory() # type: ignore + valid_token = submission.access_token() + access_key = submission.access_key # accept this for backwards compat + invalid_token = "not the valid token" + self.assertTrue(access_token_is_valid(submission, valid_token)) + self.assertTrue(access_token_is_valid(submission, access_key)) + self.assertFalse(access_token_is_valid(submission, invalid_token)) + + def test_auth_token_is_valid(self): + auth_key = generate_random_key() + submission: Submission = SubmissionFactory(auth_key = auth_key) # type: ignore + valid_token = generate_access_token(submission.auth_key) + auth_key = submission.auth_key # accept this for backwards compat + invalid_token = "not the valid token" + self.assertTrue(auth_token_is_valid(submission, valid_token)) + self.assertTrue(auth_token_is_valid(submission, auth_key)) + self.assertFalse(auth_token_is_valid(submission, invalid_token)) + + submission.auth_key = "" + submission.save() + self.assertFalse(auth_token_is_valid(submission, valid_token)) + self.assertFalse(auth_token_is_valid(submission, auth_key)) + self.assertFalse(auth_token_is_valid(submission, invalid_token)) + self.assertFalse(auth_token_is_valid(submission, "")) diff --git a/ietf/submit/views.py b/ietf/submit/views.py index 043b613016..8329a312bb 100644 --- a/ietf/submit/views.py +++ b/ietf/submit/views.py @@ -2,6 +2,7 @@ # -*- coding: utf-8 -*- import re import datetime +from secrets import compare_digest from typing import Optional, cast # pyflakes:ignore from urllib.parse import urljoin @@ -255,19 +256,48 @@ def search_submission(request): ) -def can_edit_submission(user, submission, 
access_token): - key_matched = access_token and submission.access_token() == access_token - if not key_matched: key_matched = submission.access_key == access_token # backwards-compat - return key_matched or has_role(user, "Secretariat") +def access_token_is_valid(submission: Submission, access_token: str): + """Check whether access_token is valid for submission, in constant time""" + token_matched = compare_digest(submission.access_token(), access_token) + # also compare key directly for backwards compatibility + key_matched = compare_digest(submission.access_key, access_token) + return token_matched or key_matched + + +def auth_token_is_valid(submission: Submission, auth_token: str): + """Check whether auth_token is valid for submission, in constant time""" + auth_key = submission.auth_key + if not auth_key: + # Make the same calls as the other branch to keep constant time, then + # return False because there is no auth key + compare_digest(generate_access_token("fake"), auth_token) + compare_digest("fake", auth_token) + return False + else: + token_matched = compare_digest(generate_access_token(auth_key), auth_token) + # also compare key directly for backwards compatibility + key_matched = compare_digest(auth_key, auth_token) + return token_matched or key_matched + + +def can_edit_submission(user, submission: Submission, access_token: str | None): + if has_role(user, "Secretariat"): + return True + elif not access_token: + return False + return access_token_is_valid(submission, access_token) + def submission_status(request, submission_id, access_token=None): # type: (HttpRequest, str, Optional[str]) -> HttpResponse submission = get_object_or_404(Submission, pk=submission_id) - key_matched = access_token and submission.access_token() == access_token - if not key_matched: key_matched = submission.access_key == access_token # backwards-compat - if access_token and not key_matched: - raise Http404 + if access_token: + key_matched = access_token_is_valid(submission, 
access_token) + if not key_matched: + raise Http404 + else: + key_matched = False if submission.state.slug == "cancel": errors = {} @@ -621,8 +651,7 @@ def edit_submission(request, submission_id, access_token=None): def confirm_submission(request, submission_id, auth_token): submission = get_object_or_404(Submission, pk=submission_id) - key_matched = submission.auth_key and auth_token == generate_access_token(submission.auth_key) - if not key_matched: key_matched = auth_token == submission.auth_key # backwards-compat + key_matched = submission.auth_key and auth_token_is_valid(submission, auth_token) if request.method == 'POST' and submission.state_id in ("auth", "aut-appr") and key_matched: # Set a temporary state 'confirmed' to avoid entering this code From 4be83ce312dde9b434f86cff928daf5882809239 Mon Sep 17 00:00:00 2001 From: Robert Sparks Date: Fri, 19 Sep 2025 11:58:49 -0500 Subject: [PATCH 210/405] fix: remove deprecated iesg docs view, link from ad dashboard to ad view of working groups (#9563) * fix: remove deprecated iesg docs view * fix: link from ad dashboard to ad view of working groups --- ietf/doc/tests.py | 11 --- ietf/doc/urls.py | 4 +- ietf/doc/views_search.py | 27 +----- ietf/templates/doc/ad_list.html | 5 +- .../templates/doc/drafts_in_iesg_process.html | 83 ------------------- 5 files changed, 7 insertions(+), 123 deletions(-) delete mode 100644 ietf/templates/doc/drafts_in_iesg_process.html diff --git a/ietf/doc/tests.py b/ietf/doc/tests.py index fa8c7fa4fc..16dcfb7754 100644 --- a/ietf/doc/tests.py +++ b/ietf/doc/tests.py @@ -449,17 +449,6 @@ def test_drafts_in_last_call(self): self.assertContains(r, draft.title) self.assertContains(r, escape(draft.action_holders.first().name)) - def test_in_iesg_process(self): - doc_in_process = IndividualDraftFactory() - doc_in_process.action_holders.set([PersonFactory()]) - doc_in_process.set_state(State.objects.get(type='draft-iesg', slug='lc')) - doc_not_in_process = IndividualDraftFactory() - r = 
self.client.get(urlreverse('ietf.doc.views_search.drafts_in_iesg_process')) - self.assertEqual(r.status_code, 200) - self.assertContains(r, doc_in_process.title) - self.assertContains(r, escape(doc_in_process.action_holders.first().name)) - self.assertNotContains(r, doc_not_in_process.title) - def test_indexes(self): draft = IndividualDraftFactory() rfc = WgRfcFactory() diff --git a/ietf/doc/urls.py b/ietf/doc/urls.py index 6f1b698a9f..7b444782d7 100644 --- a/ietf/doc/urls.py +++ b/ietf/doc/urls.py @@ -53,13 +53,13 @@ url(r'^ad/?$', views_search.ad_workload), url(r'^ad/(?P[^/]+)/?$', views_search.docs_for_ad), url(r'^ad2/(?P[\w.-]+)/$', RedirectView.as_view(url='/doc/ad/%(name)s/', permanent=True)), - url(r'^for_iesg/?$', views_search.docs_for_iesg), + url(r'^for_iesg/?$', RedirectView.as_view(pattern_name='ietf.doc.views_search.docs_for_iesg', permanent=False)), url(r'^rfc-status-changes/?$', views_status_change.rfc_status_changes), url(r'^start-rfc-status-change/(?:%(name)s/)?$' % settings.URL_REGEXPS, views_status_change.start_rfc_status_change), url(r'^bof-requests/?$', views_bofreq.bof_requests), url(r'^bof-requests/new/$', views_bofreq.new_bof_request), url(r'^statement/new/$', views_statement.new_statement), - url(r'^iesg/?$', views_search.drafts_in_iesg_process), + url(r'^iesg/?$', views_search.docs_for_iesg), url(r'^email-aliases/?$', views_doc.email_aliases), url(r'^downref/?$', views_downref.downref_registry), url(r'^downref/add/?$', views_downref.downref_registry_add), diff --git a/ietf/doc/views_search.py b/ietf/doc/views_search.py index 67ff0c2f21..2144c23e06 100644 --- a/ietf/doc/views_search.py +++ b/ietf/doc/views_search.py @@ -59,7 +59,7 @@ import debug # pyflakes:ignore from ietf.doc.models import ( Document, DocHistory, State, - LastCallDocEvent, NewRevisionDocEvent, IESG_SUBSTATE_TAGS, + NewRevisionDocEvent, IESG_SUBSTATE_TAGS, IESG_BALLOT_ACTIVE_STATES, IESG_STATCHG_CONFLREV_ACTIVE_STATES, IESG_CHARTER_ACTIVE_STATES ) from ietf.doc.fields 
import select2_id_doc_name_json @@ -849,31 +849,6 @@ def drafts_in_last_call(request): 'form':form, 'docs':results, 'meta':meta, 'pages':pages }) -def drafts_in_iesg_process(request): - states = State.objects.filter(type="draft-iesg").exclude(slug__in=('idexists', 'pub', 'dead', 'rfcqueue')) - title = "Documents in IESG process" - - grouped_docs = [] - - for s in states.order_by("order"): - docs = Document.objects.filter(type="draft", states=s).distinct().order_by("time").select_related("ad", "group", "group__parent") - if docs: - if s.slug == "lc": - for d in docs: - e = d.latest_event(LastCallDocEvent, type="sent_last_call") - # If we don't have an event, use an arbitrary date in the past (but not datetime.datetime.min, - # which causes problems with timezone conversions) - d.lc_expires = e.expires if e else datetime.datetime(1950, 1, 1) - docs = list(docs) - docs.sort(key=lambda d: d.lc_expires) - - grouped_docs.append((s, docs)) - - return render(request, 'doc/drafts_in_iesg_process.html', { - "grouped_docs": grouped_docs, - "title": title, - }) - def recent_drafts(request, days=7): slowcache = caches['slowpages'] cache_key = f'recentdraftsview{days}' diff --git a/ietf/templates/doc/ad_list.html b/ietf/templates/doc/ad_list.html index 7f7e95a873..cac709021e 100644 --- a/ietf/templates/doc/ad_list.html +++ b/ietf/templates/doc/ad_list.html @@ -33,7 +33,10 @@

    IESG Dashboard

    are only shown to logged-in Area Directors. {% endif %} -

    Documents in IESG Processing

    +

    + Documents in IESG Processing + IESG view of Working Groups +

    {% for dt in metadata %}

    {{ dt.type.1 }} State Counts

    diff --git a/ietf/templates/doc/drafts_in_iesg_process.html b/ietf/templates/doc/drafts_in_iesg_process.html deleted file mode 100644 index d9b09e984e..0000000000 --- a/ietf/templates/doc/drafts_in_iesg_process.html +++ /dev/null @@ -1,83 +0,0 @@ -{% extends "base.html" %} -{# Copyright The IETF Trust 2015, All Rights Reserved #} -{% load origin %} -{% load ietf_filters static %} -{% load textfilters person_filters %} -{% block pagehead %} - -{% endblock %} -{% block title %}{{ title }}{% endblock %} -{% block content %} - {% origin %} -

    {{ title }}

    -

    This view is deprecated, and will soon redirect to a different representation

    - - - - - - - - - - - {% for state, docs in grouped_docs %} - - - - - - - {% for doc in docs %} - - - - - - - - {% endfor %} - - {% endfor %} -
    Area - {% if state.slug == "lc" %} - Expires at - {% else %} - Date - {% endif %} - DocumentIntended levelAD
    {{ state.name }}
    - {% if doc.area_acronym %} - {{ doc.area_acronym }} - {% endif %} - - {% if state.slug == "lc" %} - {% if doc.lc_expires %}{{ doc.lc_expires|date:"Y-m-d" }}{% endif %} - {% else %} - {{ doc.time|date:"Y-m-d" }} - {% endif %} - - {{ doc.name }} -
    - {{ doc.title }} - {% if doc.action_holders_enabled and doc.action_holders.exists %} -
    - Action holder{{ doc.documentactionholder_set.all|pluralize }}: - {% for action_holder in doc.documentactionholder_set.all %} - {% person_link action_holder.person title=action_holder.role_for_doc %}{% if action_holder|action_holder_badge %} {{ action_holder|action_holder_badge }}{% endif %}{% if not forloop.last %},{% endif %} - {% endfor %} - {% endif %} - {% if doc.note %} -
    - Note: {{ doc.note|urlize_ietf_docs|linkify|linebreaksbr }} - {% endif %} -
    - {% if doc.intended_std_level %} - {{ doc.intended_std_level.name }} - {% else %} - (None) - {% endif %} - {% person_link doc.ad %}
    -{% endblock %} -{% block js %} - -{% endblock %} \ No newline at end of file From 5e1f46d05cc23faa95b741f9133f40fe58c1cd46 Mon Sep 17 00:00:00 2001 From: Eric Vyncke Date: Mon, 29 Sep 2025 15:47:23 +0200 Subject: [PATCH 211/405] feat: Distinguish I-Ds on WG plate from I-Ds on IESG plate (#9214) * Add "Outside of the WG Internet-Draft" when IESG state != idexists * No plural forms in the dividers * Use different search_heading * Use the right stream_id * Adding tests_info coverage for prepare_group_documents * fix: move identifying and sorting doxs with IESG into search utility. * fix: improve ordering conditional --------- Co-authored-by: Robert Sparks --- ietf/doc/utils_search.py | 9 ++++++++- ietf/group/tests_info.py | 22 +++++++++++++++++++++- ietf/group/views.py | 1 - 3 files changed, 29 insertions(+), 3 deletions(-) diff --git a/ietf/doc/utils_search.py b/ietf/doc/utils_search.py index cfc8a872f8..a5f461f9bb 100644 --- a/ietf/doc/utils_search.py +++ b/ietf/doc/utils_search.py @@ -108,7 +108,10 @@ def fill_in_document_table_attributes(docs, have_telechat_date=False): d.search_heading = "Withdrawn Internet-Draft" d.expirable = False else: - d.search_heading = "%s Internet-Draft" % d.get_state() + if d.type_id == "draft" and d.stream_id == 'ietf' and d.get_state_slug('draft-iesg') != 'idexists': # values can be: ad-eval idexists approved rfcqueue dead iesg-eva + d.search_heading = "%s with the IESG Internet-Draft" % d.get_state() + else: + d.search_heading = "%s Internet-Draft" % d.get_state() if state_slug == "active": d.expirable = d.pk in expirable_pks else: @@ -221,6 +224,10 @@ def num(i): if d.type_id == "draft": res.append(num(["Active", "Expired", "Replaced", "Withdrawn", "RFC"].index(d.search_heading.split()[0]))) + if "with the IESG" in d.search_heading: + res.append("1") + else: + res.append("0") else: res.append(d.type_id); res.append("-"); diff --git a/ietf/group/tests_info.py b/ietf/group/tests_info.py index eb85860ece..34f8500854 100644 --- 
a/ietf/group/tests_info.py +++ b/ietf/group/tests_info.py @@ -27,7 +27,7 @@ from ietf.community.models import CommunityList from ietf.community.utils import reset_name_contains_index_for_rule -from ietf.doc.factories import WgDraftFactory, IndividualDraftFactory, CharterFactory, BallotDocEventFactory +from ietf.doc.factories import WgDraftFactory, RgDraftFactory, IndividualDraftFactory, CharterFactory, BallotDocEventFactory from ietf.doc.models import Document, DocEvent, State from ietf.doc.storage_utils import retrieve_str from ietf.doc.utils_charter import charter_name_for_group @@ -413,6 +413,7 @@ def test_group_documents(self): self.assertContains(r, draft3.name) for ah in draft3.action_holders.all(): self.assertContains(r, escape(ah.name)) + self.assertContains(r, "Active with the IESG Internet-Draft") # draft3 is pub-req hence should have such a divider self.assertContains(r, 'for 173 days', count=1) # the old_dah should be tagged self.assertContains(r, draft4.name) self.assertNotContains(r, draft5.name) @@ -425,6 +426,25 @@ def test_group_documents(self): q = PyQuery(r.content) self.assertTrue(any([draft2.name in x.attrib['href'] for x in q('table td a.track-untrack-doc')])) + # Let's also check the IRTF stream + rg = GroupFactory(type_id='rg') + setup_default_community_list_for_group(rg) + rgDraft = RgDraftFactory(group=rg) + rgDraft4 = RgDraftFactory(group=rg) + rgDraft4.set_state(State.objects.get(slug='irsg-w')) + rgDraft7 = RgDraftFactory(group=rg) + rgDraft7.set_state(State.objects.get(type='draft-stream-%s' % rgDraft7.stream_id, slug='dead')) + for url in group_urlreverse_list(rg, 'ietf.group.views.group_documents'): + with self.settings(DOC_ACTION_HOLDER_MAX_AGE_DAYS=20): + r = self.client.get(url) + self.assertEqual(r.status_code, 200) + self.assertContains(r, rgDraft.name) + self.assertContains(r, rg.name) + self.assertContains(r, rg.acronym) + self.assertNotContains(r, draft3.name) # As draft3 is a WG draft, it should not be listed here + 
self.assertContains(r, rgDraft4.name) + self.assertNotContains(r, rgDraft7.name) + # test the txt version too while we're at it for url in group_urlreverse_list(group, 'ietf.group.views.group_documents_txt'): r = self.client.get(url) diff --git a/ietf/group/views.py b/ietf/group/views.py index bc785ff81e..efe3eca15d 100644 --- a/ietf/group/views.py +++ b/ietf/group/views.py @@ -443,7 +443,6 @@ def prepare_group_documents(request, group, clist): return docs, meta, docs_related, meta_related - def get_leadership(group_type): people = Person.objects.filter( role__name__slug="chair", From ba8b73190df413c39deaa6b546ad2bc5405fd86c Mon Sep 17 00:00:00 2001 From: Jennifer Richards Date: Tue, 30 Sep 2025 13:40:33 -0300 Subject: [PATCH 212/405] ci: DB persistence for blobdb, too --- k8s/settings_local.py | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/k8s/settings_local.py b/k8s/settings_local.py index c1436e158b..c09bd70c86 100644 --- a/k8s/settings_local.py +++ b/k8s/settings_local.py @@ -114,15 +114,17 @@ def _multiline_to_list(s): # Configure persistent connections. A setting of 0 is Django's default. _conn_max_age = os.environ.get("DATATRACKER_DB_CONN_MAX_AGE", "0") -# A string "none" means unlimited age. -DATABASES["default"]["CONN_MAX_AGE"] = ( - None if _conn_max_age.lower() == "none" else int(_conn_max_age) -) +for dbname in ["default", "blobdb"]: + # A string "none" means unlimited age. 
+ DATABASES[dbname]["CONN_MAX_AGE"] = ( + None if _conn_max_age.lower() == "none" else int(_conn_max_age) + ) # Enable connection health checks if DATATRACKER_DB_CONN_HEALTH_CHECK is the string "true" _conn_health_checks = bool( os.environ.get("DATATRACKER_DB_CONN_HEALTH_CHECKS", "false").lower() == "true" ) -DATABASES["default"]["CONN_HEALTH_CHECKS"] = _conn_health_checks +for dbname in ["default", "blobdb"]: + DATABASES[dbname]["CONN_HEALTH_CHECKS"] = _conn_health_checks # DATATRACKER_ADMINS is a newline-delimited list of addresses parseable by email.utils.parseaddr _admins_str = os.environ.get("DATATRACKER_ADMINS", None) From d1cbdcb2afca5987706165a1928fece3da25a5ee Mon Sep 17 00:00:00 2001 From: Jennifer Richards Date: Tue, 7 Oct 2025 14:54:08 -0300 Subject: [PATCH 213/405] chore: fix docker-compose comment (#9679) Allows the commented-out options to work if uncommented. --- docker-compose.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker-compose.yml b/docker-compose.yml index 100119c464..8c6e0ea486 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -36,7 +36,7 @@ services: db: image: ghcr.io/ietf-tools/datatracker-db:latest # build: - # context: .. + # context: . 
# dockerfile: docker/db.Dockerfile restart: unless-stopped volumes: From a8e8b9e95bedececcda0f54bedbe4de8f69d90a2 Mon Sep 17 00:00:00 2001 From: Absit Iniuria Date: Tue, 7 Oct 2025 19:02:37 +0100 Subject: [PATCH 214/405] feat: split liaison_statement_posted mailtrigger into outgoing and incoming (#9553) * fix: add new fixtures and mt slugs * fix: edit mt reverse func * chore: edit multiline and hash comments * fix: adjust migration * chore: remove stray whitespace --------- Co-authored-by: Robert Sparks --- ietf/liaisons/mails.py | 5 +- ietf/liaisons/tests.py | 39 +++++----- ietf/liaisons/views.py | 76 ++----------------- ..._statement_incoming_and_outgoing_posted.py | 72 ++++++++++++++++++ ietf/mailtrigger/utils.py | 71 +++++++++++++++++ ietf/name/fixtures/names.json | 45 ++++++++++- 6 files changed, 217 insertions(+), 91 deletions(-) create mode 100644 ietf/mailtrigger/migrations/0008_liaison_statement_incoming_and_outgoing_posted.py diff --git a/ietf/liaisons/mails.py b/ietf/liaisons/mails.py index 8708c8a078..878aada576 100644 --- a/ietf/liaisons/mails.py +++ b/ietf/liaisons/mails.py @@ -14,7 +14,10 @@ def send_liaison_by_email(request, liaison): subject = 'New Liaison Statement, "%s"' % (liaison.title) from_email = settings.LIAISON_UNIVERSAL_FROM - (to_email, cc) = gather_address_lists('liaison_statement_posted',liaison=liaison) + if liaison.is_outgoing(): + (to_email, cc) = gather_address_lists('liaison_statement_posted_outgoing',liaison=liaison) + else: + (to_email, cc) = gather_address_lists('liaison_statement_posted_incoming',liaison=liaison) bcc = ['statements@ietf.org'] body = render_to_string('liaisons/liaison_mail.txt', dict(liaison=liaison)) diff --git a/ietf/liaisons/tests.py b/ietf/liaisons/tests.py index fd1c22be77..5478f6c302 100644 --- a/ietf/liaisons/tests.py +++ b/ietf/liaisons/tests.py @@ -112,61 +112,61 @@ def test_help_pages(self): class UnitTests(TestCase): - def test_get_cc(self): - from ietf.liaisons.views import get_cc,EMAIL_ALIASES + 
def test_get_contacts_for_liaison_messages_for_group_primary(self): + from ietf.mailtrigger.utils import get_contacts_for_liaison_messages_for_group_primary,EMAIL_ALIASES # test IETF - cc = get_cc(Group.objects.get(acronym='ietf')) + cc = get_contacts_for_liaison_messages_for_group_primary(Group.objects.get(acronym='ietf')) self.assertTrue(EMAIL_ALIASES['IESG'] in cc) self.assertTrue(EMAIL_ALIASES['IETFCHAIR'] in cc) # test IAB - cc = get_cc(Group.objects.get(acronym='iab')) + cc = get_contacts_for_liaison_messages_for_group_primary(Group.objects.get(acronym='iab')) self.assertTrue(EMAIL_ALIASES['IAB'] in cc) self.assertTrue(EMAIL_ALIASES['IABCHAIR'] in cc) # test an Area area = Group.objects.filter(type='area').first() - cc = get_cc(area) + cc = get_contacts_for_liaison_messages_for_group_primary(area) self.assertTrue(EMAIL_ALIASES['IETFCHAIR'] in cc) self.assertTrue(contacts_from_roles([area.ad_role()]) in cc) # test a Working Group wg = Group.objects.filter(type='wg').first() - cc = get_cc(wg) + cc = get_contacts_for_liaison_messages_for_group_primary(wg) self.assertTrue(contacts_from_roles([wg.parent.ad_role()]) in cc) self.assertTrue(contacts_from_roles([wg.get_chair()]) in cc) # test an SDO sdo = RoleFactory(name_id='liaiman',group__type_id='sdo',).group - cc = get_cc(sdo) + cc = get_contacts_for_liaison_messages_for_group_primary(sdo) self.assertTrue(contacts_from_roles([sdo.role_set.filter(name='liaiman').first()]) in cc) # test a cc_contact role cc_contact_role = RoleFactory(name_id='liaison_cc_contact', group=sdo) - cc = get_cc(sdo) + cc = get_contacts_for_liaison_messages_for_group_primary(sdo) self.assertIn(contact_email_from_role(cc_contact_role), cc) - def test_get_contacts_for_group(self): - from ietf.liaisons.views import get_contacts_for_group, EMAIL_ALIASES + def test_get_contacts_for_liaison_messages_for_group_secondary(self): + from ietf.mailtrigger.utils import get_contacts_for_liaison_messages_for_group_secondary,EMAIL_ALIASES - # test 
explicit + # test explicit group contacts sdo = GroupFactory(type_id='sdo') contact_email = RoleFactory(name_id='liaison_contact', group=sdo).email.address - contacts = get_contacts_for_group(sdo) + contacts = get_contacts_for_liaison_messages_for_group_secondary(sdo) self.assertIsNotNone(contact_email) self.assertIn(contact_email, contacts) # test area area = Group.objects.filter(type='area').first() - contacts = get_contacts_for_group(area) + contacts = get_contacts_for_liaison_messages_for_group_secondary(area) self.assertTrue(area.ad_role().email.address in contacts) # test wg wg = Group.objects.filter(type='wg').first() - contacts = get_contacts_for_group(wg) + contacts = get_contacts_for_liaison_messages_for_group_secondary(wg) self.assertTrue(wg.get_chair().email.address in contacts) # test ietf - contacts = get_contacts_for_group(Group.objects.get(acronym='ietf')) + contacts = get_contacts_for_liaison_messages_for_group_secondary(Group.objects.get(acronym='ietf')) self.assertTrue(EMAIL_ALIASES['IETFCHAIR'] in contacts) # test iab - contacts = get_contacts_for_group(Group.objects.get(acronym='iab')) + contacts = get_contacts_for_liaison_messages_for_group_secondary(Group.objects.get(acronym='iab')) self.assertTrue(EMAIL_ALIASES['IABCHAIR'] in contacts) # test iesg - contacts = get_contacts_for_group(Group.objects.get(acronym='iesg')) + contacts = get_contacts_for_liaison_messages_for_group_secondary(Group.objects.get(acronym='iesg')) self.assertTrue(EMAIL_ALIASES['IESG'] in contacts) def test_needs_approval(self): @@ -786,8 +786,11 @@ def test_add_incoming_liaison(self): self.assertTrue("Liaison Statement" in outbox[-1]["Subject"]) self.assertTrue('to_contacts@' in outbox[-1]['To']) + self.assertTrue(submitter.email_address(), outbox[-1]['To']) self.assertTrue('cc@' in outbox[-1]['Cc']) + + def test_add_outgoing_liaison(self): RoleFactory(name_id='liaiman',group__type_id='sdo', person__user__username='ulm-liaiman') wg = 
RoleFactory(name_id='chair',person__user__username='marschairman',group__acronym='mars').group @@ -867,6 +870,8 @@ def test_add_outgoing_liaison(self): self.assertEqual(len(outbox), mailbox_before + 1) self.assertTrue("Liaison Statement" in outbox[-1]["Subject"]) self.assertTrue('aread@' in outbox[-1]['To']) + self.assertTrue(submitter.email_address(), outbox[-1]['Cc']) + def test_add_outgoing_liaison_unapproved_post_only(self): RoleFactory(name_id='liaiman',group__type_id='sdo', person__user__username='ulm-liaiman') diff --git a/ietf/liaisons/views.py b/ietf/liaisons/views.py index 6a6f579714..f54a023357 100644 --- a/ietf/liaisons/views.py +++ b/ietf/liaisons/views.py @@ -27,14 +27,6 @@ from ietf.name.models import LiaisonStatementTagName from ietf.utils.response import permission_denied -EMAIL_ALIASES = { - "IETFCHAIR": "The IETF Chair ", - "IESG": "The IESG ", - "IAB": "The IAB ", - "IABCHAIR": "The IAB Chair ", -} - - # ------------------------------------------------- # Helper Functions # ------------------------------------------------- @@ -94,64 +86,6 @@ def contacts_from_roles(roles): emails = [ contact_email_from_role(r) for r in roles ] return ','.join(emails) -def get_cc(group): - '''Returns list of emails to use as CC for group. 
Simplified refactor of IETFHierarchy - get_cc() and get_from_cc() - ''' - emails = [] - - # role based CCs - if group.acronym in ('ietf','iesg'): - emails.append(EMAIL_ALIASES['IESG']) - emails.append(EMAIL_ALIASES['IETFCHAIR']) - elif group.acronym in ('iab'): - emails.append(EMAIL_ALIASES['IAB']) - emails.append(EMAIL_ALIASES['IABCHAIR']) - elif group.type_id == 'area': - emails.append(EMAIL_ALIASES['IETFCHAIR']) - ad_roles = group.role_set.filter(name='ad') - emails.extend([ contact_email_from_role(r) for r in ad_roles ]) - elif group.type_id == 'wg': - ad_roles = group.parent.role_set.filter(name='ad') - emails.extend([ contact_email_from_role(r) for r in ad_roles ]) - chair_roles = group.role_set.filter(name='chair') - emails.extend([ contact_email_from_role(r) for r in chair_roles ]) - if group.list_email: - emails.append('{} Discussion List <{}>'.format(group.name,group.list_email)) - elif group.type_id == 'sdo': - liaiman_roles = group.role_set.filter(name='liaiman') - emails.extend([ contact_email_from_role(r) for r in liaiman_roles ]) - - # explicit CCs - liaison_cc_roles = group.role_set.filter(name='liaison_cc_contact') - emails.extend([ contact_email_from_role(r) for r in liaison_cc_roles ]) - - return emails - -def get_contacts_for_group(group): - '''Returns default contacts for groups as a comma separated string''' - # use explicit default contacts if defined - explicit_contacts = contacts_from_roles(group.role_set.filter(name='liaison_contact')) - if explicit_contacts: - return explicit_contacts - - # otherwise construct based on group type - contacts = [] - if group.type_id == 'area': - roles = group.role_set.filter(name='ad') - contacts.append(contacts_from_roles(roles)) - elif group.type_id == 'wg': - roles = group.role_set.filter(name='chair') - contacts.append(contacts_from_roles(roles)) - elif group.acronym == 'ietf': - contacts.append(EMAIL_ALIASES['IETFCHAIR']) - elif group.acronym == 'iab': - contacts.append(EMAIL_ALIASES['IABCHAIR']) - 
elif group.acronym == 'iesg': - contacts.append(EMAIL_ALIASES['IESG']) - - return ','.join(contacts) - def get_details_tabs(stmt, selected): return [ t + (t[0].lower() == selected.lower(),) @@ -207,6 +141,8 @@ def post_only(group,person): # ------------------------------------------------- @can_submit_liaison_required def ajax_get_liaison_info(request): + from ietf.mailtrigger.utils import get_contacts_for_liaison_messages_for_group_primary,get_contacts_for_liaison_messages_for_group_secondary + '''Returns dictionary of info to update entry form given the groups that have been selected ''' @@ -229,14 +165,14 @@ def ajax_get_liaison_info(request): result = {'response_contacts':[],'to_contacts': [], 'cc': [], 'needs_approval': False, 'post_only': False, 'full_list': []} for group in from_groups: - cc.extend(get_cc(group)) + cc.extend(get_contacts_for_liaison_messages_for_group_primary(group)) does_need_approval.append(needs_approval(group,person)) can_post_only.append(post_only(group,person)) - response_contacts.append(get_contacts_for_group(group)) + response_contacts.append(get_contacts_for_liaison_messages_for_group_secondary(group)) for group in to_groups: - cc.extend(get_cc(group)) - to_contacts.append(get_contacts_for_group(group)) + cc.extend(get_contacts_for_liaison_messages_for_group_primary(group)) + to_contacts.append(get_contacts_for_liaison_messages_for_group_secondary(group)) # if there are from_groups and any need approval if does_need_approval: diff --git a/ietf/mailtrigger/migrations/0008_liaison_statement_incoming_and_outgoing_posted.py b/ietf/mailtrigger/migrations/0008_liaison_statement_incoming_and_outgoing_posted.py new file mode 100644 index 0000000000..189a783a2e --- /dev/null +++ b/ietf/mailtrigger/migrations/0008_liaison_statement_incoming_and_outgoing_posted.py @@ -0,0 +1,72 @@ +# Copyright The IETF Trust 2025, All Rights Reserved + +from django.db import migrations + + +def forward(apps, schema_editor): + Mailtrigger = 
apps.get_model("mailtrigger", "MailTrigger") + Recipient = apps.get_model("mailtrigger", "Recipient") + recipients_to = Recipient.objects.get(pk="liaison_to_contacts") + recipients_cc = list( + Recipient.objects.filter( + slug__in=( + "liaison_cc", + "liaison_coordinators", + "liaison_response_contacts", + "liaison_technical_contacts", + ) + ) + ) + recipient_from = Recipient.objects.get(pk="liaison_from_contact") + + liaison_posted_outgoing = Mailtrigger.objects.create( + slug="liaison_statement_posted_outgoing", + desc="Recipients for a message when a new outgoing liaison statement is posted", + ) + liaison_posted_outgoing.to.add(recipients_to) + liaison_posted_outgoing.cc.add(*recipients_cc) + liaison_posted_outgoing.cc.add(recipient_from) + + liaison_posted_incoming = Mailtrigger.objects.create( + slug="liaison_statement_posted_incoming", + desc="Recipients for a message when a new incoming liaison statement is posted", + ) + liaison_posted_incoming.to.add(recipients_to) + liaison_posted_incoming.cc.add(*recipients_cc) + + Mailtrigger.objects.filter(slug=("liaison_statement_posted")).delete() + + +def reverse(apps, schema_editor): + Mailtrigger = apps.get_model("mailtrigger", "MailTrigger") + Recipient = apps.get_model("mailtrigger", "Recipient") + + Mailtrigger.objects.filter( + slug__in=( + "liaison_statement_posted_outgoing", + "liaison_statement_posted_incoming", + ) + ).delete() + + liaison_statement_posted = Mailtrigger.objects.create( + slug="liaison_statement_posted", + desc="Recipients for a message when a new liaison statement is posted", + ) + + liaison_to_contacts = Recipient.objects.get(slug="liaison_to_contacts") + recipients_ccs = Recipient.objects.filter( + slug__in=( + "liaison_cc", + "liaison_coordinators", + "liaison_response_contacts", + "liaison_technical_contacts", + ) + ) + liaison_statement_posted.to.add(liaison_to_contacts) + liaison_statement_posted.cc.add(*recipients_ccs) + + +class Migration(migrations.Migration): + dependencies = 
[("mailtrigger", "0007_historicalrecipient_historicalmailtrigger")] + + operations = [migrations.RunPython(forward, reverse)] diff --git a/ietf/mailtrigger/utils.py b/ietf/mailtrigger/utils.py index 9915eae3fd..bcdaf5e44e 100644 --- a/ietf/mailtrigger/utils.py +++ b/ietf/mailtrigger/utils.py @@ -9,6 +9,14 @@ from ietf.utils.mail import excludeaddrs +EMAIL_ALIASES = { + "IETFCHAIR": "The IETF Chair ", + "IESG": "The IESG ", + "IAB": "The IAB ", + "IABCHAIR": "The IAB Chair ", +} + + class AddrLists(namedtuple("AddrLists", ["to", "cc"])): __slots__ = () @@ -66,6 +74,69 @@ def get_mailtrigger(slug, create_from_slug_if_not_exists, desc_if_not_exists): return mailtrigger +def get_contacts_for_liaison_messages_for_group_primary(group): + from ietf.liaisons.views import contact_email_from_role + + '''Returns list of emails to use in liaison message for group + ''' + emails = [] + + # role based emails + if group.acronym in ('ietf','iesg'): + emails.append(EMAIL_ALIASES['IESG']) + emails.append(EMAIL_ALIASES['IETFCHAIR']) + elif group.acronym in ('iab'): + emails.append(EMAIL_ALIASES['IAB']) + emails.append(EMAIL_ALIASES['IABCHAIR']) + elif group.type_id == 'area': + emails.append(EMAIL_ALIASES['IETFCHAIR']) + ad_roles = group.role_set.filter(name='ad') + emails.extend([ contact_email_from_role(r) for r in ad_roles ]) + elif group.type_id == 'wg': + ad_roles = group.parent.role_set.filter(name='ad') + emails.extend([ contact_email_from_role(r) for r in ad_roles ]) + chair_roles = group.role_set.filter(name='chair') + emails.extend([ contact_email_from_role(r) for r in chair_roles ]) + if group.list_email: + emails.append('{} Discussion List <{}>'.format(group.name,group.list_email)) + elif group.type_id == 'sdo': + liaiman_roles = group.role_set.filter(name='liaiman') + emails.extend([ contact_email_from_role(r) for r in liaiman_roles ]) + + # explicit CCs + liaison_cc_roles = group.role_set.filter(name='liaison_cc_contact') + emails.extend([ contact_email_from_role(r) for 
r in liaison_cc_roles ]) + + return emails + + +def get_contacts_for_liaison_messages_for_group_secondary(group): + from ietf.liaisons.views import contacts_from_roles + + '''Returns default contacts for groups as a comma separated string''' + # use explicit default contacts if defined + explicit_contacts = contacts_from_roles(group.role_set.filter(name='liaison_contact')) + if explicit_contacts: + return explicit_contacts + + # otherwise construct based on group type + contacts = [] + if group.type_id == 'area': + roles = group.role_set.filter(name='ad') + contacts.append(contacts_from_roles(roles)) + elif group.type_id == 'wg': + roles = group.role_set.filter(name='chair') + contacts.append(contacts_from_roles(roles)) + elif group.acronym == 'ietf': + contacts.append(EMAIL_ALIASES['IETFCHAIR']) + elif group.acronym == 'iab': + contacts.append(EMAIL_ALIASES['IABCHAIR']) + elif group.acronym == 'iesg': + contacts.append(EMAIL_ALIASES['IESG']) + + return ','.join(contacts) + + def gather_relevant_expansions(**kwargs): def starts_with(prefix): return MailTrigger.objects.filter(slug__startswith=prefix).values_list( diff --git a/ietf/name/fixtures/names.json b/ietf/name/fixtures/names.json index c94e15a459..58deb01f0c 100644 --- a/ietf/name/fixtures/names.json +++ b/ietf/name/fixtures/names.json @@ -2638,11 +2638,24 @@ "order": 0, "slug": "historic", "type": "statement", - "used": true + "used": false }, "model": "doc.state", "pk": 182 }, + { + "fields": { + "desc": "The statement is no longer active", + "name": "Inactive", + "next_states": [], + "order": 0, + "slug": "inactive", + "type": "statement", + "used": true + }, + "model": "doc.state", + "pk": 183 + }, { "fields": { "label": "State" @@ -5520,13 +5533,31 @@ "liaison_response_contacts", "liaison_technical_contacts" ], - "desc": "Recipient for a message when a new liaison statement is posted", + "desc": "Recipients for a message when a new incoming liaison statement is posted", "to": [ + "liaison_from_contact", 
"liaison_to_contacts" ] }, "model": "mailtrigger.mailtrigger", - "pk": "liaison_statement_posted" + "pk": "liaison_statement_posted_incoming" + }, + { + "fields": { + "cc": [ + "liaison_cc", + "liaison_coordinators", + "liaison_from_contact", + "liaison_response_contacts", + "liaison_technical_contacts" + ], + "desc": "Recipients for a message when a new outgoing liaison statement is posted", + "to": [ + "liaison_to_contacts" + ] + }, + "model": "mailtrigger.mailtrigger", + "pk": "liaison_statement_posted_outgoing" }, { "fields": { @@ -7068,6 +7099,14 @@ "model": "mailtrigger.recipient", "pk": "liaison_coordinators" }, + { + "fields": { + "desc": "Email address of the formal sender of the statement", + "template": "{{liaison.from_contact}}" + }, + "model": "mailtrigger.recipient", + "pk": "liaison_from_contact" + }, { "fields": { "desc": "The assigned liaison manager for an external group ", From 8fbbc55ec3cb87f528953da33e8c7194c2b75afd Mon Sep 17 00:00:00 2001 From: Jennifer Richards Date: Tue, 7 Oct 2025 15:13:16 -0300 Subject: [PATCH 215/405] fix: keep day visible in timeslot editor (#9653) --- ietf/templates/meeting/timeslot_edit.html | 44 ++++++++++++----------- 1 file changed, 24 insertions(+), 20 deletions(-) diff --git a/ietf/templates/meeting/timeslot_edit.html b/ietf/templates/meeting/timeslot_edit.html index 11691ba6dd..3259dba9da 100644 --- a/ietf/templates/meeting/timeslot_edit.html +++ b/ietf/templates/meeting/timeslot_edit.html @@ -11,20 +11,22 @@ {% endcomment %} .timeslot-edit { overflow: auto; height: max(30rem, calc(100vh - 25rem));} .tstable { width: 100%; border-collapse: separate; } {# "separate" to ensure sticky cells keep their borders #} -.tstable thead { position: sticky; top: 0; z-index: 3; background-color: white;} -.tstable th:first-child, .tstable td:first-child { - background-color: white; {# needs to match the lighter of the striped-table colors! 
#} -position: sticky; -left: 0; - z-index: 2; {# render above other cells / borders but below thead (z-index 3, above) #} -} -.tstable tbody > tr:nth-of-type(odd) > th:first-child { - background-color: rgb(249, 249, 249); {# needs to match the darker of the striped-table colors! #} -} -.tstable th { white-space: nowrap;} -.tstable td { white-space: nowrap;} -.capacity { font-size:80%; font-weight: normal;} -a.new-timeslot-link { color: lightgray; font-size: large;} + .tstable tr th:first-child { min-width: 25rem; max-width: 25rem; overflow: hidden; } + .tstable thead { position: sticky; top: 0; z-index: 3; background-color: white;} + .tstable thead th span.day { position: sticky; left: 25.5rem; } + .tstable th:first-child, .tstable td:first-child { + background-color: white; {# needs to match the lighter of the striped-table colors! #} + position: sticky; + left: 0; + z-index: 2; {# render above other cells / borders but below thead (z-index 3, above) #} + } + .tstable tbody > tr:nth-of-type(odd) > th:first-child { + background-color: rgb(249, 249, 249); {# needs to match the darker of the striped-table colors! #} + } + .tstable th { white-space: nowrap;} + .tstable td { white-space: nowrap;} + .capacity { font-size:80%; font-weight: normal;} + a.new-timeslot-link { color: lightgray; font-size: large;} {% endblock %} {% block content %} {% origin %} @@ -84,12 +86,14 @@

  • - {{ day|date:'D' }} ({{ day }}) - - + + {{ day|date:'D' }} ({{ day }}) + + +
    - - - - - - - - - - {% for entry in activities %} - - - - - - - {% endfor %} - -
    DateTimeActionName
    {{ entry.act_date }}{{ entry.act_time }}{{ entry.activity }}{{ entry.act_by }}
    - diff --git a/ietf/secr/templates/includes/buttons_next_cancel.html b/ietf/secr/templates/includes/buttons_next_cancel.html deleted file mode 100644 index 95d25f55bc..0000000000 --- a/ietf/secr/templates/includes/buttons_next_cancel.html +++ /dev/null @@ -1,6 +0,0 @@ -
    -
      -
    • -
    • -
    -
    diff --git a/ietf/secr/templates/includes/buttons_submit_cancel.html b/ietf/secr/templates/includes/buttons_submit_cancel.html deleted file mode 100644 index df40c98255..0000000000 --- a/ietf/secr/templates/includes/buttons_submit_cancel.html +++ /dev/null @@ -1,6 +0,0 @@ -
    -
      -
    • -
    • -
    -
    diff --git a/ietf/secr/templates/includes/sessions_footer.html b/ietf/secr/templates/includes/sessions_footer.html deleted file mode 100755 index 2a26440047..0000000000 --- a/ietf/secr/templates/includes/sessions_footer.html +++ /dev/null @@ -1,5 +0,0 @@ - \ No newline at end of file diff --git a/ietf/secr/templates/includes/sessions_request_form.html b/ietf/secr/templates/includes/sessions_request_form.html deleted file mode 100755 index 61b1673357..0000000000 --- a/ietf/secr/templates/includes/sessions_request_form.html +++ /dev/null @@ -1,130 +0,0 @@ -* Required Field -{% csrf_token %} - {{ form.session_forms.management_form }} - {% if form.non_field_errors %} - {{ form.non_field_errors }} - {% endif %} - - - - - - {% if group.features.acts_like_wg %} - - {% if not is_virtual %} - - {% endif %} - - {% else %}{# else not group.features.acts_like_wg #} - {% for session_form in form.session_forms %} - - {% endfor %} - {% endif %} - - - - - - - - - - {% if not is_virtual %} - - - - - - - - - - - - - - - - - - - {% endif %} - - - - - - {% if form.notifications_optional %} - - - - - {% endif %} - -
    Working Group Name:{{ group.name }} ({{ group.acronym }})
    Area Name:{% if group.parent %}{{ group.parent.name }} ({{ group.parent.acronym }}){% endif %}
    Number of Sessions:*{{ form.num_session.errors }}{{ form.num_session }}
    Session 1:*{% include 'meeting/session_details_form.html' with form=form.session_forms.0 hide_onsite_tool_prompt=True only %}
    Session 2:*{% include 'meeting/session_details_form.html' with form=form.session_forms.1 hide_onsite_tool_prompt=True only %}
    Time between two sessions:{{ form.session_time_relation.errors }}{{ form.session_time_relation }}
    Additional Session Request:{{ form.third_session }} Check this box to request an additional session.
    - Additional slot may be available after agenda scheduling has closed and with the approval of an Area Director.
    -
    - Third Session: - {% include 'meeting/session_details_form.html' with form=form.session_forms.2 hide_onsite_tool_prompt=True only %} -
    -
    Session {{ forloop.counter }}:*{% include 'meeting/session_details_form.html' with form=session_form only %}
    Number of Attendees:{% if not is_virtual %}*{% endif %}{{ form.attendees.errors }}{{ form.attendees }}
    Participants who must be present: - {{ form.bethere.errors }} - {{ form.bethere }} -

    - Do not include Area Directors and WG Chairs; the system already tracks their availability. -

    -
    Conflicts to Avoid: - - - - - - - {% for cname, cfield, cselector in form.wg_constraint_fields %} - - {% if forloop.first %}{% endif %} - - - - {% empty %}{# shown if there are no constraint fields #} - - {% endfor %} - {% if form.inactive_wg_constraints %} - {% for cname, value, field in form.inactive_wg_constraints %} - - {% if forloop.first %} - - {% endif %} - - - - {% endfor %} - {% endif %} - - - - - -
    Other WGs that included {{ group.name }} in their conflict lists:{{ session_conflicts.inbound|default:"None" }}
    WG Sessions:
    You may select multiple WGs within each category
    {{ cname|title }}{{ cselector }} -
    - {{ cfield.errors }}{{ cfield }} -
    No constraints are enabled for this meeting.
    - Disabled for this meeting - {{ cname|title }}
    {{ field }} {{ field.label }}
    BOF Sessions:If the sessions can not be found in the fields above, please enter free form requests in the Special Requests field below.
    -
    Resources requested: - {{ form.resources.errors }} {{ form.resources }} -
    Times during which this WG can not meet:
    Please explain any selections in Special Requests below.
    {{ form.timeranges.errors }}{{ form.timeranges }}
    - Plan session adjacent with another WG:
    - (Immediately before or after another WG, no break in between, in the same room.) -
    {{ form.adjacent_with_wg.errors }}{{ form.adjacent_with_wg }}
    - Joint session with:
    - (To request one session for multiple WGs together.) -
    To request a joint session with another group, please contact the secretariat.
    Special Requests:
     
    i.e. restrictions on meeting times / days, etc.
    (limit 200 characters)
    {{ form.comments.errors }}{{ form.comments }}
    {{ form.send_notifications.label }}{{ form.send_notifications.errors }}{{ form.send_notifications }}
    - -
    -
      -
    • -
    • -
    -
    - \ No newline at end of file diff --git a/ietf/secr/templates/includes/sessions_request_view.html b/ietf/secr/templates/includes/sessions_request_view.html deleted file mode 100644 index bc6aef0611..0000000000 --- a/ietf/secr/templates/includes/sessions_request_view.html +++ /dev/null @@ -1,73 +0,0 @@ -{% load ams_filters %} - - - - - - {% if form %} - {% include 'includes/sessions_request_view_formset.html' with formset=form.session_forms group=group session=session only %} - {% else %} - {% include 'includes/sessions_request_view_session_set.html' with session_set=sessions group=group session=session only %} - {% endif %} - - - - - - - - - - {% if not is_virtual %} - - - - - {% endif %} - - - - - - - - - {% if not is_virtual %} - - - - - - - - - {% endif %} - - {% if form and form.notifications_optional %} - - - - - {% endif %} - -
    Working Group Name:{{ group.name }} ({{ group.acronym }})
    Area Name:{{ group.parent }}
    Number of Sessions Requested:{% if session.third_session %}3{% else %}{{ session.num_session }}{% endif %}
    Number of Attendees:{{ session.attendees }}
    Conflicts to Avoid: - {% if session_conflicts.outbound %} - - - {% for conflict in session_conflicts.outbound %} - - {% endfor %} - -
    {{ conflict.name|title }}: {{ conflict.groups }}
    - {% else %}None{% endif %} -
    Other WGs that included {{ group }} in their conflict list:{% if session_conflicts.inbound %}{{ session_conflicts.inbound }}{% else %}None so far{% endif %}
    Resources requested:{% if session.resources %}
      {% for resource in session.resources %}
    • {{ resource.desc }}
    • {% endfor %}
    {% else %}None so far{% endif %}
    Participants who must be present:{% if session.bethere %}
      {% for person in session.bethere %}
    • {{ person }}
    • {% endfor %}
    {% else %}None{% endif %}
    Can not meet on:{% if session.timeranges_display %}{{ session.timeranges_display|join:', ' }}{% else %}No constraints{% endif %}
    Adjacent with WG:{{ session.adjacent_with_wg|default:'No preference' }}
    Joint session: - {% if session.joint_with_groups %} - {{ session.joint_for_session_display }} with: {{ session.joint_with_groups }} - {% else %} - Not a joint session - {% endif %} -
    Special Requests:{{ session.comments }}
    - {{ form.send_notifications.label}} - - {% if form.cleaned_data.send_notifications %}Yes{% else %}No{% endif %} -
    \ No newline at end of file diff --git a/ietf/secr/templates/includes/sessions_request_view_formset.html b/ietf/secr/templates/includes/sessions_request_view_formset.html deleted file mode 100644 index 80cad8d829..0000000000 --- a/ietf/secr/templates/includes/sessions_request_view_formset.html +++ /dev/null @@ -1,32 +0,0 @@ -{% load ams_filters %}{# keep this in sync with sessions_request_view_session_set.html #} -{% for sess_form in formset %}{% if sess_form.cleaned_data and not sess_form.cleaned_data.DELETE %} - - Session {{ forloop.counter }}: - -
    -
    Length
    -
    {{ sess_form.cleaned_data.requested_duration.total_seconds|display_duration }}
    - {% if sess_form.cleaned_data.name %} -
    Name
    -
    {{ sess_form.cleaned_data.name }}
    {% endif %} - {% if sess_form.cleaned_data.purpose.slug != 'regular' %} -
    Purpose
    -
    - {{ sess_form.cleaned_data.purpose }} - {% if sess_form.cleaned_data.purpose.timeslot_types|length > 1 %}({{ sess_form.cleaned_data.type }} - ){% endif %} -
    -
    Onsite tool?
    -
    {{ sess_form.cleaned_data.has_onsite_tool|yesno }}
    - {% endif %} -
    - - - {% if group.features.acts_like_wg and forloop.counter == 2 and not is_virtual %} - - Time between sessions: - {% if session.session_time_relation_display %}{{ session.session_time_relation_display }}{% else %}No - preference{% endif %} - - {% endif %} -{% endif %}{% endfor %} \ No newline at end of file diff --git a/ietf/secr/templates/includes/sessions_request_view_session_set.html b/ietf/secr/templates/includes/sessions_request_view_session_set.html deleted file mode 100644 index a434b9d22b..0000000000 --- a/ietf/secr/templates/includes/sessions_request_view_session_set.html +++ /dev/null @@ -1,32 +0,0 @@ -{% load ams_filters %}{# keep this in sync with sessions_request_view_formset.html #} -{% for sess in session_set %} - - Session {{ forloop.counter }}: - -
    -
    Length
    -
    {{ sess.requested_duration.total_seconds|display_duration }}
    - {% if sess.name %} -
    Name
    -
    {{ sess.name }}
    {% endif %} - {% if sess.purpose.slug != 'regular' %} -
    Purpose
    -
    - {{ sess.purpose }} - {% if sess.purpose.timeslot_types|length > 1 %}({{ sess.type }} - ){% endif %} -
    -
    Onsite tool?
    -
    {{ sess.has_onsite_tool|yesno }}
    - {% endif %} -
    - - - {% if group.features.acts_like_wg and forloop.counter == 2 and not is_virtual %} - - Time between sessions: - {% if session.session_time_relation_display %}{{ session.session_time_relation_display }}{% else %}No - preference{% endif %} - - {% endif %} -{% endfor %} \ No newline at end of file diff --git a/ietf/secr/templates/index.html b/ietf/secr/templates/index.html index 05fa3db41f..9ea7021279 100644 --- a/ietf/secr/templates/index.html +++ b/ietf/secr/templates/index.html @@ -1,11 +1,11 @@ -{# Copyright The IETF Trust 2007, All Rights Reserved #} +{# Copyright The IETF Trust 2007-2025, All Rights Reserved #} {% extends "base.html" %} {% load static %} {% load ietf_filters %} {% block title %}Secretariat Dashboard{% endblock %} {% block content %}

    Secretariat Dashboard

    -
    +
    {% if user|has_role:"Secretariat" %}

    IESG

      @@ -20,12 +20,10 @@

      IDs and WGs Process

      Meetings and Proceedings

      {% else %} {% endif %} diff --git a/ietf/secr/templates/sreq/confirm.html b/ietf/secr/templates/sreq/confirm.html deleted file mode 100755 index 025375af32..0000000000 --- a/ietf/secr/templates/sreq/confirm.html +++ /dev/null @@ -1,57 +0,0 @@ -{% extends "base_site.html" %} -{% load static %} - -{% block title %}Sessions - Confirm{% endblock %} - -{% block extrastyle %} - -{% endblock %} - -{% block extrahead %}{{ block.super }} - - {{ form.media }} -{% endblock %} - -{% block breadcrumbs %}{{ block.super }} - » Sessions - » New - » Session Request Confirmation -{% endblock %} - -{% block content %} - -
      -

      Sessions - Confirm

      - - {% include "includes/sessions_request_view.html" %} - - {% if group.features.acts_like_wg and form.session_forms.forms_to_keep|length > 2 %} -
      -

      - - Note: Your request for a third session must be approved by an area director before - being submitted to agenda@ietf.org. Click "Submit" below to email an approval - request to the area directors. - -

      -
      - {% endif %} - -
      - {% csrf_token %} - {{ form }} - {{ form.session_forms.management_form }} - {% for sf in form.session_forms %} - {% include 'meeting/session_details_form.html' with form=sf hidden=True only %} - {% endfor %} - {% include "includes/buttons_submit_cancel.html" %} -
      - -
      - -{% endblock %} \ No newline at end of file diff --git a/ietf/secr/templates/sreq/edit.html b/ietf/secr/templates/sreq/edit.html deleted file mode 100755 index f6e62104b0..0000000000 --- a/ietf/secr/templates/sreq/edit.html +++ /dev/null @@ -1,39 +0,0 @@ -{% extends "base_site.html" %} -{% load static %} -{% block title %}Sessions - Edit{% endblock %} - -{% block extrahead %}{{ block.super }} - - - {{ form.media }} - -{% endblock %} - -{% block breadcrumbs %}{{ block.super }} - » Sessions - » {{ group.acronym }} - » Edit -{% endblock %} - -{% block instructions %} - Instructions -{% endblock %} - -{% block content %} -
      -

      IETF {{ meeting.number }}: Edit Session Request

      - -
      -{% endblock %} - -{% block footer-extras %} - {% include "includes/sessions_footer.html" %} -{% endblock %} \ No newline at end of file diff --git a/ietf/secr/templates/sreq/locked.html b/ietf/secr/templates/sreq/locked.html deleted file mode 100755 index c27cf578ed..0000000000 --- a/ietf/secr/templates/sreq/locked.html +++ /dev/null @@ -1,30 +0,0 @@ -{% extends "base_site.html" %} -{% load static %} - -{% block title %}Sessions{% endblock %} - -{% block extrahead %}{{ block.super }} - -{% endblock %} - -{% block breadcrumbs %}{{ block.super }} - » Sessions (Locked) -{% endblock %} - -{% block content %} -

      » View list of timeslot requests

      -
      -

      Sessions - Status

      - -

      {{ message }}

      - -
      -
        -
      • -
      -
      - - -
      - -{% endblock %} \ No newline at end of file diff --git a/ietf/secr/templates/sreq/main.html b/ietf/secr/templates/sreq/main.html deleted file mode 100755 index a6695cd4f3..0000000000 --- a/ietf/secr/templates/sreq/main.html +++ /dev/null @@ -1,65 +0,0 @@ -{% extends "base_site.html" %} -{% load ietf_filters %} -{% load static %} - -{% block title %}Sessions{% endblock %} - -{% block extrahead %}{{ block.super }} - -{% endblock %} - -{% block breadcrumbs %}{{ block.super }} - » Sessions -{% endblock %} -{% block instructions %} - Instructions -{% endblock %} - -{% block content %} -

      » View list of timeslot requests

      -
      -

      - Sessions Request Tool: IETF {{ meeting.number }} - {% if user|has_role:"Secretariat" %} - {% if is_locked %} - Tool Status: Locked - {% else %} - Tool Status: Unlocked - {% endif %} - {% endif %} -

      - -
      - -
      - -{% endblock %} - -{% block footer-extras %} - {% include "includes/sessions_footer.html" %} -{% endblock %} \ No newline at end of file diff --git a/ietf/secr/templates/sreq/new.html b/ietf/secr/templates/sreq/new.html deleted file mode 100755 index 3f46e6f897..0000000000 --- a/ietf/secr/templates/sreq/new.html +++ /dev/null @@ -1,43 +0,0 @@ -{% extends "base_site.html" %} -{% load static %} - -{% block title %}Sessions- New{% endblock %} - -{% block extrahead %}{{ block.super }} - - - {{ form.media }} - -{% endblock %} - -{% block breadcrumbs %}{{ block.super }} - » Sessions - » New Session Request -{% endblock %} - -{% block instructions %} - Instructions -{% endblock %} - -{% block content %} -
      -

      IETF {{ meeting.number }}: New Session Request

      - - {% include "includes/sessions_request_form.html" %} - -
      - -{% endblock %} - -{% block footer-extras %} - {% include "includes/sessions_footer.html" %} -{% endblock %} \ No newline at end of file diff --git a/ietf/secr/templates/sreq/tool_status.html b/ietf/secr/templates/sreq/tool_status.html deleted file mode 100755 index b91e73a129..0000000000 --- a/ietf/secr/templates/sreq/tool_status.html +++ /dev/null @@ -1,42 +0,0 @@ -{% extends "base_site.html" %} -{% load static %} - -{% block title %}Sessions{% endblock %} - -{% block extrahead %}{{ block.super }} - -{% endblock %} - -{% block breadcrumbs %}{{ block.super }} - » Sessions - » Session Status -{% endblock %} - -{% block content %} - -
      -

      Sessions - Status

      -

      Enter the message that you would like displayed to the WG Chair when this tool is locked.

      -
      {% csrf_token %} - - - - {{ form.as_table }} - -
      -
      -
        - {% if is_locked %} -
      • - {% else %} -
      • - {% endif %} -
      • -
      -
      - -
      - -
      - -{% endblock %} diff --git a/ietf/secr/templates/sreq/view.html b/ietf/secr/templates/sreq/view.html deleted file mode 100644 index 9a0a3b01c1..0000000000 --- a/ietf/secr/templates/sreq/view.html +++ /dev/null @@ -1,55 +0,0 @@ -{% extends "base_site.html" %} -{% load static %} - -{% block title %}Sessions - View{% endblock %} - -{% block extrahead %}{{ block.super }} - -{% endblock %} - -{% block extrastyle %} - -{% endblock %} - -{% block breadcrumbs %}{{ block.super }} - » Sessions - » {{ group.acronym }} -{% endblock %} - -{% block instructions %} - Instructions -{% endblock %} - -{% block content %} - -
      -

      Sessions - View (meeting: {{ meeting.number }})

      - - {% include "includes/sessions_request_view.html" %} - -
      - - {% include "includes/activities.html" %} - -
      -
        -
      • - {% if show_approve_button %} -
      • - {% endif %} -
      • -
      • -
      -
      -
      - -{% endblock %} - -{% block footer-extras %} - {% include "includes/sessions_footer.html" %} -{% endblock %} diff --git a/ietf/secr/urls.py b/ietf/secr/urls.py index 4a3e5b0363..ab21046654 100644 --- a/ietf/secr/urls.py +++ b/ietf/secr/urls.py @@ -1,11 +1,22 @@ +# Copyright The IETF Trust 2025, All Rights Reserved + +from django.conf import settings from django.urls import re_path, include from django.views.generic import TemplateView +from django.views.generic.base import RedirectView urlpatterns = [ re_path(r'^$', TemplateView.as_view(template_name='index.html'), name='ietf.secr'), re_path(r'^announcement/', include('ietf.secr.announcement.urls')), re_path(r'^meetings/', include('ietf.secr.meetings.urls')), re_path(r'^rolodex/', include('ietf.secr.rolodex.urls')), - re_path(r'^sreq/', include('ietf.secr.sreq.urls')), + # remove these redirects after 125 + re_path(r'^sreq/$', RedirectView.as_view(url='/meeting/session/request/', permanent=True)), + re_path(r'^sreq/%(acronym)s/$' % settings.URL_REGEXPS, RedirectView.as_view(url='/meeting/session/request/%(acronym)s/view/', permanent=True)), + re_path(r'^sreq/%(acronym)s/edit/$' % settings.URL_REGEXPS, RedirectView.as_view(url='/meeting/session/request/%(acronym)s/edit/', permanent=True)), + re_path(r'^sreq/%(acronym)s/new/$' % settings.URL_REGEXPS, RedirectView.as_view(url='/meeting/session/request/%(acronym)s/new/', permanent=True)), + re_path(r'^sreq/(?P[A-Za-z0-9_\-\+]+)/%(acronym)s/view/$' % settings.URL_REGEXPS, RedirectView.as_view(url='/meeting/%(num)s/session/request/%(acronym)s/view/', permanent=True)), + re_path(r'^sreq/(?P[A-Za-z0-9_\-\+]+)/%(acronym)s/edit/$' % settings.URL_REGEXPS, RedirectView.as_view(url='/meeting/%(num)s/session/request/%(acronym)s/edit/', permanent=True)), + # --------------------------------- re_path(r'^telechat/', include('ietf.secr.telechat.urls')), ] diff --git a/ietf/secr/utils/group.py b/ietf/secr/utils/group.py deleted file mode 100644 index 40a9065ace..0000000000 
--- a/ietf/secr/utils/group.py +++ /dev/null @@ -1,50 +0,0 @@ -# Copyright The IETF Trust 2013-2020, All Rights Reserved -# -*- coding: utf-8 -*- - - -# Python imports - -# Django imports -from django.core.exceptions import ObjectDoesNotExist - -# Datatracker imports -from ietf.group.models import Group -from ietf.ietfauth.utils import has_role - - -def get_my_groups(user,conclude=False): - ''' - Takes a Django user object (from request) - Returns a list of groups the user has access to. Rules are as follows - secretariat - has access to all groups - area director - has access to all groups in their area - wg chair or secretary - has access to their own group - chair of irtf has access to all irtf groups - - If user=None than all groups are returned. - concluded=True means include concluded groups. Need this to upload materials for groups - after they've been concluded. it happens. - ''' - my_groups = set() - states = ['bof','proposed','active'] - if conclude: - states.extend(['conclude','bof-conc']) - - all_groups = Group.objects.filter(type__features__has_meetings=True, state__in=states).order_by('acronym') - if user == None or has_role(user,'Secretariat'): - return all_groups - - try: - person = user.person - except ObjectDoesNotExist: - return list() - - for group in all_groups: - if group.role_set.filter(person=person,name__in=('chair','secr','ad')): - my_groups.add(group) - continue - if group.parent and group.parent.role_set.filter(person=person,name__in=('ad','chair')): - my_groups.add(group) - continue - - return list(my_groups) diff --git a/ietf/settings.py b/ietf/settings.py index d6be1d1e0f..9a213c1a73 100644 --- a/ietf/settings.py +++ b/ietf/settings.py @@ -537,7 +537,6 @@ def skip_unreadable_post(record): 'ietf.secr.announcement', 'ietf.secr.meetings', 'ietf.secr.rolodex', - 'ietf.secr.sreq', 'ietf.secr.telechat', ] diff --git a/ietf/static/js/custom_striped.js b/ietf/static/js/custom_striped.js new file mode 100644 index 0000000000..480ad7cf82 --- 
/dev/null +++ b/ietf/static/js/custom_striped.js @@ -0,0 +1,16 @@ +// Copyright The IETF Trust 2025, All Rights Reserved + +document.addEventListener('DOMContentLoaded', () => { + // add stripes + const firstRow = document.querySelector('.custom-stripe .row') + if (firstRow) { + const parent = firstRow.parentElement; + const allRows = Array.from(parent.children).filter(child => child.classList.contains('row')) + allRows.forEach((row, index) => { + row.classList.remove('bg-light') + if (index % 2 === 1) { + row.classList.add('bg-light') + } + }) + } +}) diff --git a/ietf/secr/static/js/session_form.js b/ietf/static/js/session_form.js similarity index 92% rename from ietf/secr/static/js/session_form.js rename to ietf/static/js/session_form.js index 6f28f16db4..bd61293d7c 100644 --- a/ietf/secr/static/js/session_form.js +++ b/ietf/static/js/session_form.js @@ -1,4 +1,4 @@ -/* Copyright The IETF Trust 2021, All Rights Reserved +/* Copyright The IETF Trust 2021-2025, All Rights Reserved * * JS support for the SessionForm * */ diff --git a/ietf/secr/static/js/sessions.js b/ietf/static/js/session_request.js similarity index 90% rename from ietf/secr/static/js/sessions.js rename to ietf/static/js/session_request.js index a2770e6262..dfb169f675 100644 --- a/ietf/secr/static/js/sessions.js +++ b/ietf/static/js/session_request.js @@ -1,4 +1,4 @@ -// Copyright The IETF Trust 2015-2021, All Rights Reserved +// Copyright The IETF Trust 2015-2025, All Rights Reserved /* global alert */ var ietf_sessions; // public interface @@ -38,7 +38,7 @@ var ietf_sessions; // public interface const only_one_session = (val === 1); if (document.form_post.session_time_relation) { document.form_post.session_time_relation.disabled = only_one_session; - document.form_post.session_time_relation.closest('tr').hidden = only_one_session; + document.form_post.session_time_relation.closest('div.row').hidden = only_one_session; } if (document.form_post.joint_for_session) { 
document.form_post.joint_for_session.disabled = only_one_session; @@ -129,6 +129,11 @@ var ietf_sessions; // public interface } } + function wg_constraint_delete_clicked(event) { + const constraint_name = event.currentTarget.dataset.constraint_name; + delete_last_wg_constraint(constraint_name); + } + /* Initialization */ function on_load() { // Attach event handler to session count select @@ -146,6 +151,9 @@ var ietf_sessions; // public interface selectors[index].addEventListener('change', wg_constraint_selector_changed, false) } + // Attach event handler to constraint delete buttons + document.querySelectorAll('.wg_constraint_delete') + .forEach(btn => btn.addEventListener('click', wg_constraint_delete_clicked)); } // initialize after page loads diff --git a/ietf/templates/base/menu.html b/ietf/templates/base/menu.html index bd8c0bf3cd..1e7c1688ff 100644 --- a/ietf/templates/base/menu.html +++ b/ietf/templates/base/menu.html @@ -1,4 +1,4 @@ -{# Copyright The IETF Trust 2015-2022, All Rights Reserved #} +{# Copyright The IETF Trust 2015-2025, All Rights Reserved #} {% load origin %} {% origin %} {% load ietf_filters managed_groups wg_menu active_groups_menu group_filters cache meetings_filters %} @@ -304,7 +304,7 @@
    • + href="{% url 'ietf.meeting.views_session_request.list_view' %}"> Request a session
    • diff --git a/ietf/templates/group/meetings-row.html b/ietf/templates/group/meetings-row.html index 25605ec0f1..8927eb61a2 100644 --- a/ietf/templates/group/meetings-row.html +++ b/ietf/templates/group/meetings-row.html @@ -1,3 +1,4 @@ +{# Copyright The IETF Trust 2025, All Rights Reserved #} {% load origin tz %} {% origin %} {% for s in sessions %} @@ -25,7 +26,7 @@ {% if show_request and s.meeting.type_id == 'ietf' %} {% if can_edit %} + href="{% url 'ietf.meeting.views_session_request.view_request' num=s.meeting.number acronym=s.group.acronym %}"> Edit Session Request {% endif %} diff --git a/ietf/templates/group/meetings.html b/ietf/templates/group/meetings.html index bee8111025..30f478da13 100644 --- a/ietf/templates/group/meetings.html +++ b/ietf/templates/group/meetings.html @@ -1,3 +1,4 @@ +{# Copyright The IETF Trust 2025, All Rights Reserved #} {% extends "group/group_base.html" %} {% load origin static %} {% block title %} @@ -9,7 +10,7 @@ Session requests {% if can_edit or can_always_edit %} - Request a session + Request a session Request an interim meeting diff --git a/ietf/templates/meeting/important_dates_for_meeting.ics b/ietf/templates/meeting/important_dates_for_meeting.ics index df5fe46818..e6d403da93 100644 --- a/ietf/templates/meeting/important_dates_for_meeting.ics +++ b/ietf/templates/meeting/important_dates_for_meeting.ics @@ -1,3 +1,4 @@ +{# Copyright The IETF Trust 2025, All Rights Reserved #} {% load tz ietf_filters %}{% for d in meeting.important_dates %}BEGIN:VEVENT UID:ietf-{{ meeting.number }}-{{ d.name_id }}-{{ d.date.isoformat }} SUMMARY:IETF {{ meeting.number }}: {{ d.name.name }} @@ -8,11 +9,11 @@ TRANSP:TRANSPARENT DESCRIPTION:{{ d.name.desc }}{% if first and d.name.slug == 'openreg' or first and d.name.slug == 'earlybird' %}\n Register here: https://www.ietf.org/how/meetings/register/{% endif %}{% if d.name.slug == 'opensched' %}\n To request a Working Group session, use the IETF Meeting Session Request Tool:\n - {{ 
request.scheme }}://{{ request.get_host}}{% url 'ietf.secr.sreq.views.main' %}\n + {{ request.scheme }}://{{ request.get_host}}{% url 'ietf.meeting.views_session_request.list_view' %}\n If you are working on a BOF request, it is highly recommended to tell the IESG\n now by sending an email to iesg@ietf.org to get advance help with the request.{% endif %}{% if d.name.slug == 'cutoffwgreq' %}\n To request a Working Group session, use the IETF Meeting Session Request Tool:\n - {{ request.scheme }}://{{ request.get_host }}{% url 'ietf.secr.sreq.views.main' %}{% endif %}{% if d.name.slug == 'cutoffbofreq' %}\n + {{ request.scheme }}://{{ request.get_host }}{% url 'ietf.meeting.views_session_request.list_view' %}{% endif %}{% if d.name.slug == 'cutoffbofreq' %}\n To request a BOF, please see instructions on Requesting a BOF:\n https://www.ietf.org/how/bofs/bof-procedures/{% endif %}{% if d.name.slug == 'idcutoff' %}\n Upload using the I-D Submission Tool:\n diff --git a/ietf/templates/meeting/requests.html b/ietf/templates/meeting/requests.html index 3008ceb662..0abee95887 100644 --- a/ietf/templates/meeting/requests.html +++ b/ietf/templates/meeting/requests.html @@ -1,5 +1,5 @@ {% extends "base.html" %} -{# Copyright The IETF Trust 2015, All Rights Reserved #} +{# Copyright The IETF Trust 2015-2025, All Rights Reserved #} {% load origin %} {% load ietf_filters static person_filters textfilters %} {% block pagehead %} @@ -151,7 +151,7 @@

      {% endifchanged %} - + {{ session.group.acronym }} {% if session.purpose_id != "regular" and session.purpose_id != "none" %} diff --git a/ietf/secr/templates/sreq/session_approval_notification.txt b/ietf/templates/meeting/session_approval_notification.txt similarity index 56% rename from ietf/secr/templates/sreq/session_approval_notification.txt rename to ietf/templates/meeting/session_approval_notification.txt index 7bb63aa3fa..74eca09bd8 100644 --- a/ietf/secr/templates/sreq/session_approval_notification.txt +++ b/ietf/templates/meeting/session_approval_notification.txt @@ -1,3 +1,4 @@ +{# Copyright The IETF Trust 2025, All Rights Reserved #} Dear {{ group.parent }} Director(s): {{ header }} meeting session request has just been @@ -5,11 +6,11 @@ submitted by {{ requester }}. The third session requires your approval. To approve the session go to the session request view here: -{{ settings.IDTRACKER_BASE_URL }}{% url "ietf.secr.sreq.views.view" acronym=group.acronym %} +{{ settings.IDTRACKER_BASE_URL }}{% url "ietf.meeting.views_session_request.view_request" acronym=group.acronym %} and click "Approve Third Session". Regards, The IETF Secretariat. 
-{% include "includes/session_info.txt" %} +{% include "meeting/session_request_info.txt" %} diff --git a/ietf/secr/templates/sreq/session_cancel_notification.txt b/ietf/templates/meeting/session_cancel_notification.txt similarity index 71% rename from ietf/secr/templates/sreq/session_cancel_notification.txt rename to ietf/templates/meeting/session_cancel_notification.txt index 8aee6c89db..3de67fc8f4 100644 --- a/ietf/secr/templates/sreq/session_cancel_notification.txt +++ b/ietf/templates/meeting/session_cancel_notification.txt @@ -1,3 +1,4 @@ +{# Copyright The IETF Trust 2025, All Rights Reserved #} {% autoescape off %}{% load ams_filters %} A request to cancel a meeting session has just been submitted by {{ requester }}.{% endautoescape %} diff --git a/ietf/templates/meeting/session_details_form.html b/ietf/templates/meeting/session_details_form.html index 6b59e7dacd..9cd1b6e85c 100644 --- a/ietf/templates/meeting/session_details_form.html +++ b/ietf/templates/meeting/session_details_form.html @@ -1,42 +1,48 @@ -{# Copyright The IETF Trust 2007-2020, All Rights Reserved #} +{# Copyright The IETF Trust 2007-2025, All Rights Reserved #} +{% load django_bootstrap5 %} +
      {% if hidden %} {{ form.name.as_hidden }}{{ form.purpose.as_hidden }}{{ form.type.as_hidden }}{{ form.requested_duration.as_hidden }} {{ form.has_onsite_tool.as_hidden }} {% else %} - - {% comment %} The form-group class is used by session_details_form.js to identify the correct element to hide the name / purpose / type fields when not needed. This is a bootstrap class - the secr app does not use it, so this (and the hidden class, also needed by session_details_form.js) are defined in edit.html and new.html as a kludge to make this work. {% endcomment %} - - - - - - - - - - - - - {% if not hide_onsite_tool_prompt %} - - - - - {% endif %} - -
      {{ form.name.label_tag }}{{ form.name }}{{ form.purpose.errors }}
      {{ form.purpose.label_tag }} - {{ form.purpose }}
      {{ form.type }}
      - {{ form.purpose.errors }}{{ form.type.errors }} -
      {{ form.requested_duration.label_tag }}{{ form.requested_duration }}{{ form.requested_duration.errors }}
      {{ form.has_onsite_tool.label_tag }}{{ form.has_onsite_tool }}{{ form.has_onsite_tool.errors }}
      - {% if hide_onsite_tool_prompt %}{{ form.has_onsite_tool.as_hidden }}{% endif %} + +
      + {% bootstrap_field form.name layout="horizontal" %} +
      + +
      +
      + +
      {{ form.purpose }}
      +
      {{ form.type }}
      + {{ form.purpose.errors }}{{ form.type.errors }} +
      +
      + + {% bootstrap_field form.requested_duration layout="horizontal" %} + {% if not hide_onsite_tool_prompt %} + {% bootstrap_field form.has_onsite_tool layout="horizontal" %} + {% endif %} + + {% if hide_onsite_tool_prompt %} + {{ form.has_onsite_tool.as_hidden }} + {% endif %} {% endif %} + {# hidden fields included whether or not the whole form is hidden #} - {{ form.attendees.as_hidden }}{{ form.comments.as_hidden }}{{ form.id.as_hidden }}{{ form.on_agenda.as_hidden }}{{ form.DELETE.as_hidden }}{{ form.remote_instructions.as_hidden }}{{ form.short.as_hidden }}{{ form.agenda_note.as_hidden }} -
      \ No newline at end of file + {{ form.attendees.as_hidden }} + {{ form.comments.as_hidden }} + {{ form.id.as_hidden }} + {{ form.on_agenda.as_hidden }} + {{ form.DELETE.as_hidden }} + {{ form.remote_instructions.as_hidden }} + {{ form.short.as_hidden }} + {{ form.agenda_note.as_hidden }} +

    diff --git a/ietf/secr/templates/sreq/not_meeting_notification.txt b/ietf/templates/meeting/session_not_meeting_notification.txt similarity index 83% rename from ietf/secr/templates/sreq/not_meeting_notification.txt rename to ietf/templates/meeting/session_not_meeting_notification.txt index 1120f8480c..0e5c940708 100644 --- a/ietf/secr/templates/sreq/not_meeting_notification.txt +++ b/ietf/templates/meeting/session_not_meeting_notification.txt @@ -1,3 +1,4 @@ +{# Copyright The IETF Trust 2025, All Rights Reserved #} {% load ams_filters %} {{ login|smart_login }} {{ group.acronym }} working group, indicated that the {{ group.acronym }} working group does not plan to hold a session at IETF {{ meeting.number }}. diff --git a/ietf/templates/meeting/session_request_confirm.html b/ietf/templates/meeting/session_request_confirm.html new file mode 100644 index 0000000000..09043d3d0c --- /dev/null +++ b/ietf/templates/meeting/session_request_confirm.html @@ -0,0 +1,38 @@ +{# Copyright The IETF Trust 2025, All Rights Reserved #} +{% extends "base.html" %} +{% load static ietf_filters django_bootstrap5 %} +{% block title %}Confirm Session Request{% endblock %} + +{% block content %} +

    Confirm Session Request - IETF {{ meeting.number }}

    + + + +
    + +
    + + {% include "meeting/session_request_view_table.html" %} + +
    + {% csrf_token %} + {{ form }} + {{ form.session_forms.management_form }} + {% for sf in form.session_forms %} + {% include 'meeting/session_details_form.html' with form=sf hidden=True only %} + {% endfor %} + + + + +
    + +
    + +{% endblock %} + +{% block js %} + +{% endblock %} \ No newline at end of file diff --git a/ietf/templates/meeting/session_request_form.html b/ietf/templates/meeting/session_request_form.html new file mode 100644 index 0000000000..ecf5cb7268 --- /dev/null +++ b/ietf/templates/meeting/session_request_form.html @@ -0,0 +1,206 @@ +{# Copyright The IETF Trust 2025, All Rights Reserved #} +{% extends "base.html" %} +{% load static ietf_filters django_bootstrap5 %} +{% block title %}{% if is_create %}New {% else %}Edit {% endif %}Session Request{% endblock %} +{% block morecss %}{{ block.super }} + .hidden {display: none !important;} + div.form-group {display: inline;} +{% endblock %} +{% block content %} +

    {% if is_create %}New {% else %}Edit {% endif %}Session Request

    + + {% if is_create %} + + {% endif %} + +
    + +
    + {% csrf_token %} + {{ form.session_forms.management_form }} + {% if form.non_field_errors %} +
    {{ form.non_field_errors }}
    + {% endif %} + +
    + +
    + +
    +
    + +
    + +
    + +
    +
    + + {% bootstrap_field form.num_session layout="horizontal" %} + + {% if group.features.acts_like_wg %} + +
    +
    Session 1
    +
    + {% include 'meeting/session_details_form.html' with form=form.session_forms.0 hide_onsite_tool_prompt=True only %} +
    +
    + +
    +
    Session 2
    +
    + {% include 'meeting/session_details_form.html' with form=form.session_forms.1 hide_onsite_tool_prompt=True only %} +
    +
    + + {% if not is_virtual %} + {% bootstrap_field form.session_time_relation layout="horizontal" %} + {% endif %} + +
    +
    Additional Session Request
    +
    +
    + {{ form.third_session }} + +
    Additional slot may be available after agenda scheduling has closed and with the approval of an Area Director.
    +
    + +
    +
    + +
    +
    Third session request
    +
    + {% include 'meeting/session_details_form.html' with form=form.session_forms.2 hide_onsite_tool_prompt=True only %} +
    +
    + + {% else %}{# else not group.features.acts_like_wg #} + {% for session_form in form.session_forms %} +
    +
    Session {{ forloop.counter }}
    +
    + {% include 'meeting/session_details_form.html' with form=session_form only %} +
    +
    + {% endfor %} + {% endif %} + + {% bootstrap_field form.attendees layout="horizontal" %} + + {% bootstrap_field form.bethere layout="horizontal" %} + +
    +
    Conflicts to avoid
    +
    +
    +
    Other WGs that included {{ group.acronym }} in their conflict lists
    +
    {{ session_conflicts.inbound|default:"None" }}
    +
    +
    +
    WG Sessions
    You may select multiple WGs within each category
    +
    + {% for cname, cfield, cselector in form.wg_constraint_fields %} +
    +
    +
    +
    +
    + {{ cselector }} +
    +
    + +
    +
    +
    +
    + {{ cfield.errors }}{{ cfield }} +
    +
    +
    +
    + {% empty %}{# shown if there are no constraint fields #} +
    +
    No constraints are enabled for this meeting.
    + {% endfor %} +
    +
    + + {% if form.inactive_wg_constraint_count %} +
    +
    Disabled for this meeting
    +
    + {% for cname, value, field in form.inactive_wg_constraints %} +
    +
    {{ cname|title }}
    +
    +
    +
    + +
    +
    + + +
    +
    +
    +
    + {% endfor %} +
    +
    + {% endif %} + +
    +
    BOF Sessions
    +
    If the sessions can not be found in the fields above, please enter free form requests in the Special Requests field below.
    +
    +
    +
    + + {% if not is_virtual %} + + {% bootstrap_field form.resources layout="horizontal" %} + + {% bootstrap_field form.timeranges layout="horizontal" %} + + {% bootstrap_field form.adjacent_with_wg layout="horizontal" %} + +
    +
    Joint session with: (To request one session for multiple WGs together)
    +
    To request a joint session with another group, please contact the secretariat.
    +
    + + {% endif %} + + {% bootstrap_field form.comments layout="horizontal" %} + + {% if form.notifications_optional %} +
    + +
    +
    + + +
    +
    +
    + {% endif %} + + + Cancel +
    + +{% endblock %} +{% block js %} + + {{ form.media }} +{% endblock %} \ No newline at end of file diff --git a/ietf/templates/meeting/session_request_info.txt b/ietf/templates/meeting/session_request_info.txt new file mode 100644 index 0000000000..2e96efb31f --- /dev/null +++ b/ietf/templates/meeting/session_request_info.txt @@ -0,0 +1,26 @@ +{# Copyright The IETF Trust 2025, All Rights Reserved #} +{% load ams_filters %} +--------------------------------------------------------- +Working Group Name: {{ group.name }} +Area Name: {{ group.parent }} +Session Requester: {{ login }} +{% if session.joint_with_groups %}{{ session.joint_for_session_display }} joint with: {{ session.joint_with_groups }}{% endif %} + +Number of Sessions: {{ session.num_session }} +Length of Session(s): {% for session_length in session_lengths %}{{ session_length.total_seconds|display_duration }}{% if not forloop.last %}, {% endif %}{% endfor %} +Number of Attendees: {{ session.attendees }} +Conflicts to Avoid: +{% for line in session.outbound_conflicts %} {{line}} +{% endfor %}{% if session.session_time_relation_display %} {{ session.session_time_relation_display }}{% endif %} +{% if session.adjacent_with_wg %} Adjacent with WG: {{ session.adjacent_with_wg }}{% endif %} +{% if session.timeranges_display %} Can't meet: {{ session.timeranges_display|join:", " }}{% endif %} + +Participants who must be present: +{% for person in session.bethere %} {{ person.ascii_name }} +{% endfor %} +Resources Requested: +{% for resource in session.resources %} {{ resource.desc }} +{% endfor %} +Special Requests: + {{ session.comments }} +--------------------------------------------------------- diff --git a/ietf/templates/meeting/session_request_list.html b/ietf/templates/meeting/session_request_list.html new file mode 100644 index 0000000000..789b7006e5 --- /dev/null +++ b/ietf/templates/meeting/session_request_list.html @@ -0,0 +1,65 @@ +{# Copyright The IETF Trust 2025, All Rights Reserved #} +{% 
extends "base.html" %} +{% load static %} +{% load ietf_filters %} +{% load django_bootstrap5 %} +{% block title %}Session Requests{% endblock %} +{% block content %} +

    Session Requests IETF {{ meeting.number }}

    + +
    + Instructions + + View list of timeslot requests + {% if user|has_role:"Secretariat" %} + {% if is_locked %} + Unlock Tool + {% else %} + Lock Tool + {% endif %} + {% endif %} +
    + +
    +
    + Request New Session +
    +
    +

    The list below includes those working groups that you currently chair which do not already have a session scheduled. You can click on an acronym to complete a request for a new session at the upcoming IETF meeting. Click "Group will not meet" to send a notification that the group does not plan to meet.

    +
      + {% for group in unscheduled_groups %} +
    • + {{ group.acronym }} + {% if group.not_meeting %} + (Currently, this group does not plan to hold a session at IETF {{ meeting.number }}) + {% endif %} +
    • + {% empty %} +
    • NONE
    • + {% endfor %} +
    +
    +
    + + +
    +
    + Edit / Cancel Previously Requested Sessions +
    +
    +

    The list below includes those working groups for which you or your co-chair has requested sessions at the upcoming IETF meeting. You can click on an acronym to initiate changes to a session, or cancel a session.

    + +
    +
    + +{% endblock %} + +{% block footer-extras %} + {% include "includes/sessions_footer.html" %} +{% endblock %} \ No newline at end of file diff --git a/ietf/templates/meeting/session_request_locked.html b/ietf/templates/meeting/session_request_locked.html new file mode 100644 index 0000000000..15c023ce33 --- /dev/null +++ b/ietf/templates/meeting/session_request_locked.html @@ -0,0 +1,21 @@ +{# Copyright The IETF Trust 2025, All Rights Reserved #} +{% extends "base.html" %} +{% load static ietf_filters django_bootstrap5 %} +{% block title %}Session Request{% endblock %} + +{% block content %} +

    Session Request - IETF {{ meeting.number }}

    + + View list of timeslot requests + +
    + +
    +

    {{ message }}

    + +
    + +
    +
    + +{% endblock %} diff --git a/ietf/secr/templates/sreq/session_request_notification.txt b/ietf/templates/meeting/session_request_notification.txt similarity index 56% rename from ietf/secr/templates/sreq/session_request_notification.txt rename to ietf/templates/meeting/session_request_notification.txt index 75f2cbbae4..49dbbfc42c 100644 --- a/ietf/secr/templates/sreq/session_request_notification.txt +++ b/ietf/templates/meeting/session_request_notification.txt @@ -1,5 +1,6 @@ +{# Copyright The IETF Trust 2025, All Rights Reserved #} {% autoescape off %}{% load ams_filters %} {% filter wordwrap:78 %}{{ header }} meeting session request has just been submitted by {{ requester }}.{% endfilter %} -{% include "includes/session_info.txt" %}{% endautoescape %} +{% include "meeting/session_request_info.txt" %}{% endautoescape %} diff --git a/ietf/templates/meeting/session_request_status.html b/ietf/templates/meeting/session_request_status.html new file mode 100644 index 0000000000..65e98d6d23 --- /dev/null +++ b/ietf/templates/meeting/session_request_status.html @@ -0,0 +1,28 @@ +{# Copyright The IETF Trust 2025, All Rights Reserved #} +{% extends "base.html" %} +{% load static %} +{% load ietf_filters %} +{% load django_bootstrap5 %} +{% block title %}Session Request Status{% endblock %} +{% block content %} +

    Session Request Status

    + +
    +
    + Session Request Status +
    +
    +

    Enter the message that you would like displayed to the WG Chair when this tool is locked.

    +
    {% csrf_token %} + {% bootstrap_form form %} + {% if is_locked %} + + {% else %} + + {% endif %} + +
    +
    +
    + +{% endblock %} diff --git a/ietf/templates/meeting/session_request_view.html b/ietf/templates/meeting/session_request_view.html new file mode 100644 index 0000000000..3db16f56cb --- /dev/null +++ b/ietf/templates/meeting/session_request_view.html @@ -0,0 +1,59 @@ +{# Copyright The IETF Trust 2025, All Rights Reserved #} +{% extends "base.html" %} +{% load static ietf_filters django_bootstrap5 %} +{% block title %}Session Request{% endblock %} + +{% block content %} +

    Session Request - IETF {{ meeting.number }}

    + + + +
    + +
    + + {% include "meeting/session_request_view_table.html" %} + +
    + +

    Activities Log

    +
    + + + + + + + + + + + {% for entry in activities %} + + + + + + + {% endfor %} + +
    DateTimeActionName
    {{ entry.act_date }}{{ entry.act_time }}{{ entry.activity }}{{ entry.act_by }}
    +
    + + + + {% if show_approve_button %} + Approve Third Session + {% endif %} + + Back + +
    + +{% endblock %} + +{% block js %} + +{% endblock %} \ No newline at end of file diff --git a/ietf/templates/meeting/session_request_view_formset.html b/ietf/templates/meeting/session_request_view_formset.html new file mode 100644 index 0000000000..72811b8c2c --- /dev/null +++ b/ietf/templates/meeting/session_request_view_formset.html @@ -0,0 +1,49 @@ +{# Copyright The IETF Trust 2025, All Rights Reserved #} +{% load ams_filters %}{# keep this in sync with sessions_request_view_session_set.html #} +{% for sess_form in formset %} + {% if sess_form.cleaned_data and not sess_form.cleaned_data.DELETE %} +
    +
    + Session {{ forloop.counter }} +
    +
    +
    +
    Length
    +
    {{ sess_form.cleaned_data.requested_duration.total_seconds|display_duration }}
    +
    + {% if sess_form.cleaned_data.name %} +
    +
    Name
    +
    {{ sess_form.cleaned_data.name }}
    +
    + {% endif %} + {% if sess_form.cleaned_data.purpose.slug != 'regular' %} +
    +
    Purpose
    +
    + {{ sess_form.cleaned_data.purpose }} + {% if sess_form.cleaned_data.purpose.timeslot_types|length > 1 %}({{ sess_form.cleaned_data.type }} + ){% endif %} +
    +
    +
    +
    Onsite tool?
    +
    {{ sess_form.cleaned_data.has_onsite_tool|yesno }}
    +
    + {% endif %} +
    +
    + + {% if group.features.acts_like_wg and forloop.counter == 2 and not is_virtual %} +
    +
    + Time between sessions +
    +
    + {% if session.session_time_relation_display %}{{ session.session_time_relation_display }}{% else %}No + preference{% endif %} +
    +
    + {% endif %} + {% endif %} +{% endfor %} \ No newline at end of file diff --git a/ietf/templates/meeting/session_request_view_session_set.html b/ietf/templates/meeting/session_request_view_session_set.html new file mode 100644 index 0000000000..0b8412b04f --- /dev/null +++ b/ietf/templates/meeting/session_request_view_session_set.html @@ -0,0 +1,47 @@ +{# Copyright The IETF Trust 2025, All Rights Reserved #} +{% load ams_filters %}{# keep this in sync with sessions_request_view_formset.html #} +{% for sess in session_set %} +
    +
    + Session {{ forloop.counter }} +
    +
    +
    +
    Length
    +
    {{ sess.requested_duration.total_seconds|display_duration }}
    +
    + {% if sess.name %} +
    +
    Name
    +
    {{ sess.name }}
    +
    + {% endif %} + {% if sess.purpose.slug != 'regular' %} +
    +
    Purpose
    +
    + {{ sess.purpose }} + {% if sess.purpose.timeslot_types|length > 1 %}({{ sess.type }}){% endif %} +
    +
    +
    +
    Onsite tool?
    +
    {{ sess.has_onsite_tool|yesno }}
    +
    + {% endif %} +
    +
    + +{% if group.features.acts_like_wg and forloop.counter == 2 and not is_virtual %} +
    +
    + Time between sessions +
    +
    + {% if session.session_time_relation_display %}{{ session.session_time_relation_display }}{% else %}No + preference{% endif %} +
    +
    +{% endif %} + +{% endfor %} \ No newline at end of file diff --git a/ietf/templates/meeting/session_request_view_table.html b/ietf/templates/meeting/session_request_view_table.html new file mode 100644 index 0000000000..a5cb85c252 --- /dev/null +++ b/ietf/templates/meeting/session_request_view_table.html @@ -0,0 +1,146 @@ +{# Copyright The IETF Trust 2025, All Rights Reserved #} +{% load ams_filters %} + +
    +
    + Working Group Name +
    +
    + {{ group.name }} ({{ group.acronym }}) +
    +
    + +
    +
    + Area Name +
    +
    + {{ group.parent }} +
    +
    + +
    +
    + Number of Sessions Requested +
    +
    + {% if session.third_session %}3{% else %}{{ session.num_session }}{% endif %} +
    +
    + +{% if form %} + {% include 'meeting/session_request_view_formset.html' with formset=form.session_forms group=group session=session only %} +{% else %} + {% include 'meeting/session_request_view_session_set.html' with session_set=sessions group=group session=session only %} +{% endif %} + + +
    +
    + Number of Attendees +
    +
    + {{ session.attendees }} +
    +
    + +
    +
    + Conflicts to Avoid +
    +
    + {% if session_conflicts.outbound %} + {% for conflict in session_conflicts.outbound %} +
    +
    + {{ conflict.name|title }} +
    +
    + {{ conflict.groups }} +
    +
    + {% endfor %} + {% else %}None{% endif %} +
    +
    + +
    +
    + Other WGs that included {{ group }} in their conflict list +
    +
    + {% if session_conflicts.inbound %}{{ session_conflicts.inbound }}{% else %}None so far{% endif %} +
    +
    + +{% if not is_virtual %} +
    +
    + Resources requested +
    +
    + {% if session.resources %}
      {% for resource in session.resources %}
    • {{ resource.desc }}
    • {% endfor %}
    {% else %}None so far{% endif %} +
    +
    +{% endif %} + +
    +
    + Participants who must be present +
    +
    + {% if session.bethere %}
      {% for person in session.bethere %}
    • {{ person }}
    • {% endfor %}
    {% else %}None{% endif %} +
    +
    + +
    +
    + Can not meet on +
    +
    + {% if session.timeranges_display %}{{ session.timeranges_display|join:', ' }}{% else %}No constraints{% endif %} +
    +
    + +{% if not is_virtual %} +
    +
    + Adjacent with WG +
    +
    + {{ session.adjacent_with_wg|default:'No preference' }} +
    +
    +
    +
    + Joint session +
    +
    + {% if session.joint_with_groups %} + {{ session.joint_for_session_display }} with: {{ session.joint_with_groups }} + {% else %} + Not a joint session + {% endif %} +
    +
    +{% endif %} + +
    +
    + Special Requests +
    +
    + {{ session.comments }} +
    +
    + +{% if form and form.notifications_optional %} +
    +
    + {{ form.send_notifications.label}} +
    +
    + {% if form.cleaned_data.send_notifications %}Yes{% else %}No{% endif %} +
    +
    +{% endif %} diff --git a/package.json b/package.json index e3e89288e7..e2e6fd7dab 100644 --- a/package.json +++ b/package.json @@ -118,6 +118,7 @@ "ietf/static/js/complete-review.js", "ietf/static/js/create_timeslot.js", "ietf/static/js/create_timeslot.js", + "ietf/static/js/custom_striped.js", "ietf/static/js/d3.js", "ietf/static/js/datepicker.js", "ietf/static/js/doc-search.js", @@ -148,6 +149,8 @@ "ietf/static/js/password_strength.js", "ietf/static/js/select2.js", "ietf/static/js/session_details_form.js", + "ietf/static/js/session_form.js", + "ietf/static/js/session_request.js", "ietf/static/js/sortable.js", "ietf/static/js/stats.js", "ietf/static/js/status-change-edit-relations.js", @@ -208,8 +211,6 @@ "ietf/secr/static/images/tooltag-arrowright.webp", "ietf/secr/static/images/tooltag-arrowright_over.webp", "ietf/secr/static/js/dynamic_inlines.js", - "ietf/secr/static/js/session_form.js", - "ietf/secr/static/js/sessions.js", "ietf/secr/static/js/utils.js" ] } From 4961f376756de40ca1fe1d2db6a4ec7ff32b92a9 Mon Sep 17 00:00:00 2001 From: Robert Sparks Date: Tue, 14 Oct 2025 14:51:43 -0500 Subject: [PATCH 219/405] feat: preview ballot email before save (#9646) (#9721) * feat: skeleton for modal email preview * fix: rudimentary transfer of the comment and discuss text * fix: put. the buttons. back. 
* fix: transfer of the data on the current form to the modal * fix: pull cc_select and additional_cc foward in the workflow UI * fix: refactor where ballot email is sent * fix: refactor build_position_email * chore: remove abandoned imports * chore: remove abandoned template --- ietf/doc/tests_ballot.py | 103 +++----- ietf/doc/tests_draft.py | 77 +++++- ietf/doc/tests_irsg_ballot.py | 63 +++-- ietf/doc/tests_rsab_ballot.py | 51 +--- ietf/doc/urls.py | 3 +- ietf/doc/views_ballot.py | 247 ++++++++++-------- ietf/mailtrigger/forms.py | 1 + ietf/templates/doc/ballot/edit_position.html | 89 ++++++- .../doc/ballot/send_ballot_comment.html | 44 ---- 9 files changed, 393 insertions(+), 285 deletions(-) delete mode 100644 ietf/templates/doc/ballot/send_ballot_comment.html diff --git a/ietf/doc/tests_ballot.py b/ietf/doc/tests_ballot.py index 810ee598f6..8420e411e2 100644 --- a/ietf/doc/tests_ballot.py +++ b/ietf/doc/tests_ballot.py @@ -25,7 +25,6 @@ from ietf.group.models import Group, Role from ietf.group.factories import GroupFactory, RoleFactory, ReviewTeamFactory from ietf.ipr.factories import HolderIprDisclosureFactory -from ietf.name.models import BallotPositionName from ietf.iesg.models import TelechatDate from ietf.person.models import Person from ietf.person.factories import PersonFactory, PersonalApiKeyFactory @@ -37,9 +36,18 @@ class EditPositionTests(TestCase): + + # N.B. This test needs to be rewritten to exercise all types of ballots (iesg, irsg, rsab) + # and test against the output of the mailtriggers instead of looking for hardcoded values + # in the To and CC results. 
See #7864 def test_edit_position(self): ad = Person.objects.get(user__username="ad") - draft = IndividualDraftFactory(ad=ad,stream_id='ietf') + draft = WgDraftFactory( + ad=ad, + stream_id="ietf", + notify="somebody@example.com", + group__acronym="mars", + ) ballot = create_ballot_if_not_open(None, draft, ad, 'approve') url = urlreverse('ietf.doc.views_ballot.edit_position', kwargs=dict(name=draft.name, ballot_id=ballot.pk)) @@ -55,11 +63,20 @@ def test_edit_position(self): self.assertEqual(len(q('form textarea[name=comment]')), 1) # vote + empty_outbox() events_before = draft.docevent_set.count() - - r = self.client.post(url, dict(position="discuss", - discuss=" This is a discussion test. \n ", - comment=" This is a test. \n ")) + + r = self.client.post( + url, + dict( + position="discuss", + discuss=" This is a discussion test. \n ", + comment=" This is a test. \n ", + additional_cc="test298347@example.com", + cc_choices=["doc_notify", "doc_group_chairs"], + send_mail=1, + ), + ) self.assertEqual(r.status_code, 302) pos = draft.latest_event(BallotPositionDocEvent, balloter=ad) @@ -70,6 +87,22 @@ def test_edit_position(self): self.assertTrue(pos.comment_time != None) self.assertTrue("New position" in pos.desc) self.assertEqual(draft.docevent_set.count(), events_before + 3) + self.assertEqual(len(outbox),1) + m = outbox[0] + self.assertTrue("COMMENT" in m['Subject']) + self.assertTrue("DISCUSS" in m['Subject']) + self.assertTrue(draft.name in m['Subject']) + self.assertTrue("This is a discussion test." 
in str(m)) + self.assertTrue("This is a test" in str(m)) + self.assertTrue("iesg@" in m['To']) + # cc_choice doc_group_chairs + self.assertTrue("mars-chairs@" in m['Cc']) + # cc_choice doc_notify + self.assertTrue("somebody@example.com" in m['Cc']) + # cc_choice doc_group_email_list was not selected + self.assertFalse(draft.group.list_email in m['Cc']) + # extra-cc + self.assertTrue("test298347@example.com" in m['Cc']) # recast vote events_before = draft.docevent_set.count() @@ -230,64 +263,6 @@ def test_cannot_edit_position_as_pre_ad(self): r = self.client.post(url, dict(position="discuss", discuss="Test discuss text")) self.assertEqual(r.status_code, 403) - # N.B. This test needs to be rewritten to exercise all types of ballots (iesg, irsg, rsab) - # and test against the output of the mailtriggers instead of looking for hardcoded values - # in the To and CC results. See #7864 - def test_send_ballot_comment(self): - ad = Person.objects.get(user__username="ad") - draft = WgDraftFactory(ad=ad,group__acronym='mars') - draft.notify = "somebody@example.com" - draft.save_with_history([DocEvent.objects.create(doc=draft, rev=draft.rev, type="changed_document", by=Person.objects.get(user__username="secretary"), desc="Test")]) - - ballot = create_ballot_if_not_open(None, draft, ad, 'approve') - - BallotPositionDocEvent.objects.create( - doc=draft, rev=draft.rev, type="changed_ballot_position", - by=ad, balloter=ad, ballot=ballot, pos=BallotPositionName.objects.get(slug="discuss"), - discuss="This draft seems to be lacking a clearer title?", - discuss_time=timezone.now(), - comment="Test!", - comment_time=timezone.now()) - - url = urlreverse('ietf.doc.views_ballot.send_ballot_comment', kwargs=dict(name=draft.name, - ballot_id=ballot.pk)) - login_testing_unauthorized(self, "ad", url) - - # normal get - r = self.client.get(url) - self.assertEqual(r.status_code, 200) - q = PyQuery(r.content) - self.assertTrue(len(q('form input[name="extra_cc"]')) > 0) - - # send - 
mailbox_before = len(outbox) - - r = self.client.post(url, dict(extra_cc="test298347@example.com", cc_choices=['doc_notify','doc_group_chairs'])) - self.assertEqual(r.status_code, 302) - - self.assertEqual(len(outbox), mailbox_before + 1) - m = outbox[-1] - self.assertTrue("COMMENT" in m['Subject']) - self.assertTrue("DISCUSS" in m['Subject']) - self.assertTrue(draft.name in m['Subject']) - self.assertTrue("clearer title" in str(m)) - self.assertTrue("Test!" in str(m)) - self.assertTrue("iesg@" in m['To']) - # cc_choice doc_group_chairs - self.assertTrue("mars-chairs@" in m['Cc']) - # cc_choice doc_notify - self.assertTrue("somebody@example.com" in m['Cc']) - # cc_choice doc_group_email_list was not selected - self.assertFalse(draft.group.list_email in m['Cc']) - # extra-cc - self.assertTrue("test298347@example.com" in m['Cc']) - - r = self.client.post(url, dict(cc="")) - self.assertEqual(r.status_code, 302) - self.assertEqual(len(outbox), mailbox_before + 2) - m = outbox[-1] - self.assertTrue("iesg@" in m['To']) - self.assertFalse(m['Cc'] and draft.group.list_email in m['Cc']) class BallotWriteupsTests(TestCase): diff --git a/ietf/doc/tests_draft.py b/ietf/doc/tests_draft.py index ab33acebe6..4d262c5a2f 100644 --- a/ietf/doc/tests_draft.py +++ b/ietf/doc/tests_draft.py @@ -2,6 +2,7 @@ # -*- coding: utf-8 -*- +import json import os import datetime import io @@ -11,7 +12,7 @@ from pathlib import Path from pyquery import PyQuery -from django.db.models import Q +from django.db.models import Max, Q from django.urls import reverse as urlreverse from django.conf import settings from django.utils import timezone @@ -2391,3 +2392,77 @@ def test_editorial_metadata(self): top_level_metadata_headings = q("tbody>tr>th:first-child").text() self.assertNotIn("IESG", top_level_metadata_headings) self.assertNotIn("IANA", top_level_metadata_headings) + +class BallotEmailAjaxTests(TestCase): + def test_ajax_build_position_email(self): + def _post_json(self, url, json_to_post): + r = 
self.client.post( + url, json.dumps(json_to_post), content_type="application/json" + ) + self.assertEqual(r.status_code, 200) + return json.loads(r.content) + + doc = WgDraftFactory() + ad = RoleFactory( + name_id="ad", group=doc.group, person__name="Some Areadirector" + ).person + url = urlreverse("ietf.doc.views_ballot.ajax_build_position_email") + login_testing_unauthorized(self, "secretary", url) + r = self.client.get(url) + self.assertEqual(r.status_code, 405) + response = _post_json(self, url, {}) + self.assertFalse(response["success"]) + self.assertEqual(response["errors"], ["post_data not provided"]) + response = _post_json(self, url, {"dictis": "not empty"}) + self.assertFalse(response["success"]) + self.assertEqual(response["errors"], ["post_data not provided"]) + response = _post_json(self, url, {"post_data": {}}) + self.assertFalse(response["success"]) + self.assertEqual(len(response["errors"]), 7) + response = _post_json( + self, + url, + { + "post_data": { + "discuss": "aaaaaa", + "comment": "bbbbbb", + "position": "discuss", + "balloter": Person.objects.aggregate(maxpk=Max("pk") + 1)["maxpk"], + "docname": "this-draft-does-not-exist", + "cc_choices": ["doc_group_mail_list"], + "additional_cc": "foo@example.com", + } + }, + ) + self.assertFalse(response["success"]) + self.assertEqual( + response["errors"], + ["No person found matching balloter", "No document found matching docname"], + ) + response = _post_json( + self, + url, + { + "post_data": { + "discuss": "aaaaaa", + "comment": "bbbbbb", + "position": "discuss", + "balloter": ad.pk, + "docname": doc.name, + "cc_choices": ["doc_group_mail_list"], + "additional_cc": "foo@example.com", + } + }, + ) + self.assertTrue(response["success"]) + for snippet in [ + "aaaaaa", + "bbbbbb", + "DISCUSS", + ad.plain_name(), + doc.name, + doc.group.list_email, + "foo@example.com", + ]: + self.assertIn(snippet, response["text"]) + diff --git a/ietf/doc/tests_irsg_ballot.py b/ietf/doc/tests_irsg_ballot.py index 
aa62d8aaf9..d96cf9dbef 100644 --- a/ietf/doc/tests_irsg_ballot.py +++ b/ietf/doc/tests_irsg_ballot.py @@ -355,28 +355,35 @@ def test_issue_ballot(self): def test_take_and_email_position(self): draft = RgDraftFactory() ballot = IRSGBallotDocEventFactory(doc=draft) - url = urlreverse('ietf.doc.views_ballot.edit_position', kwargs=dict(name=draft.name, ballot_id=ballot.pk)) + self.balloter + url = ( + urlreverse( + "ietf.doc.views_ballot.edit_position", + kwargs=dict(name=draft.name, ballot_id=ballot.pk), + ) + + self.balloter + ) empty_outbox() login_testing_unauthorized(self, self.username, url) r = self.client.get(url) self.assertEqual(r.status_code, 200) - r = self.client.post(url, dict(position='yes', comment='oib239sb', send_mail='Save and send email')) + empty_outbox() + r = self.client.post( + url, + dict( + position="yes", + comment="oib239sb", + send_mail="Save and send email", + cc_choices=["doc_authors", "doc_group_chairs", "doc_group_mail_list"], + ), + ) self.assertEqual(r.status_code, 302) e = draft.latest_event(BallotPositionDocEvent) - self.assertEqual(e.pos.slug,'yes') - self.assertEqual(e.comment, 'oib239sb') - - url = urlreverse('ietf.doc.views_ballot.send_ballot_comment', kwargs=dict(name=draft.name, ballot_id=ballot.pk)) + self.balloter - - r = self.client.get(url) - self.assertEqual(r.status_code, 200) - - r = self.client.post(url, dict(cc_choices=['doc_authors','doc_group_chairs','doc_group_mail_list'], body="Stuff")) - self.assertEqual(r.status_code, 302) - self.assertEqual(len(outbox),1) - self.assertNotIn('discuss-criteria', get_payload_text(outbox[0])) + self.assertEqual(e.pos.slug, "yes") + self.assertEqual(e.comment, "oib239sb") + self.assertEqual(len(outbox), 1) + self.assertNotIn("discuss-criteria", get_payload_text(outbox[0])) def test_close_ballot(self): draft = RgDraftFactory() @@ -482,27 +489,31 @@ def test_cant_take_position_on_iesg_ballot(self): def test_take_and_email_position(self): draft = RgDraftFactory() ballot = 
IRSGBallotDocEventFactory(doc=draft) - url = urlreverse('ietf.doc.views_ballot.edit_position', kwargs=dict(name=draft.name, ballot_id=ballot.pk)) + url = urlreverse( + "ietf.doc.views_ballot.edit_position", + kwargs=dict(name=draft.name, ballot_id=ballot.pk), + ) empty_outbox() login_testing_unauthorized(self, self.username, url) r = self.client.get(url) self.assertEqual(r.status_code, 200) - r = self.client.post(url, dict(position='yes', comment='oib239sb', send_mail='Save and send email')) + r = self.client.post( + url, + dict( + position="yes", + comment="oib239sb", + send_mail="Save and send email", + cc_choices=["doc_authors", "doc_group_chairs", "doc_group_mail_list"], + ), + ) self.assertEqual(r.status_code, 302) e = draft.latest_event(BallotPositionDocEvent) - self.assertEqual(e.pos.slug,'yes') - self.assertEqual(e.comment, 'oib239sb') - - url = urlreverse('ietf.doc.views_ballot.send_ballot_comment', kwargs=dict(name=draft.name, ballot_id=ballot.pk)) - - r = self.client.get(url) - self.assertEqual(r.status_code, 200) - - r = self.client.post(url, dict(cc_choices=['doc_authors','doc_group_chairs','doc_group_mail_list'], body="Stuff")) + self.assertEqual(e.pos.slug, "yes") + self.assertEqual(e.comment, "oib239sb") self.assertEqual(r.status_code, 302) - self.assertEqual(len(outbox),1) + self.assertEqual(len(outbox), 1) class IESGMemberTests(TestCase): diff --git a/ietf/doc/tests_rsab_ballot.py b/ietf/doc/tests_rsab_ballot.py index 028f548232..9086106ba9 100644 --- a/ietf/doc/tests_rsab_ballot.py +++ b/ietf/doc/tests_rsab_ballot.py @@ -333,34 +333,19 @@ def test_take_and_email_position(self): r = self.client.get(url) self.assertEqual(r.status_code, 200) - r = self.client.post( - url, - dict(position="yes", comment="oib239sb", send_mail="Save and send email"), - ) - self.assertEqual(r.status_code, 302) - e = draft.latest_event(BallotPositionDocEvent) - self.assertEqual(e.pos.slug, "yes") - self.assertEqual(e.comment, "oib239sb") - - url = ( - urlreverse( - 
"ietf.doc.views_ballot.send_ballot_comment", - kwargs=dict(name=draft.name, ballot_id=ballot.pk), - ) - + self.balloter - ) - - r = self.client.get(url) - self.assertEqual(r.status_code, 200) - r = self.client.post( url, dict( + position="yes", + comment="oib239sb", + send_mail="Save and send email", cc_choices=["doc_authors", "doc_group_chairs", "doc_group_mail_list"], - body="Stuff", ), ) self.assertEqual(r.status_code, 302) + e = draft.latest_event(BallotPositionDocEvent) + self.assertEqual(e.pos.slug, "yes") + self.assertEqual(e.comment, "oib239sb") self.assertEqual(len(outbox), 1) self.assertNotIn("discuss-criteria", get_payload_text(outbox[0])) @@ -532,31 +517,19 @@ def test_take_and_email_position(self): r = self.client.get(url) self.assertEqual(r.status_code, 200) - r = self.client.post( - url, - dict(position="yes", comment="oib239sb", send_mail="Save and send email"), - ) - self.assertEqual(r.status_code, 302) - e = draft.latest_event(BallotPositionDocEvent) - self.assertEqual(e.pos.slug, "yes") - self.assertEqual(e.comment, "oib239sb") - - url = urlreverse( - "ietf.doc.views_ballot.send_ballot_comment", - kwargs=dict(name=draft.name, ballot_id=ballot.pk), - ) - - r = self.client.get(url) - self.assertEqual(r.status_code, 200) - r = self.client.post( url, dict( + position="yes", + comment="oib239sb", + send_mail="Save and send email", cc_choices=["doc_authors", "doc_group_chairs", "doc_group_mail_list"], - body="Stuff", ), ) self.assertEqual(r.status_code, 302) + e = draft.latest_event(BallotPositionDocEvent) + self.assertEqual(e.pos.slug, "yes") + self.assertEqual(e.comment, "oib239sb") self.assertEqual(len(outbox), 1) diff --git a/ietf/doc/urls.py b/ietf/doc/urls.py index 7b444782d7..8e9c0569e2 100644 --- a/ietf/doc/urls.py +++ b/ietf/doc/urls.py @@ -93,6 +93,8 @@ url(r'^ballots/irsg/$', views_ballot.irsg_ballot_status), url(r'^ballots/rsab/$', views_ballot.rsab_ballot_status), + url(r'^build-position-email/$', views_ballot.ajax_build_position_email), + 
url(r'^(?P(bcp|std|fyi))/?$', views_search.index_subseries), url(r'^%(name)s(?:/%(rev)s)?/$' % settings.URL_REGEXPS, views_doc.document_main), @@ -111,7 +113,6 @@ url(r'^%(name)s/ballot/rsab/$' % settings.URL_REGEXPS, views_doc.document_rsab_ballot), url(r'^%(name)s/ballot/(?P[0-9]+)/$' % settings.URL_REGEXPS, views_doc.document_ballot), url(r'^%(name)s/ballot/(?P[0-9]+)/position/$' % settings.URL_REGEXPS, views_ballot.edit_position), - url(r'^%(name)s/ballot/(?P[0-9]+)/emailposition/$' % settings.URL_REGEXPS, views_ballot.send_ballot_comment), url(r'^%(name)s/(?:%(rev)s/)?doc.json$' % settings.URL_REGEXPS, views_doc.document_json), url(r'^%(name)s/ballotpopup/(?P[0-9]+)/$' % settings.URL_REGEXPS, views_doc.ballot_popup), url(r'^(?P[A-Za-z0-9._+-]+)/reviewrequest/', include("ietf.doc.urls_review")), diff --git a/ietf/doc/views_ballot.py b/ietf/doc/views_ballot.py index 0ba340890d..03cf01a4a1 100644 --- a/ietf/doc/views_ballot.py +++ b/ietf/doc/views_ballot.py @@ -4,18 +4,18 @@ # Directors and Secretariat -import datetime, json +import datetime +import json from django import forms from django.conf import settings -from django.http import HttpResponse, HttpResponseRedirect, Http404, HttpResponseBadRequest +from django.http import HttpResponse, HttpResponseNotAllowed, HttpResponseRedirect, Http404, HttpResponseBadRequest from django.shortcuts import render, get_object_or_404, redirect from django.template.defaultfilters import striptags from django.template.loader import render_to_string from django.urls import reverse as urlreverse from django.views.decorators.csrf import csrf_exempt from django.utils.html import escape -from urllib.parse import urlencode as urllib_urlencode import debug # pyflakes:ignore @@ -34,14 +34,15 @@ from ietf.doc.templatetags.ietf_filters import can_ballot from ietf.iesg.models import TelechatDate from ietf.ietfauth.utils import has_role, role_required, is_authorized_in_doc_stream +from ietf.mailtrigger.models import Recipient from 
ietf.mailtrigger.utils import gather_address_lists from ietf.mailtrigger.forms import CcSelectForm from ietf.message.utils import infer_message from ietf.name.models import BallotPositionName, DocTypeName from ietf.person.models import Person -from ietf.utils.fields import ModelMultipleChoiceField +from ietf.utils.fields import ModelMultipleChoiceField, MultiEmailField from ietf.utils.http import validate_return_to_path -from ietf.utils.mail import send_mail_text, send_mail_preformatted +from ietf.utils.mail import decode_header_value, send_mail_text, send_mail_preformatted from ietf.utils.decorators import require_api_key from ietf.utils.response import permission_denied from ietf.utils.timezone import date_today, datetime_from_date, DEADLINE_TZINFO @@ -179,6 +180,9 @@ def save_position(form, doc, ballot, balloter, login=None, send_email=False): return pos +class AdditionalCCForm(forms.Form): + additional_cc = MultiEmailField(required=False) + @role_required("Area Director", "Secretariat", "IRSG Member", "RSAB Member") def edit_position(request, name, ballot_id): """Vote and edit discuss and comment on document""" @@ -199,50 +203,67 @@ def edit_position(request, name, ballot_id): raise Http404 balloter = get_object_or_404(Person, pk=balloter_id) + if doc.stream_id == 'irtf': + mailtrigger_slug='irsg_ballot_saved' + elif doc.stream_id == 'editorial': + mailtrigger_slug='rsab_ballot_saved' + else: + mailtrigger_slug='iesg_ballot_saved' + if request.method == 'POST': old_pos = None if not has_role(request.user, "Secretariat") and not can_ballot(request.user, doc): # prevent pre-ADs from taking a position permission_denied(request, "Must be an active member (not a pre-AD for example) of the balloting body to take a position") + if request.POST.get("Defer") and doc.stream.slug != "irtf": + return redirect('ietf.doc.views_ballot.defer_ballot', name=doc) + elif request.POST.get("Undefer") and doc.stream.slug != "irtf": + return 
redirect('ietf.doc.views_ballot.undefer_ballot', name=doc) + form = EditPositionForm(request.POST, ballot_type=ballot.ballot_type) - if form.is_valid(): + cc_select_form = CcSelectForm(data=request.POST,mailtrigger_slug=mailtrigger_slug,mailtrigger_context={'doc':doc}) + additional_cc_form = AdditionalCCForm(request.POST) + if form.is_valid() and cc_select_form.is_valid() and additional_cc_form.is_valid(): send_mail = True if request.POST.get("send_mail") else False - save_position(form, doc, ballot, balloter, login, send_mail) - + pos = save_position(form, doc, ballot, balloter, login, send_mail) if send_mail: - query = {} - if request.GET.get('balloter'): - query['balloter'] = request.GET.get('balloter') - if request.GET.get('ballot_edit_return_point'): - query['ballot_edit_return_point'] = request.GET.get('ballot_edit_return_point') - qstr = "" - if len(query) > 0: - qstr = "?" + urllib_urlencode(query, safe='/') - return HttpResponseRedirect(urlreverse('ietf.doc.views_ballot.send_ballot_comment', kwargs=dict(name=doc.name, ballot_id=ballot_id)) + qstr) - elif request.POST.get("Defer") and doc.stream.slug != "irtf": - return redirect('ietf.doc.views_ballot.defer_ballot', name=doc) - elif request.POST.get("Undefer") and doc.stream.slug != "irtf": - return redirect('ietf.doc.views_ballot.undefer_ballot', name=doc) - else: - return HttpResponseRedirect(return_to_url) + addrs, frm, subject, body = build_position_email(balloter, doc, pos) + if doc.stream_id == 'irtf': + mailtrigger_slug='irsg_ballot_saved' + elif doc.stream_id == 'editorial': + mailtrigger_slug='rsab_ballot_saved' + else: + mailtrigger_slug='iesg_ballot_saved' + cc = [] + cc.extend(cc_select_form.get_selected_addresses()) + extra_cc = additional_cc_form.cleaned_data["additional_cc"] + if extra_cc: + cc.extend(extra_cc) + cc_set = set(cc) + cc_set.discard("") + cc = sorted(list(cc_set)) + send_mail_text(request, addrs.to, frm, subject, body, cc=", ".join(cc)) + return redirect(return_to_url) else: 
initial = {} old_pos = doc.latest_event(BallotPositionDocEvent, type="changed_ballot_position", balloter=balloter, ballot=ballot) if old_pos: initial['position'] = old_pos.pos.slug initial['discuss'] = old_pos.discuss - initial['comment'] = old_pos.comment - + initial['comment'] = old_pos.comment form = EditPositionForm(initial=initial, ballot_type=ballot.ballot_type) + cc_select_form = CcSelectForm(mailtrigger_slug=mailtrigger_slug,mailtrigger_context={'doc':doc}) + additional_cc_form = AdditionalCCForm() blocking_positions = dict((p.pk, p.name) for p in form.fields["position"].queryset.all() if p.blocking) - ballot_deferred = doc.active_defer_event() return render(request, 'doc/ballot/edit_position.html', dict(doc=doc, form=form, + cc_select_form=cc_select_form, + additional_cc_form=additional_cc_form, balloter=balloter, return_to_url=return_to_url, old_pos=old_pos, @@ -301,21 +322,98 @@ def err(code, text): ) -def build_position_email(balloter, doc, pos): +@role_required("Area Director", "Secretariat") +@csrf_exempt +def ajax_build_position_email(request): + if request.method != "POST": + return HttpResponseNotAllowed(["POST"]) + errors = list() + try: + json_body = json.loads(request.body) + except json.decoder.JSONDecodeError: + errors.append("Post body is not valid json") + if len(errors) == 0: + post_data = json_body.get("post_data") + if post_data is None: + errors.append("post_data not provided") + else: + for key in [ + "discuss", + "comment", + "position", + "balloter", + "docname", + "cc_choices", + "additional_cc", + ]: + if key not in post_data: + errors.append(f"{key} not found in post_data") + if len(errors) == 0: + person = Person.objects.filter(pk=post_data.get("balloter")).first() + if person is None: + errors.append("No person found matching balloter") + doc = Document.objects.filter(name=post_data.get("docname")).first() + if doc is None: + errors.append("No document found matching docname") + if len(errors) > 0: + response = { + "success": 
False, + "errors": errors, + } + else: + wanted = dict() # consider named tuple instead + wanted["discuss"] = post_data.get("discuss") + wanted["comment"] = post_data.get("comment") + wanted["position_name"] = post_data.get("position") + wanted["balloter"] = person + wanted["doc"] = doc + addrs, frm, subject, body = build_position_email_from_dict(wanted) + + recipient_slugs = post_data.get("cc_choices") + # Consider refactoring gather_address_lists so this isn't duplicated from there + cc_addrs = set() + for r in Recipient.objects.filter(slug__in=recipient_slugs): + cc_addrs.update(r.gather(doc=doc)) + additional_cc = post_data.get("additional_cc") + for addr in additional_cc.split(","): + cc_addrs.add(addr.strip()) + cc_addrs.discard("") + cc_addrs = sorted(list(cc_addrs)) + + response_text = "\n".join( + [ + f"From: {decode_header_value(frm)}", + f"To: {', '.join([decode_header_value(addr) for addr in addrs.to])}", + f"Cc: {', '.join([decode_header_value(addr) for addr in cc_addrs])}", + f"Subject: {subject}", + "", + body, + ] + ) + + response = { + "success": True, + "text": response_text, + } + return HttpResponse(json.dumps(response), content_type="application/json") + +def build_position_email_from_dict(pos_dict): + doc = pos_dict["doc"] subj = [] d = "" blocking_name = "DISCUSS" - if pos.pos.blocking and pos.discuss: - d = pos.discuss - blocking_name = pos.pos.name.upper() + pos_name = BallotPositionName.objects.filter(slug=pos_dict["position_name"]).first() + if pos_name.blocking and pos_dict.get("discuss"): + d = pos_dict.get("discuss") + blocking_name = pos_name.name.upper() subj.append(blocking_name) c = "" - if pos.comment: - c = pos.comment + if pos_dict.get("comment"): + c = pos_dict.get("comment") subj.append("COMMENT") - + balloter = pos_dict.get("balloter") balloter_name_genitive = balloter.plain_name() + "'" if balloter.plain_name().endswith('s') else balloter.plain_name() + "'s" - subject = "%s %s on %s" % (balloter_name_genitive, pos.pos.name 
if pos.pos else "No Position", doc.name + "-" + doc.rev) + subject = "%s %s on %s" % (balloter_name_genitive, pos_name.name if pos_name else "No Position", doc.name + "-" + doc.rev) if subj: subject += ": (with %s)" % " and ".join(subj) @@ -324,7 +422,7 @@ def build_position_email(balloter, doc, pos): comment=c, balloter=balloter.plain_name(), doc=doc, - pos=pos.pos, + pos=pos_name, blocking_name=blocking_name, settings=settings)) frm = balloter.role_email("ad").formatted_email() @@ -338,79 +436,16 @@ def build_position_email(balloter, doc, pos): return addrs, frm, subject, body -@role_required('Area Director','Secretariat','IRSG Member', 'RSAB Member') -def send_ballot_comment(request, name, ballot_id): - """Email document ballot position discuss/comment for Area Director.""" - doc = get_object_or_404(Document, name=name) - ballot = get_object_or_404(BallotDocEvent, type="created_ballot", pk=ballot_id, doc=doc) - if not has_role(request.user, 'Secretariat'): - if any([ - doc.stream_id == 'ietf' and not has_role(request.user, 'Area Director'), - doc.stream_id == 'irtf' and not has_role(request.user, 'IRSG Member'), - doc.stream_id == 'editorial' and not has_role(request.user, 'RSAB Member'), - ]): - raise Http404 - - balloter = request.user.person - - try: - return_to_url = parse_ballot_edit_return_point(request.GET.get('ballot_edit_return_point'), doc.name, ballot_id) - except ValueError: - return HttpResponseBadRequest('ballot_edit_return_point is invalid') - - if 'HTTP_REFERER' in request.META: - back_url = request.META['HTTP_REFERER'] - else: - back_url = urlreverse("ietf.doc.views_doc.document_ballot", kwargs=dict(name=doc.name, ballot_id=ballot_id)) - - # if we're in the Secretariat, we can select a balloter (such as an AD) to act as stand-in for - if has_role(request.user, "Secretariat"): - balloter_id = request.GET.get('balloter') - if not balloter_id: - raise Http404 - balloter = get_object_or_404(Person, pk=balloter_id) - - pos = 
doc.latest_event(BallotPositionDocEvent, type="changed_ballot_position", balloter=balloter, ballot=ballot) - if not pos: - raise Http404 - - addrs, frm, subject, body = build_position_email(balloter, doc, pos) - - if doc.stream_id == 'irtf': - mailtrigger_slug='irsg_ballot_saved' - elif doc.stream_id == 'editorial': - mailtrigger_slug='rsab_ballot_saved' - else: - mailtrigger_slug='iesg_ballot_saved' - - if request.method == 'POST': - cc = [] - cc_select_form = CcSelectForm(data=request.POST,mailtrigger_slug=mailtrigger_slug,mailtrigger_context={'doc':doc}) - if cc_select_form.is_valid(): - cc.extend(cc_select_form.get_selected_addresses()) - extra_cc = [x.strip() for x in request.POST.get("extra_cc","").split(',') if x.strip()] - if extra_cc: - cc.extend(extra_cc) - - send_mail_text(request, addrs.to, frm, subject, body, cc=", ".join(cc)) - - return HttpResponseRedirect(return_to_url) - - else: +def build_position_email(balloter, doc, pos): - cc_select_form = CcSelectForm(mailtrigger_slug=mailtrigger_slug,mailtrigger_context={'doc':doc}) - - return render(request, 'doc/ballot/send_ballot_comment.html', - dict(doc=doc, - subject=subject, - body=body, - frm=frm, - to=addrs.as_strings().to, - balloter=balloter, - back_url=back_url, - cc_select_form = cc_select_form, - )) + pos_dict=dict() + pos_dict["doc"]=doc + pos_dict["position_name"]=pos.pos.slug + pos_dict["discuss"]=pos.discuss + pos_dict["comment"]=pos.comment + pos_dict["balloter"]=balloter + return build_position_email_from_dict(pos_dict) @role_required('Area Director','Secretariat') def clear_ballot(request, name, ballot_type_slug): diff --git a/ietf/mailtrigger/forms.py b/ietf/mailtrigger/forms.py index 366c429d8c..8d13c5edf3 100644 --- a/ietf/mailtrigger/forms.py +++ b/ietf/mailtrigger/forms.py @@ -11,6 +11,7 @@ class CcSelectForm(forms.Form): expansions = dict() # type: Dict[str, List[str]] cc_choices = forms.MultipleChoiceField( + required=False, label='Cc', choices=[], 
widget=forms.CheckboxSelectMultiple(), diff --git a/ietf/templates/doc/ballot/edit_position.html b/ietf/templates/doc/ballot/edit_position.html index 293c186112..b57e9a3652 100644 --- a/ietf/templates/doc/ballot/edit_position.html +++ b/ietf/templates/doc/ballot/edit_position.html @@ -20,24 +20,48 @@

    Ballot deferred by {{ ballot_deferred.by }} on {{ ballot_deferred.time|date:"Y-m-d" }}.

    {% endif %} +
    +
    + {% if form.errors or cc_select_form.errors or additional_cc_form.errors %} +
    + There were errors in the submitted form -- see below. Please correct these and resubmit. +
    + {% if form.errors %} +
    Position entry
    + {% bootstrap_form_errors form %} + {% endif %} + {% if cc_select_form.errors %} +
    CC selection
    + {% bootstrap_form_errors cc_select_form %} + {% endif %} + {% if additional_cc_form.errors %} +
    Additional Cc Addresses
    + {% bootstrap_form_errors additional_cc_form %} + {% endif %} + {% endif %}
    {% csrf_token %} {% for field in form %} {% if field.name == "discuss" %}
    {% endif %} {% bootstrap_field field %} {% if field.name == "discuss" and old_pos and old_pos.discuss_time %} -
    Last edited {{ old_pos.discuss_time }}
    +
    Last saved {{ old_pos.discuss_time }}
    {% elif field.name == "comment" and old_pos and old_pos.comment_time %} -
    Last edited {{ old_pos.comment_time }}
    +
    Last saved {{ old_pos.comment_time }}
    {% endif %} {% if field.name == "discuss" %}
    {% endif %} {% endfor %} + {% bootstrap_form cc_select_form %} + {% bootstrap_form additional_cc_form %}
    + - + {% if doc.type_id == "draft" or doc.type_id == "conflrev" %} {% if doc.stream.slug != "irtf" %} {% if ballot_deferred %} @@ -58,7 +82,24 @@

    Back

    -
    + + + {% endblock %} {% block js %} + + {% endblock %} \ No newline at end of file diff --git a/ietf/templates/doc/ballot/send_ballot_comment.html b/ietf/templates/doc/ballot/send_ballot_comment.html deleted file mode 100644 index 1c5f521859..0000000000 --- a/ietf/templates/doc/ballot/send_ballot_comment.html +++ /dev/null @@ -1,44 +0,0 @@ -{% extends "base.html" %} -{# Copyright The IETF Trust 2015, All Rights Reserved #} -{% load origin %} -{% load django_bootstrap5 %} -{% load ietf_filters %} -{% block title %}Send ballot position for {{ balloter }} on {{ doc }}{% endblock %} -{% block content %} - {% origin %} -

    - Send ballot position for {{ balloter }} -
    - {{ doc }} -

    -
    - {% csrf_token %} -
    - - -
    -
    - - -
    - {% bootstrap_form cc_select_form %} -
    - - -
    Separate email addresses with commas.
    -
    -
    - - -
    -
    -

    Body

    -
    {{ body|maybewordwrap }}
    -
    - - - Back - -
    -{% endblock %} From 8f2feef631acbd8b181a845140647c2c83a9299f Mon Sep 17 00:00:00 2001 From: NGPixel Date: Tue, 14 Oct 2025 18:57:50 -0400 Subject: [PATCH 220/405] ci: update build workflow to deploy to dev --- .github/workflows/build.yml | 71 ++--- dev/k8s-get-deploy-name/.editorconfig | 7 + dev/k8s-get-deploy-name/.gitignore | 1 + dev/k8s-get-deploy-name/.npmrc | 3 + dev/k8s-get-deploy-name/README.md | 16 ++ dev/k8s-get-deploy-name/cli.js | 22 ++ dev/k8s-get-deploy-name/package-lock.json | 303 ++++++++++++++++++++++ dev/k8s-get-deploy-name/package.json | 8 + 8 files changed, 396 insertions(+), 35 deletions(-) create mode 100644 dev/k8s-get-deploy-name/.editorconfig create mode 100644 dev/k8s-get-deploy-name/.gitignore create mode 100644 dev/k8s-get-deploy-name/.npmrc create mode 100644 dev/k8s-get-deploy-name/README.md create mode 100644 dev/k8s-get-deploy-name/cli.js create mode 100644 dev/k8s-get-deploy-name/package-lock.json create mode 100644 dev/k8s-get-deploy-name/package.json diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 8567446cae..15eaba48d1 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -16,13 +16,13 @@ on: - Skip - Staging Only - Staging + Prod - sandbox: - description: 'Deploy to Sandbox' + dev: + description: 'Deploy to Dev' default: true required: true type: boolean - sandboxNoDbRefresh: - description: 'Sandbox Disable Daily DB Refresh' + devNoDbRefresh: + description: 'Dev Disable Daily DB Refresh' default: false required: true type: boolean @@ -392,44 +392,45 @@ jobs: value: "Failed" # ----------------------------------------------------------------- - # SANDBOX + # DEV # ----------------------------------------------------------------- - sandbox: - name: Deploy to Sandbox - if: ${{ !failure() && !cancelled() && github.event.inputs.sandbox == 'true' }} + dev: + name: Deploy to Dev + if: ${{ !failure() && !cancelled() && github.event.inputs.dev == 'true' }} needs: [prepare, release] 
- runs-on: [self-hosted, dev-server] + runs-on: ubuntu-latest environment: - name: sandbox + name: dev env: PKG_VERSION: ${{needs.prepare.outputs.pkg_version}} steps: - - uses: actions/checkout@v4 - - - name: Download a Release Artifact - uses: actions/download-artifact@v4.3.0 - with: - name: release-${{ env.PKG_VERSION }} - - - name: Deploy to containers - env: - DEBIAN_FRONTEND: noninteractive - run: | - echo "Reset production flags in settings.py..." - sed -i -r -e 's/^DEBUG *= *.*$/DEBUG = True/' -e "s/^SERVER_MODE *= *.*\$/SERVER_MODE = 'development'/" ietf/settings.py - echo "Install Deploy to Container CLI dependencies..." - cd dev/deploy-to-container - npm ci - cd ../.. - echo "Start Deploy..." - node ./dev/deploy-to-container/cli.js --branch ${{ github.ref_name }} --domain dev.ietf.org --appversion ${{ env.PKG_VERSION }} --commit ${{ github.sha }} --ghrunid ${{ github.run_id }} --nodbrefresh ${{ github.event.inputs.sandboxNoDbRefresh }} - - - name: Cleanup old docker resources - env: - DEBIAN_FRONTEND: noninteractive - run: | - docker image prune -a -f + - uses: actions/checkout@v4 + with: + ref: main + + - name: Get Deploy Name + env: + DEBIAN_FRONTEND: noninteractive + run: | + echo "Install Get Deploy Name CLI dependencies..." + cd dev/k8s-get-deploy-name + npm ci + echo "Get Deploy Name..." 
+ echo "DEPLOY_NAMESPACE=$(node cli.js --branch ${{ github.ref_name }})" >> "$GITHUB_ENV" + + - name: Deploy to dev + uses: the-actions-org/workflow-dispatch@v4 + with: + workflow: deploy-dev.yml + repo: ietf-tools/infra-k8s + ref: main + token: ${{ secrets.GH_INFRA_K8S_TOKEN }} + inputs: '{ "app":"datatracker", "appVersion":"${{ env.PKG_VERSION }}", "remoteRef":"${{ github.sha }}", "namespace":"${{ env.DEPLOY_NAMESPACE }}" }' + wait-for-completion: true + wait-for-completion-timeout: 30m + wait-for-completion-interval: 30s + display-workflow-run-url: false # ----------------------------------------------------------------- # STAGING diff --git a/dev/k8s-get-deploy-name/.editorconfig b/dev/k8s-get-deploy-name/.editorconfig new file mode 100644 index 0000000000..fec5c66519 --- /dev/null +++ b/dev/k8s-get-deploy-name/.editorconfig @@ -0,0 +1,7 @@ +[*] +indent_size = 2 +indent_style = space +charset = utf-8 +trim_trailing_whitespace = false +end_of_line = lf +insert_final_newline = true diff --git a/dev/k8s-get-deploy-name/.gitignore b/dev/k8s-get-deploy-name/.gitignore new file mode 100644 index 0000000000..07e6e472cc --- /dev/null +++ b/dev/k8s-get-deploy-name/.gitignore @@ -0,0 +1 @@ +/node_modules diff --git a/dev/k8s-get-deploy-name/.npmrc b/dev/k8s-get-deploy-name/.npmrc new file mode 100644 index 0000000000..580a68c499 --- /dev/null +++ b/dev/k8s-get-deploy-name/.npmrc @@ -0,0 +1,3 @@ +audit = false +fund = false +save-exact = true diff --git a/dev/k8s-get-deploy-name/README.md b/dev/k8s-get-deploy-name/README.md new file mode 100644 index 0000000000..a6605e4dd2 --- /dev/null +++ b/dev/k8s-get-deploy-name/README.md @@ -0,0 +1,16 @@ +# Datatracker Get Deploy Name + +This tool process and slugify a git branch into an appropriate subdomain name. + +## Usage + +1. From the `dev/k8s-get-deploy-name` directory, install the dependencies: +```sh +npm install +``` +2. 
Run the command: (replacing the `branch` argument) +```sh +node /cli.js --branch feat/fooBar-123 +``` + +The subdomain name will be output. It can then be used in a workflow as a namespace name and subdomain value. diff --git a/dev/k8s-get-deploy-name/cli.js b/dev/k8s-get-deploy-name/cli.js new file mode 100644 index 0000000000..b6c3b5119e --- /dev/null +++ b/dev/k8s-get-deploy-name/cli.js @@ -0,0 +1,22 @@ +#!/usr/bin/env node + +import yargs from 'yargs/yargs' +import { hideBin } from 'yargs/helpers' +import slugify from 'slugify' + +const argv = yargs(hideBin(process.argv)).argv + +let branch = argv.branch +if (!branch) { + throw new Error('Missing --branch argument!') +} +if (branch.indexOf('/') >= 0) { + branch = branch.split('/').slice(1).join('-') +} +branch = slugify(branch, { lower: true, strict: true }) +if (branch.length < 1) { + throw new Error('Branch name is empty!') +} +process.stdout.write(`dt-${branch}`) + +process.exit(0) diff --git a/dev/k8s-get-deploy-name/package-lock.json b/dev/k8s-get-deploy-name/package-lock.json new file mode 100644 index 0000000000..e492a4cd38 --- /dev/null +++ b/dev/k8s-get-deploy-name/package-lock.json @@ -0,0 +1,303 @@ +{ + "name": "k8s-get-deploy-name", + "lockfileVersion": 2, + "requires": true, + "packages": { + "": { + "name": "k8s-get-deploy-name", + "dependencies": { + "slugify": "1.6.6", + "yargs": "17.7.2" + } + }, + "node_modules/ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "engines": { + "node": ">=8" + } + }, + "node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + 
"node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/cliui": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-8.0.1.tgz", + "integrity": "sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==", + "dependencies": { + "string-width": "^4.2.0", + "strip-ansi": "^6.0.1", + "wrap-ansi": "^7.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" + }, + "node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==" + }, + "node_modules/escalade": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz", + "integrity": "sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==", + "engines": { + "node": ">=6" + } + }, + "node_modules/get-caller-file": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", + "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", + "engines": { + "node": "6.* || 8.* || >= 10.*" + } + }, + "node_modules/is-fullwidth-code-point": { + "version": "3.0.0", + 
"resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "engines": { + "node": ">=8" + } + }, + "node_modules/require-directory": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", + "integrity": "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/slugify": { + "version": "1.6.6", + "resolved": "https://registry.npmjs.org/slugify/-/slugify-1.6.6.tgz", + "integrity": "sha512-h+z7HKHYXj6wJU+AnS/+IH8Uh9fdcX1Lrhg1/VMdf9PwoBQXFcXiAdsy2tSK0P6gKwJLXp02r90ahUCqHk9rrw==", + "engines": { + "node": ">=8.0.0" + } + }, + "node_modules/string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/wrap-ansi": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": 
"https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/y18n": { + "version": "5.0.8", + "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz", + "integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==", + "engines": { + "node": ">=10" + } + }, + "node_modules/yargs": { + "version": "17.7.2", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-17.7.2.tgz", + "integrity": "sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==", + "dependencies": { + "cliui": "^8.0.1", + "escalade": "^3.1.1", + "get-caller-file": "^2.0.5", + "require-directory": "^2.1.1", + "string-width": "^4.2.3", + "y18n": "^5.0.5", + "yargs-parser": "^21.1.1" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/yargs-parser": { + "version": "21.1.1", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.1.1.tgz", + "integrity": "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==", + "engines": { + "node": ">=12" + } + } + }, + "dependencies": { + "ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==" + }, + "ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "requires": { + "color-convert": "^2.0.1" + } + }, + "cliui": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-8.0.1.tgz", + "integrity": "sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==", + "requires": { + "string-width": "^4.2.0", + "strip-ansi": "^6.0.1", + "wrap-ansi": "^7.0.0" + } + }, + "color-convert": { + 
"version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "requires": { + "color-name": "~1.1.4" + } + }, + "color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" + }, + "emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==" + }, + "escalade": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz", + "integrity": "sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==" + }, + "get-caller-file": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", + "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==" + }, + "is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==" + }, + "require-directory": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", + "integrity": "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==" + }, + "slugify": { + "version": "1.6.6", + "resolved": "https://registry.npmjs.org/slugify/-/slugify-1.6.6.tgz", + "integrity": "sha512-h+z7HKHYXj6wJU+AnS/+IH8Uh9fdcX1Lrhg1/VMdf9PwoBQXFcXiAdsy2tSK0P6gKwJLXp02r90ahUCqHk9rrw==" + }, + 
"string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "requires": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + } + }, + "strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "requires": { + "ansi-regex": "^5.0.1" + } + }, + "wrap-ansi": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "requires": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + } + }, + "y18n": { + "version": "5.0.8", + "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz", + "integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==" + }, + "yargs": { + "version": "17.7.2", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-17.7.2.tgz", + "integrity": "sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==", + "requires": { + "cliui": "^8.0.1", + "escalade": "^3.1.1", + "get-caller-file": "^2.0.5", + "require-directory": "^2.1.1", + "string-width": "^4.2.3", + "y18n": "^5.0.5", + "yargs-parser": "^21.1.1" + } + }, + "yargs-parser": { + "version": "21.1.1", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.1.1.tgz", + "integrity": "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==" + } + } +} diff --git a/dev/k8s-get-deploy-name/package.json b/dev/k8s-get-deploy-name/package.json new file mode 100644 index 
0000000000..849f5d9b8d --- /dev/null +++ b/dev/k8s-get-deploy-name/package.json @@ -0,0 +1,8 @@ +{ + "name": "k8s-get-deploy-name", + "type": "module", + "dependencies": { + "slugify": "1.6.6", + "yargs": "17.7.2" + } +} From 5a7be260dd6dfd9c484bc7c50ef991642fa8ad8e Mon Sep 17 00:00:00 2001 From: NGPixel Date: Wed, 15 Oct 2025 03:07:07 -0400 Subject: [PATCH 221/405] chore: add disableDailyDbRefresh flag to build workflow --- .github/workflows/build.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 15eaba48d1..4c70456a73 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -426,7 +426,7 @@ jobs: repo: ietf-tools/infra-k8s ref: main token: ${{ secrets.GH_INFRA_K8S_TOKEN }} - inputs: '{ "app":"datatracker", "appVersion":"${{ env.PKG_VERSION }}", "remoteRef":"${{ github.sha }}", "namespace":"${{ env.DEPLOY_NAMESPACE }}" }' + inputs: '{ "app":"datatracker", "appVersion":"${{ env.PKG_VERSION }}", "remoteRef":"${{ github.sha }}", "namespace":"${{ env.DEPLOY_NAMESPACE }}", "disableDailyDbRefresh":${{ inputs.devNoDbRefresh }} }' wait-for-completion: true wait-for-completion-timeout: 30m wait-for-completion-interval: 30s From 93c1124c21267556625df760c68f35f6d4ae8139 Mon Sep 17 00:00:00 2001 From: Robert Sparks Date: Wed, 15 Oct 2025 13:06:45 -0500 Subject: [PATCH 222/405] ci: add ruff to devcontainer (#9731) --- .devcontainer/devcontainer.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index 6b0fd79bb3..bf28550084 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -23,7 +23,6 @@ "dbaeumer.vscode-eslint", "eamodio.gitlens", "editorconfig.editorconfig", - // Newer volar >=3.0.0 causes crashes in devcontainers "vue.volar@2.2.10", "mrmlnc.vscode-duplicate", "ms-azuretools.vscode-docker", @@ -35,7 +34,8 @@ "redhat.vscode-yaml", 
"spmeesseman.vscode-taskexplorer", "visualstudioexptteam.vscodeintellicode", - "ms-python.pylint" + "ms-python.pylint", + "charliermarsh.ruff" ], "settings": { "terminal.integrated.defaultProfile.linux": "zsh", From d5660ab8e953fec25dbb20025aba73b2e58f0609 Mon Sep 17 00:00:00 2001 From: Jennifer Richards Date: Wed, 15 Oct 2025 18:30:18 -0300 Subject: [PATCH 223/405] fix: unbreak EmptyAwareJSONField (#9732) * fix: specify default form_class correctly * style: ruff ruff --- ietf/utils/db.py | 63 ++++++++++++++++++++++++++++++------------------ 1 file changed, 40 insertions(+), 23 deletions(-) diff --git a/ietf/utils/db.py b/ietf/utils/db.py index 865c9b999f..49c89da13a 100644 --- a/ietf/utils/db.py +++ b/ietf/utils/db.py @@ -1,33 +1,44 @@ -# Copyright The IETF Trust 2021, All Rights Reserved -# -*- coding: utf-8 -*- - -# Taken from/inspired by -# https://stackoverflow.com/questions/55147169/django-admin-jsonfield-default-empty-dict-wont-save-in-admin -# -# JSONField should recognize {}, (), and [] as valid, non-empty JSON -# values. However, the base Field class excludes them +# Copyright The IETF Trust 2021-2025, All Rights Reserved import jsonfield from django.db import models -from ietf.utils.fields import IETFJSONField as FormIETFJSONField, EmptyAwareJSONField as FormEmptyAwareJSONField +from ietf.utils.fields import ( + IETFJSONField as FormIETFJSONField, + EmptyAwareJSONField as FormEmptyAwareJSONField, +) class EmptyAwareJSONField(models.JSONField): - form_class = FormEmptyAwareJSONField + """JSONField that allows empty JSON values when model specifies empty=False + + Taken from/inspired by + https://stackoverflow.com/questions/55147169/django-admin-jsonfield-default-empty-dict-wont-save-in-admin + + JSONField should recognize {}, (), and [] as valid, non-empty JSON values. 
- def __init__(self, *args, empty_values=FormEmptyAwareJSONField.empty_values, accepted_empty_values=None, **kwargs): + If customizing the formfield, the field must accept the `empty_values` argument. + """ + + def __init__( + self, + *args, + empty_values=FormEmptyAwareJSONField.empty_values, + accepted_empty_values=None, + **kwargs, + ): if accepted_empty_values is None: accepted_empty_values = [] - self.empty_values = [x - for x in empty_values - if x not in accepted_empty_values] + self.empty_values = [x for x in empty_values if x not in accepted_empty_values] super().__init__(*args, **kwargs) def formfield(self, **kwargs): - if 'form_class' not in kwargs or issubclass(kwargs['form_class'], FormEmptyAwareJSONField): - kwargs.setdefault('empty_values', self.empty_values) - return super().formfield(**{**kwargs}) + defaults = { + "form_class": FormEmptyAwareJSONField, + "empty_values": self.empty_values, + } + defaults.update(kwargs) + return super().formfield(**defaults) class IETFJSONField(jsonfield.JSONField): # pragma: no cover @@ -36,15 +47,21 @@ class IETFJSONField(jsonfield.JSONField): # pragma: no cover # Remove this class when migrations are squashed and it is no longer referenced form_class = FormIETFJSONField - def __init__(self, *args, empty_values=FormIETFJSONField.empty_values, accepted_empty_values=None, **kwargs): + def __init__( + self, + *args, + empty_values=FormIETFJSONField.empty_values, + accepted_empty_values=None, + **kwargs, + ): if accepted_empty_values is None: accepted_empty_values = [] - self.empty_values = [x - for x in empty_values - if x not in accepted_empty_values] + self.empty_values = [x for x in empty_values if x not in accepted_empty_values] super().__init__(*args, **kwargs) def formfield(self, **kwargs): - if 'form_class' not in kwargs or issubclass(kwargs['form_class'], FormIETFJSONField): - kwargs.setdefault('empty_values', self.empty_values) + if "form_class" not in kwargs or issubclass( + kwargs["form_class"], 
FormIETFJSONField + ): + kwargs.setdefault("empty_values", self.empty_values) return super().formfield(**{**kwargs}) From 1d2d304fa5c99db6cd2a944328246ce900c73b3c Mon Sep 17 00:00:00 2001 From: Jennifer Richards Date: Thu, 16 Oct 2025 12:39:04 -0300 Subject: [PATCH 224/405] fix: improve proceedings caching/performance (#9733) * refactor: speed up get_attendance() * fix: avoid cache invalidation by later draft rev * fix: guard against empty value * feat: freeze cache key for final proceedings --- ietf/meeting/models.py | 28 +++++++++++++++++++++------- ietf/meeting/utils.py | 39 +++++++++++++++++++++++++++++++++++---- 2 files changed, 56 insertions(+), 11 deletions(-) diff --git a/ietf/meeting/models.py b/ietf/meeting/models.py index f3df23e916..9e44df33b7 100644 --- a/ietf/meeting/models.py +++ b/ietf/meeting/models.py @@ -250,25 +250,39 @@ def get_attendance(self): # MeetingRegistration.attended started conflating badge-pickup and session attendance before IETF 114. # We've separated session attendance off to ietf.meeting.Attended, but need to report attendance at older # meetings correctly. - + # + # Looking up by registration and attendance records separately and joining in + # python is far faster than combining the Q objects in the query (~100x). + # Further optimization may be possible, but the queries are tricky... attended_per_meeting_registration = ( Q(registration__meeting=self) & ( Q(registration__attended=True) | Q(registration__checkedin=True) ) ) + attendees_by_reg = set( + Person.objects.filter(attended_per_meeting_registration).values_list( + "pk", flat=True + ) + ) + attended_per_meeting_attended = ( Q(attended__session__meeting=self) # Note that we are not filtering to plenary, wg, or rg sessions # as we do for nomcom eligibility - if picking up a badge (see above) # is good enough, just attending e.g. 
a training session is also good enough ) - attended = Person.objects.filter( - attended_per_meeting_registration | attended_per_meeting_attended - ).distinct() - - onsite = set(attended.filter(registration__meeting=self, registration__tickets__attendance_type__slug='onsite')) - remote = set(attended.filter(registration__meeting=self, registration__tickets__attendance_type__slug='remote')) + attendees_by_att = set( + Person.objects.filter(attended_per_meeting_attended).values_list( + "pk", flat=True + ) + ) + + attendees = Person.objects.filter( + pk__in=attendees_by_att | attendees_by_reg + ) + onsite = set(attendees.filter(registration__meeting=self, registration__tickets__attendance_type__slug='onsite')) + remote = set(attendees.filter(registration__meeting=self, registration__tickets__attendance_type__slug='remote')) remote.difference_update(onsite) return Attendance( diff --git a/ietf/meeting/utils.py b/ietf/meeting/utils.py index f6925269aa..feadb0c7fd 100644 --- a/ietf/meeting/utils.py +++ b/ietf/meeting/utils.py @@ -1027,10 +1027,41 @@ def generate_proceedings_content(meeting, force_refresh=False): :force_refresh: true to force regeneration and cache refresh """ cache = caches["default"] - cache_version = Document.objects.filter(session__meeting__number=meeting.number).aggregate(Max('time'))["time__max"] - # Include proceedings_final in the bare_key so we'll always reflect that accurately, even at the cost of - # a recomputation in the view - bare_key = f"proceedings.{meeting.number}.{cache_version}.final={meeting.proceedings_final}" + key_components = [ + "proceedings", + str(meeting.number), + ] + if meeting.proceedings_final: + # Freeze the cache key once proceedings are finalized. Further changes will + # not be picked up until the cache expires or is refreshed by the + # proceedings_content_refresh_task() + key_components.append("final") + else: + # Build a cache key that changes when materials are modified. 
For all but drafts, + # use the last modification time of the document. Exclude drafts from this because + # revisions long after the meeting ends will otherwise show up as changes and + # incorrectly invalidate the cache. Instead, include an ordered list of the + # drafts linked to the meeting so adding or removing drafts will trigger a + # recalculation. The list is long but that doesn't matter because we hash it into + # a fixed-length key. + meeting_docs = Document.objects.filter(session__meeting__number=meeting.number) + last_materials_update = ( + meeting_docs.exclude(type_id="draft") + .filter(session__meeting__number=meeting.number) + .aggregate(Max("time"))["time__max"] + ) + draft_names = ( + meeting_docs + .filter(type_id="draft") + .order_by("name") + .values_list("name", flat=True) + ) + key_components += [ + last_materials_update.isoformat() if last_materials_update else "-", + ",".join(draft_names), + ] + + bare_key = ".".join(key_components) cache_key = sha384(bare_key.encode("utf8")).hexdigest() if not force_refresh: cached_content = cache.get(cache_key, None) From 2cfbaf90c3504a53135d61f9bf976bab3b388eb9 Mon Sep 17 00:00:00 2001 From: Jennifer Richards Date: Thu, 16 Oct 2025 14:28:13 -0300 Subject: [PATCH 225/405] ci: drop caching from build images step (#9738) --- .github/workflows/build.yml | 2 -- 1 file changed, 2 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 4c70456a73..7eac7b1c64 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -278,8 +278,6 @@ jobs: tags: | ghcr.io/ietf-tools/datatracker:${{ env.PKG_VERSION }} ${{ env.FEATURE_LATEST_TAG && format('ghcr.io/ietf-tools/datatracker:{0}-latest', env.FEATURE_LATEST_TAG) || null }} - cache-from: type=gha - cache-to: type=gha,mode=max - name: Update CHANGELOG id: changelog From b0ec8c4b27d6225c6ffa6cac27ce554ec4a49a7c Mon Sep 17 00:00:00 2001 From: Jennifer Richards Date: Fri, 17 Oct 2025 13:08:11 -0300 Subject: [PATCH 226/405] 
chore: remove unused variables (#9742) --- ietf/meeting/models.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/ietf/meeting/models.py b/ietf/meeting/models.py index 9e44df33b7..7d9e318aab 100644 --- a/ietf/meeting/models.py +++ b/ietf/meeting/models.py @@ -956,8 +956,6 @@ class Meta: def __str__(self): return u"%s -> %s-%s" % (self.session, self.document.name, self.rev) -constraint_cache_uses = 0 -constraint_cache_initials = 0 class SessionQuerySet(models.QuerySet): def with_current_status(self): From 62f720ceaf951fba91b5a818473d798663dfbf1d Mon Sep 17 00:00:00 2001 From: Jennifer Richards Date: Tue, 21 Oct 2025 12:31:39 -0300 Subject: [PATCH 227/405] ci: imagePullPolicy for migration container (#9764) --- k8s/datatracker.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/k8s/datatracker.yaml b/k8s/datatracker.yaml index 3d9e86a29d..50a2c69687 100644 --- a/k8s/datatracker.yaml +++ b/k8s/datatracker.yaml @@ -115,6 +115,7 @@ spec: initContainers: - name: migration image: "ghcr.io/ietf-tools/datatracker:$APP_IMAGE_TAG" + imagePullPolicy: Always env: - name: "CONTAINER_ROLE" value: "migrations" From a3a3d215ca4067e722ead94e886175adb589e235 Mon Sep 17 00:00:00 2001 From: Robert Sparks Date: Thu, 23 Oct 2025 12:14:48 -0500 Subject: [PATCH 228/405] fix: don't limit from_contact for incoming liaison statements (#9773) --- ietf/liaisons/forms.py | 20 ++++++++++++-------- 1 file changed, 12 insertions(+), 8 deletions(-) diff --git a/ietf/liaisons/forms.py b/ietf/liaisons/forms.py index ef5b29535e..1747e55571 100644 --- a/ietf/liaisons/forms.py +++ b/ietf/liaisons/forms.py @@ -495,14 +495,18 @@ def set_from_fields(self): self.fields['from_groups'].initial = qs # Note that the IAB chair currently doesn't get to work with incoming liaison statements - if not ( - has_role(self.user, "Secretariat") - or has_role(self.user, "Liaison Coordinator") - ): - self.fields["from_contact"].initial = ( - self.person.role_set.filter(group=qs[0]).first().email.formatted_email() - ) 
- self.fields["from_contact"].widget.attrs["disabled"] = True + + # Removing this block at the request of the IAB - as a workaround until the new liaison tool is + # create, anyone with access to the form can set any from_contact value + # + # if not ( + # has_role(self.user, "Secretariat") + # or has_role(self.user, "Liaison Coordinator") + # ): + # self.fields["from_contact"].initial = ( + # self.person.role_set.filter(group=qs[0]).first().email.formatted_email() + # ) + # self.fields["from_contact"].widget.attrs["disabled"] = True def set_to_fields(self): '''Set to_groups and to_contacts options and initial value based on user From 1243957f06da485e5cf4c04a8479d551817d4d78 Mon Sep 17 00:00:00 2001 From: Jennifer Richards Date: Thu, 23 Oct 2025 14:15:22 -0300 Subject: [PATCH 229/405] feat: unversioned proceedings cache (#9779) * feat: separate, unversioned proceedings cache * refactor: don't double-hash the cache key --- ietf/meeting/utils.py | 8 ++++---- ietf/settings.py | 22 ++++++++++++++++++++++ 2 files changed, 26 insertions(+), 4 deletions(-) diff --git a/ietf/meeting/utils.py b/ietf/meeting/utils.py index feadb0c7fd..afcf7656f2 100644 --- a/ietf/meeting/utils.py +++ b/ietf/meeting/utils.py @@ -5,7 +5,6 @@ import jsonschema import os import requests -from hashlib import sha384 import pytz import subprocess @@ -1026,7 +1025,7 @@ def generate_proceedings_content(meeting, force_refresh=False): :meeting: meeting whose proceedings should be rendered :force_refresh: true to force regeneration and cache refresh """ - cache = caches["default"] + cache = caches["proceedings"] key_components = [ "proceedings", str(meeting.number), @@ -1061,8 +1060,9 @@ def generate_proceedings_content(meeting, force_refresh=False): ",".join(draft_names), ] - bare_key = ".".join(key_components) - cache_key = sha384(bare_key.encode("utf8")).hexdigest() + # Key is potentially long, but the "proceedings" cache hashes it to a fixed + # length. 
If that changes, hash it separately here first. + cache_key = ".".join(key_components) if not force_refresh: cached_content = cache.get(cache_key, None) if cached_content is not None: diff --git a/ietf/settings.py b/ietf/settings.py index 9a213c1a73..5e576430ed 100644 --- a/ietf/settings.py +++ b/ietf/settings.py @@ -1374,6 +1374,17 @@ def skip_unreadable_post(record): "LOCATION": f"{MEMCACHED_HOST}:{MEMCACHED_PORT}", "VERSION": __version__, "KEY_PREFIX": "ietf:dt", + # Key function is default except with sha384-encoded key + "KEY_FUNCTION": lambda key, key_prefix, version: ( + f"{key_prefix}:{version}:{sha384(str(key).encode('utf8')).hexdigest()}" + ), + }, + "proceedings": { + "BACKEND": "ietf.utils.cache.LenientMemcacheCache", + "LOCATION": f"{MEMCACHED_HOST}:{MEMCACHED_PORT}", + # No release-specific VERSION setting. + "KEY_PREFIX": "ietf:dt:proceedings", + # Key function is default except with sha384-encoded key "KEY_FUNCTION": lambda key, key_prefix, version: ( f"{key_prefix}:{version}:{sha384(str(key).encode('utf8')).hexdigest()}" ), @@ -1421,6 +1432,17 @@ def skip_unreadable_post(record): "VERSION": __version__, "KEY_PREFIX": "ietf:dt", }, + "proceedings": { + "BACKEND": "django.core.cache.backends.dummy.DummyCache", + # "BACKEND": "ietf.utils.cache.LenientMemcacheCache", + # "LOCATION": "127.0.0.1:11211", + # No release-specific VERSION setting. 
+ "KEY_PREFIX": "ietf:dt:proceedings", + # Key function is default except with sha384-encoded key + "KEY_FUNCTION": lambda key, key_prefix, version: ( + f"{key_prefix}:{version}:{sha384(str(key).encode('utf8')).hexdigest()}" + ), + }, "sessions": { "BACKEND": "django.core.cache.backends.locmem.LocMemCache", }, From 6412d1e24a9c499c39245bba58c2c31ec8110c0e Mon Sep 17 00:00:00 2001 From: Jennifer Richards Date: Thu, 23 Oct 2025 17:41:06 -0300 Subject: [PATCH 230/405] feat: blobstore-driven meeting materials (#9780) * feat: meeting materials blob resolver API (#9700) * refactor: exclude_deleted() for StoredObject queryset * chore: comment * feat: meeting materials blob resolver API * feat: materials blob retrieval API (#9728) * feat: materials blob retrieval API (WIP) * refactor: alphabetize ARTIFACT_STORAGE_NAMES * chore: limit buckets served * refactor: any-meeting option in _get_materials_doc() * feat: create missing blobs on retrieval * feat: render HTML from markdown via API (#9729) * chore: add comment * fix: allow bluesheets to be retrieved Normally not retrieved through /meeting/materials, but they're close enough in purpose that we might as well make them available. * fix: only update StoredObject.modified if changed * fix: preserve mtime when creating blob * refactor: better exception name * feat: render .md.html from .md blob * fix: explicit STATIC_IETF_ORG value in template Django's context_processors are not applied to render_string calls as we use them here, so settings are not available. 
* fix: typo * fix: decode utf-8 properly * feat: use filesystem to render .md.html * fix: copy/paste error in api_resolve_materials_name * refactor: get actual rev in _get_materials_doc (#9741) * fix: return filename, not full path * feat: precompute blob lookups for meeting materials (#9746) * feat: ResolvedMaterial model + migration * feat: method to populate ResolvedMaterial (WIP) * refactor: don't delete ResolvedMaterials Instead of deleting the ResolvedMaterials for a meeting, which might lose updates made during processing, update existing rows with any changes and warn if anything changed during the process. * fix: fix _get_materials_doc() Did not handle the possibility of multiple DocHistory objects with the same rev. * refactor: factor out material lookup helper * feat: resolve blobs via blobdb/fs for cache * chore: add resource * feat: admin for ResolvedMaterial * feat: cache-driven resolve materials API * fix: add all ResolvedMaterials; var names * fix: handle null case * feat: resolve_meeting_materials_task * feat: update resolver cache on material upload (#9759) * feat: robustness + date range for resolve materials task (#9760) * fix: limit types added to ResolvedMaterial * feat: resolve meeting materials in order by date * feat: add meetings_until param * fix: log&continue if resolving fails on a meeting * feat: log error message on parse errors * refactor: move ResolvedMaterial to blobdb app (#9762) * refactor: move ResolvedMaterial to blobdb app * fix: undo accidental removal * chore: fix lint (#9767) * fix: don't use DocHistory to find materials (#9771) * fix: don't use DocHistory to validate revs The DocHistory records are incomplete and, in particular, -00 revs are often missing. 
* Revert "refactor: get actual rev in _get_materials_doc (#9741)" This reverts commit 7fd15801 * chore: remove the on-demand resolver api * chore: fix lint * feat: populate materials buckets (#9777) * refactor: drop .txt from filename_with_rev() * feat: utilities to populate materials blobs * feat: store materials for a full meeting as blobs Plus a bunch of fixup from working with real data. (Based on meetings 71, 83, and 118, picked arbitrarily) * chore: update migration * feat: task to store materials in blobdb * refactor: reimplement api_retrieve_materials_blob * fix: update resolving task, fix bugs * Revert "refactor: drop .txt from filename_with_rev()" This reverts commit a849d0f92d4df54296a7062b6c3a05fb0977be93. * chore: fix lint --------- Co-authored-by: Robert Sparks --- ietf/api/urls.py | 3 + ietf/blobdb/admin.py | 11 +- .../migrations/0002_resolvedmaterial.py | 48 +++ ietf/blobdb/models.py | 20 + ietf/doc/models.py | 9 + ietf/doc/storage.py | 10 +- ietf/doc/storage_utils.py | 12 +- ietf/doc/views_material.py | 4 + ietf/meeting/resources.py | 14 +- ietf/meeting/tasks.py | 131 ++++++- ietf/meeting/utils.py | 355 +++++++++++++++++- ietf/meeting/views.py | 177 ++++++++- ietf/settings.py | 44 ++- ietf/templates/minimal.html | 4 +- 14 files changed, 798 insertions(+), 44 deletions(-) create mode 100644 ietf/blobdb/migrations/0002_resolvedmaterial.py diff --git a/ietf/api/urls.py b/ietf/api/urls.py index 6f2efb3c1e..04575b34cb 100644 --- a/ietf/api/urls.py +++ b/ietf/api/urls.py @@ -49,6 +49,9 @@ url(r'^group/role-holder-addresses/$', api_views.role_holder_addresses), # Let IESG members set positions programmatically url(r'^iesg/position', views_ballot.api_set_position), + # Find the blob to store for a given materials document path + url(r'^meeting/(?:(?P(?:interim-)?[a-z0-9-]+)/)?materials/%(document)s(?P\.[A-Za-z0-9]+)?/resolve-cached/$' % settings.URL_REGEXPS, meeting_views.api_resolve_materials_name_cached), + 
url(r'^meeting/blob/(?P[a-z0-9-]+)/(?P[a-z][a-z0-9.-]+)$', meeting_views.api_retrieve_materials_blob), # Let Meetecho set session video URLs url(r'^meeting/session/video/url$', meeting_views.api_set_session_video_url), # Let Meetecho tell us the name of its recordings diff --git a/ietf/blobdb/admin.py b/ietf/blobdb/admin.py index f4cd002e07..3e1a2a311f 100644 --- a/ietf/blobdb/admin.py +++ b/ietf/blobdb/admin.py @@ -3,7 +3,7 @@ from django.db.models.functions import Length from rangefilter.filters import DateRangeQuickSelectListFilterBuilder -from .models import Blob +from .models import Blob, ResolvedMaterial @admin.register(Blob) @@ -29,3 +29,12 @@ def get_queryset(self, request): def object_size(self, instance): """Get the size of the object""" return instance.object_size # annotation added in get_queryset() + + +@admin.register(ResolvedMaterial) +class ResolvedMaterialAdmin(admin.ModelAdmin): + model = ResolvedMaterial + list_display = ["name", "meeting_number", "bucket", "blob"] + list_filter = ["meeting_number", "bucket"] + search_fields = ["name", "blob"] + ordering = ["name"] diff --git a/ietf/blobdb/migrations/0002_resolvedmaterial.py b/ietf/blobdb/migrations/0002_resolvedmaterial.py new file mode 100644 index 0000000000..e0ab405b11 --- /dev/null +++ b/ietf/blobdb/migrations/0002_resolvedmaterial.py @@ -0,0 +1,48 @@ +# Copyright The IETF Trust 2025, All Rights Reserved + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("blobdb", "0001_initial"), + ] + + operations = [ + migrations.CreateModel( + name="ResolvedMaterial", + fields=[ + ( + "id", + models.BigAutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ("name", models.CharField(help_text="Name to resolve", max_length=300)), + ( + "meeting_number", + models.CharField( + help_text="Meeting material is related to", max_length=64 + ), + ), + ( + "bucket", + models.CharField(help_text="Resolved 
bucket name", max_length=255), + ), + ( + "blob", + models.CharField(help_text="Resolved blob name", max_length=300), + ), + ], + ), + migrations.AddConstraint( + model_name="resolvedmaterial", + constraint=models.UniqueConstraint( + fields=("name", "meeting_number"), name="unique_name_per_meeting" + ), + ), + ] diff --git a/ietf/blobdb/models.py b/ietf/blobdb/models.py index 8f423d9f6c..fa7831f203 100644 --- a/ietf/blobdb/models.py +++ b/ietf/blobdb/models.py @@ -96,3 +96,23 @@ def _emit_blob_change_event(self, using=None): ), using=using, ) + + +class ResolvedMaterial(models.Model): + # A Document name can be 255 characters; allow this name to be a bit longer + name = models.CharField(max_length=300, help_text="Name to resolve") + meeting_number = models.CharField( + max_length=64, help_text="Meeting material is related to" + ) + bucket = models.CharField(max_length=255, help_text="Resolved bucket name") + blob = models.CharField(max_length=300, help_text="Resolved blob name") + + class Meta: + constraints = [ + models.UniqueConstraint( + fields=["name", "meeting_number"], name="unique_name_per_meeting" + ) + ] + + def __str__(self): + return f"{self.name}@{self.meeting_number} -> {self.bucket}:{self.blob}" diff --git a/ietf/doc/models.py b/ietf/doc/models.py index 25ee734cbe..8bb79b64ed 100644 --- a/ietf/doc/models.py +++ b/ietf/doc/models.py @@ -913,6 +913,7 @@ def role_for_doc(self): roles.append('Action Holder') return ', '.join(roles) +# N.B., at least a couple dozen documents exist that do not satisfy this validator validate_docname = RegexValidator( r'^[-a-z0-9]+$', "Provide a valid document name consisting of lowercase letters, numbers and hyphens.", @@ -1588,9 +1589,17 @@ class BofreqResponsibleDocEvent(DocEvent): """ Capture the responsible leadership (IAB and IESG members) for a BOF Request """ responsible = models.ManyToManyField('person.Person', blank=True) + +class StoredObjectQuerySet(models.QuerySet): + def exclude_deleted(self): + return 
self.filter(deleted__isnull=True) + + class StoredObject(models.Model): """Hold metadata about objects placed in object storage""" + objects = StoredObjectQuerySet.as_manager() + store = models.CharField(max_length=256) name = models.CharField(max_length=1024, null=False, blank=False) # N.B. the 1024 limit on name comes from S3 sha384 = models.CharField(max_length=96) diff --git a/ietf/doc/storage.py b/ietf/doc/storage.py index a234ef2d4f..375620ccaf 100644 --- a/ietf/doc/storage.py +++ b/ietf/doc/storage.py @@ -32,7 +32,7 @@ def __init__(self, file, name, mtime=None, content_type="", store=None, doc_name @classmethod def from_storedobject(cls, file, name, store): """Alternate constructor for objects that already exist in the StoredObject table""" - stored_object = StoredObject.objects.filter(store=store, name=name, deleted__isnull=True).first() + stored_object = StoredObject.objects.exclude_deleted().filter(store=store, name=name).first() if stored_object is None: raise FileNotFoundError(f"StoredObject for {store}:{name} does not exist or was deleted") file = cls(file, name, store, doc_name=stored_object.doc_name, doc_rev=stored_object.doc_rev) @@ -140,7 +140,11 @@ def _save_stored_object(self, name, content) -> StoredObject: ), ), ) - if not created: + if not created and ( + record.sha384 != content.custom_metadata["sha384"] + or record.len != int(content.custom_metadata["len"]) + or record.deleted is not None + ): record.sha384 = content.custom_metadata["sha384"] record.len = int(content.custom_metadata["len"]) record.modified = now @@ -160,7 +164,7 @@ def _delete_stored_object(self, name) -> Optional[StoredObject]: else: now = timezone.now() # Note that existing_record is a queryset that will have one matching object - existing_record.filter(deleted__isnull=True).update(deleted=now) + existing_record.exclude_deleted().update(deleted=now) return existing_record.first() def _save(self, name, content): diff --git a/ietf/doc/storage_utils.py 
b/ietf/doc/storage_utils.py index 510c98c4f5..81588c83ec 100644 --- a/ietf/doc/storage_utils.py +++ b/ietf/doc/storage_utils.py @@ -12,6 +12,14 @@ from ietf.utils.log import log +class StorageUtilsError(Exception): + pass + + +class AlreadyExistsError(StorageUtilsError): + pass + + def _get_storage(kind: str) -> Storage: if kind in settings.ARTIFACT_STORAGE_NAMES: return storages[kind] @@ -70,7 +78,7 @@ def store_file( # debug.show('f"Asked to store {name} in {kind}: is_new={is_new}, allow_overwrite={allow_overwrite}"') if not allow_overwrite and not is_new: debug.show('f"Failed to save {kind}:{name} - name already exists in store"') - raise RuntimeError(f"Failed to save {kind}:{name} - name already exists in store") + raise AlreadyExistsError(f"Failed to save {kind}:{name} - name already exists in store") new_name = _get_storage(kind).save( name, StoredObjectFile( @@ -85,7 +93,7 @@ def store_file( if new_name != name: complaint = f"Error encountered saving '{name}' - results stored in '{new_name}' instead." 
debug.show("complaint") - raise RuntimeError(complaint) + raise StorageUtilsError(complaint) except Exception as err: log(f"Blobstore Error: Failed to store file {kind}:{name}: {repr(err)}") if settings.SERVER_MODE == "development": diff --git a/ietf/doc/views_material.py b/ietf/doc/views_material.py index 6f8b8a8f12..eefac0ca61 100644 --- a/ietf/doc/views_material.py +++ b/ietf/doc/views_material.py @@ -22,6 +22,7 @@ from ietf.doc.utils import add_state_change_event, check_common_doc_name_rules from ietf.group.models import Group from ietf.group.utils import can_manage_materials +from ietf.meeting.utils import resolve_uploaded_material from ietf.utils import log from ietf.utils.decorators import ignore_view_kwargs from ietf.utils.meetecho import MeetechoAPIError, SlidesManager @@ -179,6 +180,9 @@ def edit_material(request, name=None, acronym=None, action=None, doc_type=None): "There was an error creating a hardlink at %s pointing to %s: %s" % (ftp_filepath, filepath, ex) ) + else: + for meeting in set([s.meeting for s in doc.session_set.all()]): + resolve_uploaded_material(meeting=meeting, doc=doc) if prev_rev != doc.rev: e = NewRevisionDocEvent(type="new_revision", doc=doc, rev=doc.rev) diff --git a/ietf/meeting/resources.py b/ietf/meeting/resources.py index ede2b5b993..88562a88fe 100644 --- a/ietf/meeting/resources.py +++ b/ietf/meeting/resources.py @@ -11,11 +11,15 @@ from ietf import api -from ietf.meeting.models import ( Meeting, ResourceAssociation, Constraint, Room, Schedule, Session, - TimeSlot, SchedTimeSessAssignment, SessionPresentation, FloorPlan, - UrlResource, ImportantDate, SlideSubmission, SchedulingEvent, - BusinessConstraint, ProceedingsMaterial, MeetingHost, Attended, - Registration, RegistrationTicket) +from ietf.meeting.models import (Meeting, ResourceAssociation, Constraint, Room, + Schedule, Session, + TimeSlot, SchedTimeSessAssignment, SessionPresentation, + FloorPlan, + UrlResource, ImportantDate, SlideSubmission, + SchedulingEvent, + 
BusinessConstraint, ProceedingsMaterial, MeetingHost, + Attended, + Registration, RegistrationTicket) from ietf.name.resources import MeetingTypeNameResource class MeetingResource(ModelResource): diff --git a/ietf/meeting/tasks.py b/ietf/meeting/tasks.py index 784eb00d87..c361325f9a 100644 --- a/ietf/meeting/tasks.py +++ b/ietf/meeting/tasks.py @@ -1,13 +1,20 @@ -# Copyright The IETF Trust 2024, All Rights Reserved +# Copyright The IETF Trust 2024-2025, All Rights Reserved # # Celery task definitions # +import datetime + from celery import shared_task +# from django.db.models import QuerySet from django.utils import timezone from ietf.utils import log from .models import Meeting -from .utils import generate_proceedings_content +from .utils import ( + generate_proceedings_content, + resolve_materials_for_one_meeting, + store_blobs_for_one_meeting, +) from .views import generate_agenda_data from .utils import fetch_attendance_from_meetings @@ -61,3 +68,123 @@ def fetch_meeting_attendance_task(): meeting_stats['processed'] ) ) + + +def _select_meetings( + meetings: list[str] | None = None, + meetings_since: str | None = None, + meetings_until: str | None = None +): # nyah + """Select meetings by number or date range""" + # IETF-1 = 1986-01-16 + EARLIEST_MEETING_DATE = datetime.datetime(1986, 1, 1) + meetings_since_dt: datetime.datetime | None = None + meetings_until_dt: datetime.datetime | None = None + + if meetings_since == "zero": + meetings_since_dt = EARLIEST_MEETING_DATE + elif meetings_since is not None: + try: + meetings_since_dt = datetime.datetime.fromisoformat(meetings_since) + except ValueError: + log.log( + "Failed to parse meetings_since='{meetings_since}' with fromisoformat" + ) + raise + + if meetings_until is not None: + try: + meetings_until_dt = datetime.datetime.fromisoformat(meetings_until) + except ValueError: + log.log( + "Failed to parse meetings_until='{meetings_until}' with fromisoformat" + ) + raise + if meetings_since_dt is None: + # if we 
only got meetings_until, start from the first meeting + meetings_since_dt = EARLIEST_MEETING_DATE + + if meetings is None: + if meetings_since_dt is None: + log.log("No meetings requested, doing nothing.") + return Meeting.objects.none() + meetings_qs = Meeting.objects.filter(date__gte=meetings_since_dt) + if meetings_until_dt is not None: + meetings_qs = meetings_qs.filter(date__lte=meetings_until_dt) + log.log( + "Selecting meetings between " + f"{meetings_since_dt} and {meetings_until_dt}" + ) + else: + log.log(f"Selecting meetings since {meetings_since_dt}") + else: + if meetings_since_dt is not None: + log.log( + "Ignoring meetings_since and meetings_until " + "because specific meetings were requested." + ) + meetings_qs = Meeting.objects.filter(number__in=meetings) + return meetings_qs + + +@shared_task +def resolve_meeting_materials_task( + *, # only allow kw arguments + meetings: list[str] | None=None, + meetings_since: str | None=None, + meetings_until: str | None=None +): + """Run materials resolver on meetings + + Can request a set of meetings by number by passing a list in the meetings arg, or + by range by passing an iso-format timestamps in meetings_since / meetings_until. + To select all meetings, set meetings_since="zero" and omit other parameters. + """ + meetings_qs = _select_meetings(meetings, meetings_since, meetings_until) + for meeting in meetings_qs.order_by("date"): + log.log( + f"Resolving materials for {meeting.type_id} " + f"meeting {meeting.number} ({meeting.date})..." 
+ ) + mark = timezone.now() + try: + resolve_materials_for_one_meeting(meeting) + except Exception as err: + log.log( + "Exception raised while resolving materials for " + f"meeting {meeting.number}: {err}" + ) + else: + log.log(f"Resolved in {(timezone.now() - mark).total_seconds():0.3f} seconds.") + + +@shared_task +def store_meeting_materials_as_blobs_task( + *, # only allow kw arguments + meetings: list[str] | None = None, + meetings_since: str | None = None, + meetings_until: str | None = None +): + """Push meeting materials into the blob store + + Can request a set of meetings by number by passing a list in the meetings arg, or + by range by passing an iso-format timestamps in meetings_since / meetings_until. + To select all meetings, set meetings_since="zero" and omit other parameters. + """ + meetings_qs = _select_meetings(meetings, meetings_since, meetings_until) + for meeting in meetings_qs.order_by("date"): + log.log( + f"Creating blobs for materials for {meeting.type_id} " + f"meeting {meeting.number} ({meeting.date})..." 
+ ) + mark = timezone.now() + try: + store_blobs_for_one_meeting(meeting) + except Exception as err: + log.log( + "Exception raised while creating blobs for " + f"meeting {meeting.number}: {err}" + ) + else: + log.log( + f"Blobs created in {(timezone.now() - mark).total_seconds():0.3f} seconds.") diff --git a/ietf/meeting/utils.py b/ietf/meeting/utils.py index afcf7656f2..bdf3d3d3d3 100644 --- a/ietf/meeting/utils.py +++ b/ietf/meeting/utils.py @@ -2,6 +2,9 @@ # -*- coding: utf-8 -*- import datetime import itertools +from contextlib import suppress +from dataclasses import dataclass + import jsonschema import os import requests @@ -26,16 +29,33 @@ import debug # pyflakes:ignore from ietf.dbtemplate.models import DBTemplate -from ietf.doc.storage_utils import store_bytes, store_str -from ietf.meeting.models import (Session, SchedulingEvent, TimeSlot, - Constraint, SchedTimeSessAssignment, SessionPresentation, Attended, - Registration, Meeting, RegistrationTicket) -from ietf.doc.models import Document, State, NewRevisionDocEvent, StateDocEvent +from ietf.doc.storage_utils import store_bytes, store_str, AlreadyExistsError +from ietf.meeting.models import ( + Session, + SchedulingEvent, + TimeSlot, + Constraint, + SchedTimeSessAssignment, + SessionPresentation, + Attended, + Registration, + Meeting, + RegistrationTicket, +) +from ietf.blobdb.models import ResolvedMaterial +from ietf.doc.models import ( + Document, + State, + NewRevisionDocEvent, + StateDocEvent, + StoredObject, +) from ietf.doc.models import DocEvent from ietf.group.models import Group from ietf.group.utils import can_manage_materials from ietf.name.models import SessionStatusName, ConstraintName, DocTypeName from ietf.person.models import Person +from ietf.utils import markdown from ietf.utils.html import clean_html from ietf.utils.log import log from ietf.utils.timezone import date_today @@ -220,6 +240,7 @@ def save_bluesheet(request, session, file, encoding='utf-8'): save_error = 
handle_upload_file(file, filename, session.meeting, 'bluesheets', request=request, encoding=encoding) if not save_error: doc.save_with_history([e]) + resolve_uploaded_material(meeting=session.meeting, doc=doc) return save_error @@ -832,6 +853,330 @@ def write_doc_for_session(session, type_id, filename, contents): store_str(type_id, filename.name, contents) return None + +@dataclass +class BlobSpec: + bucket: str + name: str + + +def resolve_one_material( + doc: Document, rev: str | None, ext: str | None +) -> BlobSpec | None: + if doc.type_id is None: + log(f"Cannot resolve a doc with no type: {doc.name}") + return None + + # Get the Document's base name. It may or may not have an extension. + if rev is None: + basename = Path(doc.get_base_name()) + else: + basename = Path(f"{doc.name}-{int(rev):02d}") + + # If the document's file exists, the blob is _always_ named with this stem, + # even if it's different from the original. + blob_stem = Path(f"{doc.name}-{rev or doc.rev}") + + # If we have an extension, either from the URL or the Document's base name, look up + # the blob or file or return 404. N.b. the suffix check needs adjustment to handle + # a bare "." extension when we reach py3.14. + if ext or basename.suffix != "": + if ext: + blob_name = str(blob_stem.with_suffix(ext)) + else: + blob_name = str(blob_stem.with_suffix(basename.suffix)) + + # See if we have a stored object under that name + preferred_blob = ( + StoredObject.objects.exclude_deleted() + .filter(store=doc.type_id, name=blob_name) + .first() + ) + if preferred_blob is not None: + return BlobSpec( + bucket=preferred_blob.store, + name=preferred_blob.name, + ) + # No stored object, fall back to the file system. + filename = Path(doc.get_file_path()) / basename # use basename for file + if filename.is_file(): + return BlobSpec( + bucket=doc.type_id, + name=str(blob_stem.with_suffix(filename.suffix)), + ) + else: + return None + + # No extension has been specified so far, so look one up. 
+ matching_stored_objects = ( + StoredObject.objects.exclude_deleted() + .filter( + store=doc.type_id, + name__startswith=f"{blob_stem}.", # anchor to end with trailing "." + ) + .order_by("name") + ) # orders by suffix + blob_ext_choices = { + Path(stored_obj.name).suffix: stored_obj + for stored_obj in matching_stored_objects + } + + # Short-circuit to return pdf if present + if ".pdf" in blob_ext_choices: + pdf_blob = blob_ext_choices[".pdf"] + return BlobSpec( + bucket=pdf_blob.store, + name=str(blob_stem.with_suffix(".pdf")), + ) + + # Now look for files + filename = Path(doc.get_file_path()) / basename + file_ext_choices = { + # Construct a map from suffix to full filename + fn.suffix: fn.name + for fn in sorted(filename.parent.glob(filename.stem + ".*")) + } + + # Short-circuit to return pdf if we have the file + if ".pdf" in file_ext_choices: + return BlobSpec( + bucket=doc.type_id, + name=str(blob_stem.with_suffix(".pdf")), + ) + + all_exts = set(blob_ext_choices.keys()).union(file_ext_choices.keys()) + if len(all_exts) > 0: + preferred_ext = sorted(all_exts)[0] + if preferred_ext in blob_ext_choices: + preferred_blob = blob_ext_choices[preferred_ext] + return BlobSpec( + bucket=preferred_blob.store, + name=preferred_blob.name, + ) + else: + return BlobSpec( + bucket=doc.type_id, + name=str(blob_stem.with_suffix(preferred_ext)), + ) + + return None + + +def resolve_materials_for_one_meeting(meeting: Meeting): + start_time = timezone.now() + meeting_documents = ( + Document.objects.filter( + type_id__in=settings.MATERIALS_TYPES_SERVED_BY_WORKER + ).filter( + Q(session__meeting=meeting) | Q(proceedingsmaterial__meeting=meeting) + ) + ).distinct() + + resolved = [] + for doc in meeting_documents: + # request by doc name with no rev + blob = resolve_one_material(doc, rev=None, ext=None) + if blob is not None: + resolved.append( + ResolvedMaterial( + name=doc.name, + meeting_number=meeting.number, + bucket=blob.bucket, + blob=blob.name, + ) + ) + # request by 
doc name + rev + blob = resolve_one_material(doc, rev=doc.rev, ext=None) + if blob is not None: + resolved.append( + ResolvedMaterial( + name=f"{doc.name}-{doc.rev:02}", + meeting_number=meeting.number, + bucket=blob.bucket, + blob=blob.name, + ) + ) + # for other revisions, only need request by doc name + rev + other_revisions = doc.revisions_by_newrevisionevent() + other_revisions.remove(doc.rev) + for rev in other_revisions: + blob = resolve_one_material(doc, rev=rev, ext=None) + if blob is not None: + resolved.append( + ResolvedMaterial( + name=f"{doc.name}-{rev:02}", + meeting_number=meeting.number, + bucket=blob.bucket, + blob=blob.name, + ) + ) + ResolvedMaterial.objects.bulk_create( + resolved, + update_conflicts=True, + unique_fields=["name", "meeting_number"], + update_fields=["bucket", "blob"], + ) + # Warn if any files were updated during the above process + last_update = meeting_documents.aggregate(Max("time"))["time__max"] + if last_update and last_update > start_time: + log( + f"Warning: materials for meeting {meeting.number} " + "changed during ResolvedMaterial update" + ) + +def resolve_uploaded_material(meeting: Meeting, doc: Document): + resolved = [] + blob = resolve_one_material(doc, rev=None, ext=None) + if blob is not None: + resolved.append( + ResolvedMaterial( + name=doc.name, + meeting_number=meeting.number, + bucket=blob.bucket, + blob=blob.name, + ) + ) + # request by doc name + rev + blob = resolve_one_material(doc, rev=doc.rev, ext=None) + if blob is not None: + resolved.append( + ResolvedMaterial( + name=f"{doc.name}-{doc.rev:02}", + meeting_number=meeting.number, + bucket=blob.bucket, + blob=blob.name, + ) + ) + ResolvedMaterial.objects.bulk_create( + resolved, + update_conflicts=True, + unique_fields=["name", "meeting_number"], + update_fields=["bucket", "blob"], + ) + + +def store_blob_for_one_material_file(doc: Document, rev: str, filepath: Path): + if not settings.ENABLE_BLOBSTORAGE: + raise RuntimeError("Cannot store blobs: 
ENABLE_BLOBSTORAGE is False") + + bucket = doc.type_id + if bucket not in settings.MATERIALS_TYPES_SERVED_BY_WORKER: + raise ValueError(f"Bucket {bucket} not found for doc {doc.name}.") + blob_stem = f"{doc.name}-{rev}" + suffix = filepath.suffix # includes leading "." + + # Store the file + try: + file_bytes = filepath.read_bytes() + except Exception as err: + log(f"Failed to read {filepath}: {err}") + raise + with suppress(AlreadyExistsError): + store_bytes( + kind=bucket, + name= blob_stem + suffix, + content=file_bytes, + mtime=datetime.datetime.fromtimestamp( + filepath.stat().st_mtime, + tz=datetime.UTC, + ), + allow_overwrite=False, + doc_name=doc.name, + doc_rev=rev, + ) + + # Special case: pre-render markdown into HTML as .md.html + if suffix == ".md": + try: + markdown_source = file_bytes.decode("utf-8") + except UnicodeDecodeError as err: + log(f"Unable to decode {filepath} as UTF-8, treating as latin-1: {err}") + markdown_source = file_bytes.decode("latin-1") + # render the markdown + try: + html = render_to_string( + "minimal.html", + { + "content": markdown.markdown(markdown_source), + "title": blob_stem, + "static_ietf_org": settings.STATIC_IETF_ORG, + }, + ) + except Exception as err: + log(f"Failed to render markdown for {filepath}: {err}") + else: + # Don't overwrite, but don't fail if the blob exists + with suppress(AlreadyExistsError): + store_str( + kind=bucket, + name=blob_stem + ".md.html", + content=html, + allow_overwrite=False, + doc_name=doc.name, + doc_rev=rev, + content_type="text/html;charset=utf-8", + ) + + +def store_blobs_for_one_material_doc(doc: Document): + """Ensure that all files related to a materials Document are in the blob store""" + if doc.type_id not in settings.MATERIALS_TYPES_SERVED_BY_WORKER: + log(f"This method does not handle docs of type {doc.name}") + return + + # Store files for current Document / rev + file_path = Path(doc.get_file_path()) + base_name = Path(doc.get_base_name()) + # .stem would remove 
directories, so use .with_suffix("") + base_name_stem = str(base_name.with_suffix("")) + if base_name_stem.endswith(".") and base_name.suffix == "": + # In Python 3.14, a trailing "." is a valid suffix, but in prior versions + # it is left as part of the stem. The suffix check ensures that either way, + # only a single "." will be removed. + base_name_stem = base_name_stem[:-1] + # Add any we find without the rev + for file_to_store in file_path.glob(base_name_stem + ".*"): + if not (file_to_store.is_file()): + continue + try: + store_blob_for_one_material_file(doc, doc.rev, file_to_store) + except Exception as err: + log( + f"Failed to store blob for {doc} rev {doc.rev} " + f"from {file_to_store}: {err}" + ) + + # Get other revisions + for rev in doc.revisions_by_newrevisionevent(): + if rev == doc.rev: + continue # already handled this + + # Add some that have the rev + for file_to_store in file_path.glob(doc.name + f"-{rev}.*"): + if not file_to_store.is_file(): + continue + try: + store_blob_for_one_material_file(doc, rev, file_to_store) + except Exception as err: + log( + f"Failed to store blob for {doc} rev {rev} " + f"from {file_to_store}: {err}" + ) + + +def store_blobs_for_one_meeting(meeting: Meeting): + meeting_documents = ( + Document.objects.filter( + type_id__in=settings.MATERIALS_TYPES_SERVED_BY_WORKER + ).filter( + Q(session__meeting=meeting) | Q(proceedingsmaterial__meeting=meeting) + ) + ).distinct() + + for doc in meeting_documents: + store_blobs_for_one_material_doc(doc) + + def create_recording(session, url, title=None, user=None): ''' Creates the Document type=recording, setting external_url and creating diff --git a/ietf/meeting/views.py b/ietf/meeting/views.py index fcc9312609..cf6fed596b 100644 --- a/ietf/meeting/views.py +++ b/ietf/meeting/views.py @@ -9,6 +9,7 @@ import json import math import os + import pytz import re import tarfile @@ -27,10 +28,12 @@ from django import forms from django.core.cache import caches +from 
django.core.files.storage import storages from django.shortcuts import render, redirect, get_object_or_404 from django.http import (HttpResponse, HttpResponseRedirect, HttpResponseForbidden, HttpResponseNotFound, Http404, HttpResponseBadRequest, - JsonResponse, HttpResponseGone, HttpResponseNotAllowed) + JsonResponse, HttpResponseGone, HttpResponseNotAllowed, + FileResponse) from django.conf import settings from django.contrib import messages from django.contrib.auth.decorators import login_required @@ -48,18 +51,25 @@ from django.views.decorators.cache import cache_page from django.views.decorators.csrf import ensure_csrf_cookie, csrf_exempt from django.views.generic import RedirectView +from rest_framework.status import HTTP_404_NOT_FOUND import debug # pyflakes:ignore from ietf.doc.fields import SearchableDocumentsField from ietf.doc.models import Document, State, DocEvent, NewRevisionDocEvent -from ietf.doc.storage_utils import remove_from_storage, retrieve_bytes, store_file +from ietf.doc.storage_utils import ( + remove_from_storage, + retrieve_bytes, + store_file, +) from ietf.group.models import Group from ietf.group.utils import can_manage_session_materials, can_manage_some_groups, can_manage_group from ietf.person.models import Person, User from ietf.ietfauth.utils import role_required, has_role, user_is_person from ietf.mailtrigger.utils import gather_address_lists -from ietf.meeting.models import Meeting, Session, Schedule, FloorPlan, SessionPresentation, TimeSlot, SlideSubmission, Attended +from ietf.meeting.models import Meeting, Session, Schedule, FloorPlan, \ + SessionPresentation, TimeSlot, SlideSubmission, Attended +from ..blobdb.models import ResolvedMaterial from ietf.meeting.models import ImportantDate, SessionStatusName, SchedulingEvent, SchedTimeSessAssignment, Room, TimeSlotTypeName from ietf.meeting.models import Registration from ietf.meeting.forms import ( CustomDurationField, SwapDaysForm, SwapTimeslotsForm, ImportMinutesForm, @@ -83,7 
+93,8 @@ finalize, generate_proceedings_content, organize_proceedings_sessions, - sort_accept_tuple, + resolve_uploaded_material, + sort_accept_tuple, store_blobs_for_one_material_doc, ) from ietf.meeting.utils import add_event_info_to_session_qs from ietf.meeting.utils import session_time_for_sorting @@ -120,6 +131,8 @@ from icalendar import Calendar, Event from ietf.doc.templatetags.ietf_filters import absurl +from ..api.ietf_utils import requires_api_token +from ..blobdb.storage import BlobdbStorage, BlobFile request_summary_exclude_group_types = ['team'] @@ -245,21 +258,32 @@ def current_materials(request): raise Http404('No such meeting') -def _get_materials_doc(meeting, name): +def _get_materials_doc(name, meeting=None): """Get meeting materials document named by name - Raises Document.DoesNotExist if a match cannot be found. + Raises Document.DoesNotExist if a match cannot be found. If meeting is None, + matches a name that is associated with _any_ meeting. """ + + def _matches_meeting(doc, meeting=None): + if meeting is None: + return doc.get_related_meeting() is not None + return doc.get_related_meeting() == meeting + # try an exact match first doc = Document.objects.filter(name=name).first() - if doc is not None and doc.get_related_meeting() == meeting: + if doc is not None and _matches_meeting(doc, meeting): return doc, None + # try parsing a rev number if "-" in name: docname, rev = name.rsplit("-", 1) if len(rev) == 2 and rev.isdigit(): doc = Document.objects.get(name=docname) # may raise Document.DoesNotExist - if doc.get_related_meeting() == meeting and rev in doc.revisions_by_newrevisionevent(): + if ( + _matches_meeting(doc, meeting) + and rev in doc.revisions_by_newrevisionevent() + ): return doc, rev # give up raise Document.DoesNotExist @@ -277,7 +301,7 @@ def materials_document(request, document, num=None, ext=None): meeting = get_meeting(num, type_in=["ietf", "interim"]) num = meeting.number try: - doc, rev = 
_get_materials_doc(meeting=meeting, name=document) + doc, rev = _get_materials_doc(name=document, meeting=meeting) except Document.DoesNotExist: raise Http404("No such document for meeting %s" % num) @@ -320,6 +344,7 @@ def materials_document(request, document, num=None, ext=None): { "content": markdown.markdown(bytes.decode(encoding=chset)), "title": filename.name, + "static_ietf_org": settings.STATIC_IETF_ORG, }, ) content_type = content_type.replace("plain", "html", 1) @@ -334,6 +359,133 @@ def materials_document(request, document, num=None, ext=None): return HttpResponseRedirect(redirect_to=doc.get_href(meeting=meeting)) +@requires_api_token("ietf.meeting.views.api_resolve_materials_name") +def api_resolve_materials_name_cached(request, document, num=None, ext=None): + """Resolve materials name into document to a blob spec + + Returns the bucket/name of a blob in the blob store that corresponds to the named + document. Handles resolution of revision if it is not specified and determines the + best extension if one is not provided. Response is JSON. + + As of 2025-10-10 we do not have blobs for all materials documents or for every + format of every document. This API still returns the bucket/name as if the blob + exists. Another API will allow the caller to obtain the file contents using that + name if it cannot be retrieved from the blob store. 
+ """ + + def _error_response(status: int, detail: str): + return JsonResponse( + { + "status": status, + "title": "Error", + "detail": detail, + }, + status=status, + ) + + def _response(bucket: str, name: str): + return JsonResponse( + { + "bucket": bucket, + "name": name, + } + ) + + try: + resolved = ResolvedMaterial.objects.get( + meeting_number=num, name=document + ) + except ResolvedMaterial.DoesNotExist: + return _error_response( + HTTP_404_NOT_FOUND, f"No suitable file for {document} for meeting {num}" + ) + return _response(bucket=resolved.bucket, name=resolved.blob) + + +@requires_api_token +def api_retrieve_materials_blob(request, bucket, name): + """Retrieve contents of a meeting materials blob + + This is intended as a fallback if the web worker cannot retrieve a blob from + the blobstore itself. The most likely cause is retrieving an old materials document + that has not been backfilled. + + If a blob is requested that does not exist, this checks for it on the filesystem + and if found, adds it to the blobstore, creates a StoredObject record, and returns + the contents as it would have done if the blob was already present. + + As a special case, if a requested file with extension `.md.html` does not exist + but a file with the same name but extension `.md` does, `.md` file will be rendered + from markdown to html and returned / stored. 
+ """ + DEFAULT_CONTENT_TYPES = { + ".html": "text/html;charset=utf-8", + ".md": "text/markdown;charset=utf-8", + ".pdf": "application/pdf", + ".txt": "text/plain;charset=utf-8", + } + + def _default_content_type(blob_name: str): + return DEFAULT_CONTENT_TYPES.get(Path(name).suffix, "application/octet-stream") + + if not ( + settings.ENABLE_BLOBSTORAGE + and bucket in settings.MATERIALS_TYPES_SERVED_BY_WORKER + ): + return HttpResponseNotFound(f"Bucket {bucket} not found.") + storage = storages[bucket] # if not configured, a server error will result + assert isinstance(storage, BlobdbStorage) + try: + blob = storage.open(name, "rb") + except FileNotFoundError: + pass + else: + # found the blob - return it + assert isinstance(blob, BlobFile) + return FileResponse( + blob, + filename=name, + content_type=blob.content_type or _default_content_type(name), + ) + + # Did not find the blob. Create it if we can + name_as_path = Path(name) + if name_as_path.suffixes == [".md", ".html"]: + # special case: .md.html means we want to create the .md and the .md.html + # will come along as a bonus + name_to_store = name_as_path.stem # removes the .html + else: + name_to_store = name + + # See if we have a meeting-related document that matches the requested bucket and + # name. + try: + doc, rev = _get_materials_doc(Path(name_to_store).stem) + if doc.type_id != bucket: + raise Document.DoesNotExist + except Document.DoesNotExist: + return HttpResponseNotFound( + f"Document corresponding to {bucket}:{name} not found." 
+ ) + else: + # create all missing blobs for the doc while we're at it + store_blobs_for_one_material_doc(doc) + + # If we can make the blob at all, it now exists, so return it or a 404 + try: + blob = storage.open(name, "rb") + except FileNotFoundError: + return HttpResponseNotFound(f"Object {bucket}:{name} not found.") + else: + # found the blob - return it + assert isinstance(blob, BlobFile) + return FileResponse( + blob, + filename=name, + content_type=blob.content_type or _default_content_type(name), + ) + + @login_required def materials_editable_groups(request, num=None): meeting = get_meeting(num) @@ -2949,6 +3101,7 @@ def upload_session_minutes(request, session_id, num): form.add_error(None, str(err)) else: # no exception -- success! + resolve_uploaded_material(meeting=session.meeting, doc=session.minutes()) messages.success(request, f'Successfully uploaded minutes as revision {session.minutes().rev}.') return redirect('ietf.meeting.views.session_details', num=num, acronym=session.group.acronym) else: @@ -3008,6 +3161,7 @@ def upload_session_narrativeminutes(request, session_id, num): form.add_error(None, str(err)) else: # no exception -- success! 
+ resolve_uploaded_material(meeting=session.meeting, doc=session.narrative_minutes()) messages.success(request, f'Successfully uploaded narrative minutes as revision {session.narrative_minutes().rev}.') return redirect('ietf.meeting.views.session_details', num=num, acronym=session.group.acronym) else: @@ -3154,6 +3308,7 @@ def upload_session_agenda(request, session_id, num): form.add_error(None, save_error) else: doc.save_with_history([e]) + resolve_uploaded_material(meeting=session.meeting, doc=doc) messages.success(request, f'Successfully uploaded agenda as revision {doc.rev}.') return redirect('ietf.meeting.views.session_details',num=num,acronym=session.group.acronym) else: @@ -3337,6 +3492,7 @@ def upload_session_slides(request, session_id, num, name=None): else: doc.save_with_history([e]) post_process(doc) + resolve_uploaded_material(meeting=session.meeting, doc=doc) # Send MeetEcho updates even if we had a problem saving - that will keep it in sync with the # SessionPresentation, which was already saved regardless of problems saving the file. 
@@ -4737,6 +4893,7 @@ def err(code, text): write_doc_for_session(session, 'chatlog', filename, json.dumps(apidata['chatlog'])) e = NewRevisionDocEvent.objects.create(doc=doc, rev=doc.rev, by=request.user.person, type='new_revision', desc='New revision available: %s'%doc.rev) doc.save_with_history([e]) + resolve_uploaded_material(meeting=session.meeting, doc=doc) return HttpResponse( "Done", status=200, @@ -4785,6 +4942,7 @@ def err(code, text): write_doc_for_session(session, 'polls', filename, json.dumps(apidata['polls'])) e = NewRevisionDocEvent.objects.create(doc=doc, rev=doc.rev, by=request.user.person, type='new_revision', desc='New revision available: %s'%doc.rev) doc.save_with_history([e]) + resolve_uploaded_material(meeting=session.meeting, doc=doc) return HttpResponse( "Done", status=200, @@ -5167,6 +5325,7 @@ def approve_proposed_slides(request, slidesubmission_id, num): doc.store_bytes(target_filename, retrieve_bytes("staging", submission.filename)) remove_from_storage("staging", submission.filename) post_process(doc) + resolve_uploaded_material(meeting=submission.session.meeting, doc=doc) DocEvent.objects.create(type="approved_slides", doc=doc, rev=doc.rev, by=request.user.person, desc="Slides approved") # update meetecho slide info if configured diff --git a/ietf/settings.py b/ietf/settings.py index 5e576430ed..eb5f9d2161 100644 --- a/ietf/settings.py +++ b/ietf/settings.py @@ -786,29 +786,29 @@ def skip_unreadable_post(record): # Storages for artifacts stored as blobs ARTIFACT_STORAGE_NAMES: list[str] = [ - "bofreq", - "charter", - "conflrev", "active-draft", - "draft", - "slides", - "minutes", "agenda", + "bibxml-ids", "bluesheets", - "procmaterials", - "narrativeminutes", - "statement", - "statchg", - "liai-att", + "bofreq", + "charter", "chatlog", - "polls", - "staging", - "bibxml-ids", - "indexes", + "conflrev", + "draft", "floorplan", + "indexes", + "liai-att", "meetinghostlogo", + "minutes", + "narrativeminutes", "photo", + "polls", + 
"procmaterials", "review", + "slides", + "staging", + "statchg", + "statement", ] for storagename in ARTIFACT_STORAGE_NAMES: STORAGES[storagename] = { @@ -816,6 +816,20 @@ def skip_unreadable_post(record): "OPTIONS": {"bucket_name": storagename}, } +# Buckets / doc types of meeting materials the CF worker is allowed to serve. This +# differs from the list in Session.meeting_related() by the omission of "recording" +MATERIALS_TYPES_SERVED_BY_WORKER = [ + "agenda", + "bluesheets", + "chatlog", + "minutes", + "narrativeminutes", + "polls", + "procmaterials", + "slides", +] + + # Override this in settings_local.py if needed # *_PATH variables ends with a slash/ . diff --git a/ietf/templates/minimal.html b/ietf/templates/minimal.html index 87f661f501..15c432505e 100644 --- a/ietf/templates/minimal.html +++ b/ietf/templates/minimal.html @@ -9,8 +9,8 @@ {{ title }} - - + + {# load this in the head, to prevent flickering #} From af0bcc743f6e449f93e0c7a7e4f2e2eec3ec76ae Mon Sep 17 00:00:00 2001 From: Nicolas Giard Date: Thu, 23 Oct 2025 17:14:39 -0400 Subject: [PATCH 231/405] docs: Update PostgreSQL version badge in README --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 4e1b7e1a45..dfaf871052 100644 --- a/README.md +++ b/README.md @@ -8,7 +8,7 @@ [![Python Version](https://img.shields.io/badge/python-3.9-blue?logo=python&logoColor=white)](#prerequisites) [![Django Version](https://img.shields.io/badge/django-4.x-51be95?logo=django&logoColor=white)](#prerequisites) [![Node Version](https://img.shields.io/badge/node.js-16.x-green?logo=node.js&logoColor=white)](#prerequisites) -[![MariaDB Version](https://img.shields.io/badge/postgres-16-blue?logo=postgresql&logoColor=white)](#prerequisites) +[![MariaDB Version](https://img.shields.io/badge/postgres-17-blue?logo=postgresql&logoColor=white)](#prerequisites) ##### The day-to-day front-end to the IETF database for people who work on IETF standards. 
From f9dea7df9d562ba818cf9224c1594f0e0983cdbe Mon Sep 17 00:00:00 2001 From: Nicolas Giard Date: Thu, 23 Oct 2025 17:24:58 -0400 Subject: [PATCH 232/405] docs: Update Python version badge to 3.12 in README --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index dfaf871052..baffc311e7 100644 --- a/README.md +++ b/README.md @@ -5,7 +5,7 @@ [![Release](https://img.shields.io/github/release/ietf-tools/datatracker.svg?style=flat&maxAge=300)](https://github.com/ietf-tools/datatracker/releases) [![License](https://img.shields.io/github/license/ietf-tools/datatracker)](https://github.com/ietf-tools/datatracker/blob/main/LICENSE) [![Code Coverage](https://codecov.io/gh/ietf-tools/datatracker/branch/feat/bs5/graph/badge.svg?token=V4DXB0Q28C)](https://codecov.io/gh/ietf-tools/datatracker) -[![Python Version](https://img.shields.io/badge/python-3.9-blue?logo=python&logoColor=white)](#prerequisites) +[![Python Version](https://img.shields.io/badge/python-3.12-blue?logo=python&logoColor=white)](#prerequisites) [![Django Version](https://img.shields.io/badge/django-4.x-51be95?logo=django&logoColor=white)](#prerequisites) [![Node Version](https://img.shields.io/badge/node.js-16.x-green?logo=node.js&logoColor=white)](#prerequisites) [![MariaDB Version](https://img.shields.io/badge/postgres-17-blue?logo=postgresql&logoColor=white)](#prerequisites) From e0691c17121d2324d812bc68c3943d963d1c5d4d Mon Sep 17 00:00:00 2001 From: Nicolas Giard Date: Thu, 23 Oct 2025 17:30:50 -0400 Subject: [PATCH 233/405] ci: remove assets rsync sync job from dev-assets-sync-nightly workflow Removed the nightly sync job for assets in the workflow. 
--- .github/workflows/dev-assets-sync-nightly.yml | 14 -------------- 1 file changed, 14 deletions(-) diff --git a/.github/workflows/dev-assets-sync-nightly.yml b/.github/workflows/dev-assets-sync-nightly.yml index 19933bddfd..4cfbf6365b 100644 --- a/.github/workflows/dev-assets-sync-nightly.yml +++ b/.github/workflows/dev-assets-sync-nightly.yml @@ -47,17 +47,3 @@ jobs: file: dev/shared-assets-sync/Dockerfile push: true tags: ghcr.io/ietf-tools/datatracker-rsync-assets:latest - - sync: - name: Run assets rsync - if: ${{ always() }} - runs-on: [self-hosted, dev-server] - needs: [build] - steps: - - name: Run rsync - env: - DEBIAN_FRONTEND: noninteractive - run: | - docker pull ghcr.io/ietf-tools/datatracker-rsync-assets:latest - docker run --rm -v dt-assets:/assets ghcr.io/ietf-tools/datatracker-rsync-assets:latest - docker image prune -a -f From 354d83d2fa22f817384a792bcbdef9757771f70a Mon Sep 17 00:00:00 2001 From: Nicolas Giard Date: Thu, 23 Oct 2025 17:34:00 -0400 Subject: [PATCH 234/405] ci: remove sandbox-refresh workflow --- .github/workflows/sandbox-refresh.yml | 35 --------------------------- 1 file changed, 35 deletions(-) delete mode 100644 .github/workflows/sandbox-refresh.yml diff --git a/.github/workflows/sandbox-refresh.yml b/.github/workflows/sandbox-refresh.yml deleted file mode 100644 index 3ddb119e4f..0000000000 --- a/.github/workflows/sandbox-refresh.yml +++ /dev/null @@ -1,35 +0,0 @@ -name: Sandbox Refresh - -on: - # Run every night - schedule: - - cron: '0 9 * * *' - - workflow_dispatch: - -jobs: - main: - name: Refresh DBs - runs-on: [self-hosted, dev-server] - permissions: - contents: read - - steps: - - uses: actions/checkout@v4 - - - name: Refresh DBs - env: - DEBIAN_FRONTEND: noninteractive - run: | - echo "Install Deploy to Container CLI dependencies..." - cd dev/deploy-to-container - npm ci - cd ../.. - echo "Start Refresh..." 
- node ./dev/deploy-to-container/refresh.js - - - name: Cleanup old docker resources - env: - DEBIAN_FRONTEND: noninteractive - run: | - docker image prune -a -f From 4e6168607cb49abc9341b27049f458bc9363297a Mon Sep 17 00:00:00 2001 From: Jennifer Richards Date: Thu, 23 Oct 2025 20:43:04 -0300 Subject: [PATCH 235/405] ci: proceedings cache cfg for prod/tests (#9784) --- ietf/settings_testcrawl.py | 4 +++- k8s/settings_local.py | 11 +++++++++++ 2 files changed, 14 insertions(+), 1 deletion(-) diff --git a/ietf/settings_testcrawl.py b/ietf/settings_testcrawl.py index a1b5ce8946..40744a228d 100644 --- a/ietf/settings_testcrawl.py +++ b/ietf/settings_testcrawl.py @@ -27,9 +27,11 @@ 'MAX_ENTRIES': 10000, }, }, + 'proceedings': { + 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', + }, 'sessions': { 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', - # No version-specific VERSION setting. }, 'htmlized': { 'BACKEND': 'django.core.cache.backends.dummy.DummyCache', diff --git a/k8s/settings_local.py b/k8s/settings_local.py index c09bd70c86..f8ffacc83f 100644 --- a/k8s/settings_local.py +++ b/k8s/settings_local.py @@ -301,6 +301,17 @@ def _multiline_to_list(s): "LOCATION": f"{MEMCACHED_HOST}:{MEMCACHED_PORT}", "VERSION": __version__, "KEY_PREFIX": "ietf:dt", + # Key function is default except with sha384-encoded key + "KEY_FUNCTION": lambda key, key_prefix, version: ( + f"{key_prefix}:{version}:{sha384(str(key).encode('utf8')).hexdigest()}" + ), + }, + "proceedings": { + "BACKEND": "ietf.utils.cache.LenientMemcacheCache", + "LOCATION": f"{MEMCACHED_HOST}:{MEMCACHED_PORT}", + # No release-specific VERSION setting. 
+ "KEY_PREFIX": "ietf:dt:proceedings", + # Key function is default except with sha384-encoded key "KEY_FUNCTION": lambda key, key_prefix, version: ( f"{key_prefix}:{version}:{sha384(str(key).encode('utf8')).hexdigest()}" ), From 6db7d4afbe2b876192d0aa4a63a0bbe98a3806be Mon Sep 17 00:00:00 2001 From: Jennifer Richards Date: Tue, 28 Oct 2025 20:06:53 -0300 Subject: [PATCH 236/405] fix: don't trust libmagic charset recognition (#9815) --- ietf/meeting/views.py | 17 +++++++++++++---- 1 file changed, 13 insertions(+), 4 deletions(-) diff --git a/ietf/meeting/views.py b/ietf/meeting/views.py index cf6fed596b..d6b5a1c0db 100644 --- a/ietf/meeting/views.py +++ b/ietf/meeting/views.py @@ -329,7 +329,7 @@ def materials_document(request, document, num=None, ext=None): old_proceedings_format = meeting.number.isdigit() and int(meeting.number) <= 96 if settings.MEETING_MATERIALS_SERVE_LOCALLY or old_proceedings_format: bytes = filename.read_bytes() - mtype, chset = get_mime_type(bytes) + mtype, chset = get_mime_type(bytes) # chset does not consider entire file! content_type = "%s; charset=%s" % (mtype, chset) if filename.suffix == ".md" and mtype == "text/plain": @@ -339,15 +339,24 @@ def materials_document(request, document, num=None, ext=None): content_type = content_type.replace("plain", "markdown", 1) break elif atype[0] == "text/html": + # Render markdown, allowing that charset may be inaccurate. 
+ try: + md_src = bytes.decode( + "utf-8" if chset in ["ascii", "us-ascii"] else chset + ) + except UnicodeDecodeError: + # latin-1, aka iso8859-1, accepts all 8-bit code points + md_src = bytes.decode("latin-1") + content = markdown.markdown(md_src) # a string bytes = render_to_string( "minimal.html", { - "content": markdown.markdown(bytes.decode(encoding=chset)), + "content": content, "title": filename.name, "static_ietf_org": settings.STATIC_IETF_ORG, }, - ) - content_type = content_type.replace("plain", "html", 1) + ).encode("utf-8") + content_type = "text/html; charset=utf-8" break elif atype[0] == "text/plain": break From 3e34efe74950d7f237171e9ea5cedc24d8d08615 Mon Sep 17 00:00:00 2001 From: Jennifer Richards Date: Tue, 28 Oct 2025 20:09:27 -0300 Subject: [PATCH 237/405] chore: update names fixture (#9807) * chore(dev): update names fixture * chore(dev): update names fixture again --- ietf/name/fixtures/names.json | 28 ++++++++++++++-------------- 1 file changed, 14 insertions(+), 14 deletions(-) diff --git a/ietf/name/fixtures/names.json b/ietf/name/fixtures/names.json index 58deb01f0c..64e26e503a 100644 --- a/ietf/name/fixtures/names.json +++ b/ietf/name/fixtures/names.json @@ -650,7 +650,7 @@ }, { "fields": { - "desc": "4.2.1. Call for Adoption by WG Issued\r\n\r\n The \"Call for Adoption by WG Issued\" state should be used to indicate when an I-D is being considered for adoption by an IETF WG. An I-D that is in this state is actively being considered for adoption and has not yet achieved consensus, preference, or selection in the WG.\r\n\r\n This state may be used to describe an I-D that someone has asked a WG to consider for adoption, if the WG Chair has agreed with the request. 
This state may also be used to identify an I-D that a WG Chair asked an author to write specifically for consideration as a candidate WG item [WGDTSPEC], and/or an I-D that is listed as a 'candidate draft' in the WG's charter.\r\n\r\n Under normal conditions, it should not be possible for an I-D to be in the \"Call for Adoption by WG Issued\" state in more than one working group at the same time. This said, it is not uncommon for authors to \"shop\" their I-Ds to more than one WG at a time, with the hope of getting their documents adopted somewhere.\r\n\r\n After this state is implemented in the Datatracker, an I-D that is in the \"Call for Adoption by WG Issued\" state will not be able to be \"shopped\" to any other WG without the consent of the WG Chairs and the responsible ADs impacted by the shopping.\r\n\r\n Note that Figure 1 includes an arc leading from this state to outside of the WG state machine. This illustrates that some I-Ds that are considered do not get adopted as WG drafts. An I-D that is not adopted as a WG draft will transition out of the WG state machine and revert back to having no stream-specific state; however, the status change history log of the I-D will record that the I-D was previously in the \"Call for Adoption by WG Issued\" state.", + "desc": "A call for adoption of the individual submission document has been issued by the Working Group (WG) chairs. This call is still running but the WG has not yet reached consensus for adoption.", "name": "Call For Adoption By WG Issued", "next_states": [ 36, @@ -666,7 +666,7 @@ }, { "fields": { - "desc": "4.2.2. Adopted by a WG\r\n\r\n The \"Adopted by a WG\" state describes an individual submission I-D that an IETF WG has agreed to adopt as one of its WG drafts.\r\n\r\n WG Chairs who use this state will be able to clearly indicate when their WGs adopt individual submission I-Ds. 
This will facilitate the Datatracker's ability to correctly capture \"Replaces\" information for WG drafts and correct \"Replaced by\" information for individual submission I-Ds that have been replaced by WG drafts.\r\n\r\n This state is needed because the Datatracker uses the filename of an I-D as a key to search its database for status information about the I-D, and because the filename of a WG I-D is supposed to be different from the filename of an individual submission I-D. The filename of an individual submission I-D will typically be formatted as 'draft-author-wgname-topic-nn'.\r\n\r\n The filename of a WG document is supposed to be formatted as 'draft- ietf-wgname-topic-nn'.\r\n\r\n An individual I-D that is adopted by a WG may take weeks or months to be resubmitted by the author as a new (version-00) WG draft. If the \"Adopted by a WG\" state is not used, the Datatracker has no way to determine that an I-D has been adopted until a new version of the I-D is submitted to the WG by the author and until the I-D is approved for posting by a WG Chair.", + "desc": "The individual submission document has been adopted by the Working Group (WG), but a WG document replacing this document with the typical naming convention of 'draft- ietf-wgname-topic-nn' has not yet been submitted.", "name": "Adopted by a WG", "next_states": [ 38 @@ -681,7 +681,7 @@ }, { "fields": { - "desc": "4.2.3. Adopted for WG Info Only\r\n\r\n The \"Adopted for WG Info Only\" state describes a document that contains useful information for the WG that adopted it, but the document is not intended to be published as an RFC. The WG will not actively develop the contents of the I-D or progress it for publication as an RFC. The only purpose of the I-D is to provide information for internal use by the WG.", + "desc": "The document is adopted by the Working Group (WG) for its internal use. 
The WG has decided that it will not pursue publication of it as an RFC.", "name": "Adopted for WG Info Only", "next_states": [], "order": 3, @@ -694,7 +694,7 @@ }, { "fields": { - "desc": "4.2.4. WG Document\r\n\r\n The \"WG Document\" state describes an I-D that has been adopted by an IETF WG and is being actively developed.\r\n\r\n A WG Chair may transition an I-D into the \"WG Document\" state at any time as long as the I-D is not being considered or developed in any other WG.\r\n\r\n Alternatively, WG Chairs may rely upon new functionality to be added to the Datatracker to automatically move version-00 drafts into the \"WG Document\" state as described in Section 4.1.\r\n\r\n Under normal conditions, it should not be possible for an I-D to be in the \"WG Document\" state in more than one WG at a time. This said, I-Ds may be transferred from one WG to another with the consent of the WG Chairs and the responsible ADs.", + "desc": "The document has been adopted by the Working Group (WG) and is under development. A document can only be adopted by one WG at a time. However, a document may be transferred between WGs.", "name": "WG Document", "next_states": [ 39, @@ -712,7 +712,7 @@ }, { "fields": { - "desc": "4.2.5. 
Parked WG Document\r\n\r\n A \"Parked WG Document\" is an I-D that has lost its author or editor, is waiting for another document to be written or for a review to be completed, or cannot be progressed by the working group for some other reason.\r\n\r\n Some of the annotation tags described in Section 4.3 may be used in conjunction with this state to indicate why an I-D has been parked, and/or what may need to happen for the I-D to be un-parked.\r\n\r\n Parking a WG draft will not prevent it from expiring; however, this state can be used to indicate why the I-D has stopped progressing in the WG.\r\n\r\n A \"Parked WG Document\" that is not expired may be transferred from one WG to another with the consent of the WG Chairs and the responsible ADs.", + "desc": "The Working Group (WG) document is in a temporary state where it will not be actively developed. The reason for the pause is explained via a datatracker comments section.", "name": "Parked WG Document", "next_states": [ 38 @@ -727,7 +727,7 @@ }, { "fields": { - "desc": "4.2.6. Dead WG Document\r\n\r\n A \"Dead WG Document\" is an I-D that has been abandoned. Note that 'Dead' is not always a final state for a WG I-D. If consensus is subsequently achieved, a \"Dead WG Document\" may be resurrected. A \"Dead WG Document\" that is not resurrected will eventually expire.\r\n\r\n Note that an I-D that is declared to be \"Dead\" in one WG and that is not expired may be transferred to a non-dead state in another WG with the consent of the WG Chairs and the responsible ADs.", + "desc": "The Working Group (WG) document has been abandoned by the WG. No further development is planned in this WG. A decision to resume work on this document and move it out of this state is possible.", "name": "Dead WG Document", "next_states": [ 38 @@ -742,7 +742,7 @@ }, { "fields": { - "desc": "4.2.7. 
In WG Last Call\r\n\r\n A document \"In WG Last Call\" is an I-D for which a WG Last Call (WGLC) has been issued and is in progress.\r\n\r\n Note that conducting a WGLC is an optional part of the IETF WG process, per Section 7.4 of RFC 2418 [RFC2418].\r\n\r\n If a WG Chair decides to conduct a WGLC on an I-D, the \"In WG Last Call\" state can be used to track the progress of the WGLC. The Chair may configure the Datatracker to send a WGLC message to one or more mailing lists when the Chair moves the I-D into this state. The WG Chair may also be able to select a different set of mailing lists for a different document undergoing a WGLC; some documents may deserve coordination with other WGs.\r\n\r\n A WG I-D in this state should remain \"In WG Last Call\" until the WG Chair moves it to another state. The WG Chair may configure the Datatracker to send an e-mail after a specified period of time to remind or 'nudge' the Chair to conclude the WGLC and to determine the next state for the document.\r\n\r\n It is possible for one WGLC to lead into another WGLC for the same document. For example, an I-D that completed a WGLC as an \"Informational\" document may need another WGLC if a decision is taken to convert the I-D into a Standards Track document.", + "desc": "The Working Group (WG) document is currently subject to an active WG Last Call (WGLC) review per Section 7.4 of RFC2418.", "name": "In WG Last Call", "next_states": [ 38, @@ -759,7 +759,7 @@ }, { "fields": { - "desc": "4.2.8. Waiting for WG Chair Go-Ahead\r\n\r\n A WG Chair may wish to place an I-D that receives a lot of comments during a WGLC into the \"Waiting for WG Chair Go-Ahead\" state. 
This state describes an I-D that has undergone a WGLC; however, the Chair is not yet ready to call consensus on the document.\r\n\r\n If comments from the WGLC need to be responded to, or a revision to the I-D is needed, the Chair may place an I-D into this state until all of the WGLC comments are adequately addressed and the (possibly revised) document is in the I-D repository.", + "desc": "The Working Group (WG) document has completed Working Group Last Call (WGLC), but the WG chair(s) are not yet ready to call consensus on the document. The reasons for this may include comments from the WGLC need to be responded to, or a revision to the document is needed", "name": "Waiting for WG Chair Go-Ahead", "next_states": [ 41, @@ -775,7 +775,7 @@ }, { "fields": { - "desc": "4.2.9. WG Consensus: Waiting for Writeup\r\n\r\n A document in the \"WG Consensus: Waiting for Writeup\" state has essentially completed its development within the working group, and is nearly ready to be sent to the IESG for publication. The last thing to be done is the preparation of a protocol writeup by a Document Shepherd. The IESG requires that a document shepherd writeup be completed before publication of the I-D is requested. The IETF document shepherding process and the role of a WG Document Shepherd is described in RFC 4858 [RFC4858]\r\n\r\n A WG Chair may call consensus on an I-D without a formal WGLC and transition an I-D that was in the \"WG Document\" state directly into this state.\r\n\r\n The name of this state includes the words \"Waiting for Writeup\" because a good document shepherd writeup takes time to prepare.", + "desc": "The Working Group (WG) document has consensus to proceed to publication. However, the document is waiting for a document shepherd write-up per RFC4858.", "name": "WG Consensus: Waiting for Write-Up", "next_states": [ 44 @@ -790,7 +790,7 @@ }, { "fields": { - "desc": "4.2.10. 
Submitted to IESG for Publication\r\n\r\n This state describes a WG document that has been submitted to the IESG for publication and that has not been sent back to the working group for revision.\r\n\r\n An I-D in this state may be under review by the IESG, it may have been approved and be in the RFC Editor's queue, or it may have been published as an RFC. Other possibilities exist too. The document may be \"Dead\" (in the IESG state machine) or in a \"Do Not Publish\" state.", + "desc": "The Working Group (WG) document has left the WG and been submitted to the Internet Engineering Steering Group (IESG) for evaluation and publication. See the “IESG State” or “RFC Editor State” for further details on the state of the document.", "name": "Submitted to IESG for Publication", "next_states": [ 38 @@ -2020,7 +2020,7 @@ }, { "fields": { - "desc": "The document has been marked as a candidate for WG adoption by the WG Chair. This state can be used before a call for adoption is issued (and the document is put in the \"Call For Adoption By WG Issued\" state), to indicate that the document is in the queue for a call for adoption, even if none has been issued yet.", + "desc": "The individual submission document has been marked by the Working Group (WG) chairs as a candidate for adoption by the WG, but no adoption call has been started.", "name": "Candidate for WG Adoption", "next_states": [ 35 @@ -2152,7 +2152,7 @@ }, { "fields": { - "desc": "In some areas, it can be desirable to wait for multiple interoperable implementations before progressing a draft to be an RFC, and in some WGs this is required. This state should be entered after WG Last Call has completed.", + "desc": "The progression of this Working Group (WG) document towards publication is paused as it awaits implementation. 
The process governing the approach to implementations is WG-specific.", "name": "Waiting for Implementation", "next_states": [], "order": 8, @@ -2165,7 +2165,7 @@ }, { "fields": { - "desc": "Held by WG, see document history for details.", + "desc": "Held by Working Group (WG) chairs for administrative reasons. See document history for details.", "name": "Held by WG", "next_states": [], "order": 9, @@ -4473,6 +4473,7 @@ ], "session_purposes": [ "coding", + "open_meeting", "presentation", "social", "tutorial" @@ -5535,7 +5536,6 @@ ], "desc": "Recipients for a message when a new incoming liaison statement is posted", "to": [ - "liaison_from_contact", "liaison_to_contacts" ] }, From 145b9f76c19030b67628432b5f811a1c3c55c749 Mon Sep 17 00:00:00 2001 From: Jennifer Richards Date: Tue, 28 Oct 2025 20:11:52 -0300 Subject: [PATCH 238/405] chore(dev): bump dev blobdb to pg17 (#9806) --- docker-compose.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker-compose.yml b/docker-compose.yml index 8c6e0ea486..2440faf121 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -116,7 +116,7 @@ services: - "minio-data:/data" blobdb: - image: postgres:16 + image: postgres:17 restart: unless-stopped environment: POSTGRES_DB: blob From cbb0e2e3db4cc9e591b4397b7bc6cdebb51cfc8c Mon Sep 17 00:00:00 2001 From: Jennifer Richards Date: Wed, 29 Oct 2025 11:18:47 -0300 Subject: [PATCH 239/405] feat: logs in api_retrieve_materials_blob() (#9818) --- ietf/meeting/views.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/ietf/meeting/views.py b/ietf/meeting/views.py index d6b5a1c0db..69635d6219 100644 --- a/ietf/meeting/views.py +++ b/ietf/meeting/views.py @@ -451,6 +451,7 @@ def _default_content_type(blob_name: str): else: # found the blob - return it assert isinstance(blob, BlobFile) + log(f"Materials blob: directly returning {bucket}:{name}") return FileResponse( blob, filename=name, @@ -473,17 +474,20 @@ def _default_content_type(blob_name: str): if doc.type_id 
!= bucket: raise Document.DoesNotExist except Document.DoesNotExist: + log(f"Materials blob: no doc for {bucket}:{name}") return HttpResponseNotFound( f"Document corresponding to {bucket}:{name} not found." ) else: # create all missing blobs for the doc while we're at it + log(f"Materials blob: storing blobs for {doc.name}-{doc.rev}") store_blobs_for_one_material_doc(doc) # If we can make the blob at all, it now exists, so return it or a 404 try: blob = storage.open(name, "rb") except FileNotFoundError: + log(f"Materials blob: no blob for {bucket}:{name}") return HttpResponseNotFound(f"Object {bucket}:{name} not found.") else: # found the blob - return it From c47fe34b0e409f4811e2f96fc45ec87bc1b7931f Mon Sep 17 00:00:00 2001 From: Robert Sparks Date: Mon, 3 Nov 2025 09:05:30 -0500 Subject: [PATCH 240/405] fix: include punctuation when tablesorting (#9855) --- ietf/static/js/list.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ietf/static/js/list.js b/ietf/static/js/list.js index 756a75001a..c03368cd72 100644 --- a/ietf/static/js/list.js +++ b/ietf/static/js/list.js @@ -16,7 +16,7 @@ function text_sort(a, b, options) { // sort by text content return prep(a, options).localeCompare(prep(b, options), "en", { sensitivity: "base", - ignorePunctuation: true, + ignorePunctuation: false, numeric: true }); } From 87c3a9db06b784d2cf1484a547171a9783e50fdc Mon Sep 17 00:00:00 2001 From: Kesara Rathnayake Date: Mon, 3 Nov 2025 09:08:53 -0500 Subject: [PATCH 241/405] feat(agenda): Show calendar links to all the events (#9843) * feat(agenda): Show calendar links to all the events * test: Update playwright tests --- client/agenda/AgendaScheduleList.vue | 20 ++++++++++---------- playwright/tests/meeting/agenda.spec.js | 7 ++++++- 2 files changed, 16 insertions(+), 11 deletions(-) diff --git a/client/agenda/AgendaScheduleList.vue b/client/agenda/AgendaScheduleList.vue index fc8b5fd30f..bbe5dfee8b 100644 --- a/client/agenda/AgendaScheduleList.vue +++ 
b/client/agenda/AgendaScheduleList.vue @@ -398,16 +398,6 @@ const meetingEvents = computed(() => { color: 'teal' }) } - // -> Calendar item - if (item.links.calendar) { - links.push({ - id: `lnk-${item.id}-calendar`, - label: 'Calendar (.ics) entry for this session', - icon: 'calendar-check', - href: item.links.calendar, - color: 'pink' - }) - } } else { // -> Post event if (meetingNumberInt >= 60) { @@ -484,6 +474,16 @@ const meetingEvents = computed(() => { } } } + // Add Calendar item for all events that has a calendar link + if (item.adjustedEnd > current && item.links.calendar) { + links.push({ + id: `lnk-${item.id}-calendar`, + label: 'Calendar (.ics) entry for this session', + icon: 'calendar-check', + href: item.links.calendar, + color: 'pink' + }) + } // Event icon let icon = null diff --git a/playwright/tests/meeting/agenda.spec.js b/playwright/tests/meeting/agenda.spec.js index 412a3fe9b8..2248027a38 100644 --- a/playwright/tests/meeting/agenda.spec.js +++ b/playwright/tests/meeting/agenda.spec.js @@ -1219,7 +1219,12 @@ test.describe('future - desktop', () => { await expect(eventButtons.locator(`#btn-lnk-${event.id}-calendar > i.bi`)).toBeVisible() } } else { - await expect(eventButtons).toHaveCount(0) + if (event.links.calendar) { + await expect(eventButtons.locator(`#btn-lnk-${event.id}-calendar`)).toHaveAttribute('href', event.links.calendar) + await expect(eventButtons.locator(`#btn-lnk-${event.id}-calendar > i.bi`)).toBeVisible() + } else { + await expect(eventButtons).toHaveCount(0) + } } } } From 8da45cb8488345a1f449e6fc7442098cff81e3ff Mon Sep 17 00:00:00 2001 From: Jennifer Richards Date: Mon, 3 Nov 2025 09:10:59 -0500 Subject: [PATCH 242/405] feat: optionally hide room-only schedule diffs (#9861) * feat: optionally hide room-only schedule diffs * test: update test --- ietf/meeting/tests_views.py | 194 +++++++++++++++++++++++++----------- ietf/meeting/views.py | 13 +++ 2 files changed, 151 insertions(+), 56 deletions(-) diff --git 
a/ietf/meeting/tests_views.py b/ietf/meeting/tests_views.py index b1bbc62907..50960b5143 100644 --- a/ietf/meeting/tests_views.py +++ b/ietf/meeting/tests_views.py @@ -49,7 +49,11 @@ from ietf.meeting.helpers import send_interim_minutes_reminder, populate_important_dates, update_important_dates from ietf.meeting.models import Session, TimeSlot, Meeting, SchedTimeSessAssignment, Schedule, SessionPresentation, SlideSubmission, SchedulingEvent, Room, Constraint, ConstraintName from ietf.meeting.test_data import make_meeting_test_data, make_interim_meeting, make_interim_test_data -from ietf.meeting.utils import condition_slide_order, generate_proceedings_content +from ietf.meeting.utils import ( + condition_slide_order, + generate_proceedings_content, + diff_meeting_schedules, +) from ietf.meeting.utils import add_event_info_to_session_qs, participants_for_meeting from ietf.meeting.utils import create_recording, delete_recording, get_next_sequence, bluesheet_data from ietf.meeting.views import session_draft_list, parse_agenda_filter_params, sessions_post_save, agenda_extract_schedule @@ -4765,73 +4769,151 @@ def test_list_schedules(self): self.assertTrue(r.status_code, 200) def test_diff_schedules(self): - meeting = make_meeting_test_data() - - url = urlreverse('ietf.meeting.views.diff_schedules',kwargs={'num':meeting.number}) - login_testing_unauthorized(self,"secretary", url) - r = self.client.get(url) - self.assertTrue(r.status_code, 200) - - from_schedule = Schedule.objects.get(meeting=meeting, name="test-unofficial-schedule") - - session1 = Session.objects.filter(meeting=meeting, group__acronym='mars').first() - session2 = Session.objects.filter(meeting=meeting, group__acronym='ames').first() - session3 = SessionFactory(meeting=meeting, group=Group.objects.get(acronym='mars'), - attendees=10, requested_duration=datetime.timedelta(minutes=70), - add_to_schedule=False) - SchedulingEvent.objects.create(session=session3, status_id='schedw', by=Person.objects.first()) 
- - slot2 = TimeSlot.objects.filter(meeting=meeting, type='regular').order_by('-time').first() - slot3 = TimeSlot.objects.create( - meeting=meeting, type_id='regular', location=slot2.location, - duration=datetime.timedelta(minutes=60), - time=slot2.time + datetime.timedelta(minutes=60), + # Create meeting and some time slots + meeting = MeetingFactory(type_id="ietf", populate_schedule=False) + rooms = RoomFactory.create_batch(2, meeting=meeting) + # first index is room, second is time + timeslots = [ + [ + TimeSlotFactory( + location=room, + meeting=meeting, + time=datetime.datetime.combine( + meeting.date, datetime.time(9, 0, tzinfo=datetime.UTC) + ) + ), + TimeSlotFactory( + location=room, + meeting=meeting, + time=datetime.datetime.combine( + meeting.date, datetime.time(10, 0, tzinfo=datetime.UTC) + ) + ), + TimeSlotFactory( + location=room, + meeting=meeting, + time=datetime.datetime.combine( + meeting.date, datetime.time(11, 0, tzinfo=datetime.UTC) + ) + ), + ] + for room in rooms + ] + sessions = SessionFactory.create_batch( + 5, meeting=meeting, add_to_schedule=False ) - # copy - new_url = urlreverse("ietf.meeting.views.new_meeting_schedule", kwargs=dict(num=meeting.number, owner=from_schedule.owner_email(), name=from_schedule.name)) - r = self.client.post(new_url, { - 'name': "newtest", - 'public': "on", - }) - self.assertNoFormPostErrors(r) + from_schedule = ScheduleFactory(meeting=meeting) + to_schedule = ScheduleFactory(meeting=meeting) - to_schedule = Schedule.objects.get(meeting=meeting, name='newtest') + # sessions[0]: not scheduled in from_schedule, scheduled in to_schedule + SchedTimeSessAssignment.objects.create( + schedule=to_schedule, + session=sessions[0], + timeslot=timeslots[0][0], + ) + # sessions[1]: scheduled in from_schedule, not scheduled in to_schedule + SchedTimeSessAssignment.objects.create( + schedule=from_schedule, + session=sessions[1], + timeslot=timeslots[0][0], + ) + # sessions[2]: moves rooms, not time + 
SchedTimeSessAssignment.objects.create( + schedule=from_schedule, + session=sessions[2], + timeslot=timeslots[0][1], + ) + SchedTimeSessAssignment.objects.create( + schedule=to_schedule, + session=sessions[2], + timeslot=timeslots[1][1], + ) + # sessions[3]: moves time, not room + SchedTimeSessAssignment.objects.create( + schedule=from_schedule, + session=sessions[3], + timeslot=timeslots[1][1], + ) + SchedTimeSessAssignment.objects.create( + schedule=to_schedule, + session=sessions[3], + timeslot=timeslots[1][2], + ) + # sessions[4]: moves room and time + SchedTimeSessAssignment.objects.create( + schedule=from_schedule, + session=sessions[4], + timeslot=timeslots[1][0], + ) + SchedTimeSessAssignment.objects.create( + schedule=to_schedule, + session=sessions[4], + timeslot=timeslots[0][2], + ) - # make some changes + # Check the raw diffs + raw_diffs = diff_meeting_schedules(from_schedule, to_schedule) + self.assertCountEqual( + raw_diffs, + [ + { + "change": "schedule", + "session": sessions[0].pk, + "to": timeslots[0][0].pk, + }, + { + "change": "unschedule", + "session": sessions[1].pk, + "from": timeslots[0][0].pk, + }, + { + "change": "move", + "session": sessions[2].pk, + "from": timeslots[0][1].pk, + "to": timeslots[1][1].pk, + }, + { + "change": "move", + "session": sessions[3].pk, + "from": timeslots[1][1].pk, + "to": timeslots[1][2].pk, + }, + { + "change": "move", + "session": sessions[4].pk, + "from": timeslots[1][0].pk, + "to": timeslots[0][2].pk, + }, + ] + ) - edit_url = urlreverse("ietf.meeting.views.edit_meeting_schedule", kwargs=dict(num=meeting.number, owner=to_schedule.owner_email(), name=to_schedule.name)) + # Check the view + url = urlreverse("ietf.meeting.views.diff_schedules", + kwargs={"num": meeting.number}) + login_testing_unauthorized(self, "secretary", url) + r = self.client.get(url) + self.assertTrue(r.status_code, 200) - # schedule session - r = self.client.post(edit_url, { - 'action': 'assign', - 'timeslot': slot3.pk, - 'session': 
session3.pk, - }) - self.assertEqual(json.loads(r.content)['success'], True) - # unschedule session - r = self.client.post(edit_url, { - 'action': 'unassign', - 'session': session1.pk, - }) - self.assertEqual(json.loads(r.content)['success'], True) - # move session - r = self.client.post(edit_url, { - 'action': 'assign', - 'timeslot': slot2.pk, - 'session': session2.pk, + # with show room changes disabled - does not show sessions[2] because it did + # not change time + r = self.client.get(url, { + "from_schedule": from_schedule.name, + "to_schedule": to_schedule.name, }) - self.assertEqual(json.loads(r.content)['success'], True) + self.assertTrue(r.status_code, 200) + q = PyQuery(r.content) + self.assertEqual(len(q(".schedule-diffs tr")), 4 + 1) - # now get differences + # with show room changes enabled - shows all changes r = self.client.get(url, { - 'from_schedule': from_schedule.name, - 'to_schedule': to_schedule.name, + "from_schedule": from_schedule.name, + "to_schedule": to_schedule.name, + "show_room_changes": "on", }) self.assertTrue(r.status_code, 200) - q = PyQuery(r.content) - self.assertEqual(len(q(".schedule-diffs tr")), 3+1) + self.assertEqual(len(q(".schedule-diffs tr")), 5 + 1) def test_delete_schedule(self): url = urlreverse('ietf.meeting.views.delete_schedule', diff --git a/ietf/meeting/views.py b/ietf/meeting/views.py index 69635d6219..b0c46cb05a 100644 --- a/ietf/meeting/views.py +++ b/ietf/meeting/views.py @@ -1675,6 +1675,11 @@ def list_schedules(request, num): class DiffSchedulesForm(forms.Form): from_schedule = forms.ChoiceField() to_schedule = forms.ChoiceField() + show_room_changes = forms.BooleanField( + initial=False, + required=False, + help_text="Include changes to room without a date or time change", + ) def __init__(self, meeting, user, *args, **kwargs): super().__init__(*args, **kwargs) @@ -1707,6 +1712,14 @@ def diff_schedules(request, num): raw_diffs = diff_meeting_schedules(from_schedule, to_schedule) diffs = 
prefetch_schedule_diff_objects(raw_diffs) + if not form.cleaned_data["show_room_changes"]: + # filter out room-only changes + diffs = [ + d + for d in diffs + if (d["change"] != "move") or (d["from"].time != d["to"].time) + ] + for d in diffs: s = d['session'] s.session_label = s.short_name From 9546e15224df7d8d9f385a8f670cd27012d7aee5 Mon Sep 17 00:00:00 2001 From: Jennifer Richards Date: Mon, 3 Nov 2025 09:11:32 -0500 Subject: [PATCH 243/405] fix: no autoescape for bluesheet template (#9858) --- ietf/templates/meeting/bluesheet.txt | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/ietf/templates/meeting/bluesheet.txt b/ietf/templates/meeting/bluesheet.txt index dd3bf36ac7..5b3960f3aa 100644 --- a/ietf/templates/meeting/bluesheet.txt +++ b/ietf/templates/meeting/bluesheet.txt @@ -1,7 +1,8 @@ -Bluesheet for {{session}} +{% autoescape off %}Bluesheet for {{session}} ======================================================================== {{ data|length }} attendees. {% for item in data %} {{ item.name }} {{ item.affiliation }}{% endfor %} +{% endautoescape %} From 7b4035d7fcd1130cdf8e08b3aa54efda35087a8a Mon Sep 17 00:00:00 2001 From: Tero Kivinen Date: Mon, 3 Nov 2025 18:16:33 +0200 Subject: [PATCH 244/405] fix: Change add period button to save new period. (#9847) --- ietf/templates/group/change_reviewer_settings.html | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ietf/templates/group/change_reviewer_settings.html b/ietf/templates/group/change_reviewer_settings.html index 9ecec5633c..75451fdd75 100644 --- a/ietf/templates/group/change_reviewer_settings.html +++ b/ietf/templates/group/change_reviewer_settings.html @@ -89,7 +89,7 @@

    Unavailable periods

    + value="add_period">Save new period

    History of settings

    From 1ba63977c00121572048c506289f88d41ce67291 Mon Sep 17 00:00:00 2001 From: Matthew Holloway Date: Tue, 4 Nov 2025 06:26:25 +1300 Subject: [PATCH 245/405] fix: ask google not to index noscript content (#9844) --- ietf/templates/base.html | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ietf/templates/base.html b/ietf/templates/base.html index aa44955527..d8ff85f86e 100644 --- a/ietf/templates/base.html +++ b/ietf/templates/base.html @@ -96,7 +96,7 @@ {% endif %}
    -