-
+
{{ session.group.acronym }}
{% if session.purpose_id != "regular" and session.purpose_id != "none" %}
diff --git a/ietf/secr/templates/sreq/session_approval_notification.txt b/ietf/templates/meeting/session_approval_notification.txt
similarity index 56%
rename from ietf/secr/templates/sreq/session_approval_notification.txt
rename to ietf/templates/meeting/session_approval_notification.txt
index 7bb63aa3fa..74eca09bd8 100644
--- a/ietf/secr/templates/sreq/session_approval_notification.txt
+++ b/ietf/templates/meeting/session_approval_notification.txt
@@ -1,3 +1,4 @@
+{# Copyright The IETF Trust 2025, All Rights Reserved #}
Dear {{ group.parent }} Director(s):
{{ header }} meeting session request has just been
@@ -5,11 +6,11 @@ submitted by {{ requester }}.
The third session requires your approval.
To approve the session go to the session request view here:
-{{ settings.IDTRACKER_BASE_URL }}{% url "ietf.secr.sreq.views.view" acronym=group.acronym %}
+{{ settings.IDTRACKER_BASE_URL }}{% url "ietf.meeting.views_session_request.view_request" acronym=group.acronym %}
and click "Approve Third Session".
Regards,
The IETF Secretariat.
-{% include "includes/session_info.txt" %}
+{% include "meeting/session_request_info.txt" %}
diff --git a/ietf/templates/meeting/session_buttons_include.html b/ietf/templates/meeting/session_buttons_include.html
index bb5144b45d..b1fd92ba5b 100644
--- a/ietf/templates/meeting/session_buttons_include.html
+++ b/ietf/templates/meeting/session_buttons_include.html
@@ -2,7 +2,8 @@
{% load origin %}
{% load static %}
{% load textfilters tz %}
-{% load ietf_filters %}
+{% load ietf_filters session_filters %}
+
{% origin %}
{% if item and item|should_show_agenda_session_buttons %}
{% with slug=item.slug session=item.session timeslot=item.timeslot %}
@@ -126,10 +127,10 @@
{% else %}
{# chat logs #}
- {% if meeting.has_chat_logs %}
+ {% if meeting.has_chat_logs and session.chatlog %}
@@ -303,10 +304,10 @@
{% else %}
{# chat logs #}
- {% if meeting.has_chat_logs %}
+ {% if meeting.has_chat_logs and session.chatlog %}
+ href="/doc/{{ session.chatlog.document.name }}">
Chat logs
diff --git a/ietf/secr/templates/sreq/session_cancel_notification.txt b/ietf/templates/meeting/session_cancel_notification.txt
similarity index 71%
rename from ietf/secr/templates/sreq/session_cancel_notification.txt
rename to ietf/templates/meeting/session_cancel_notification.txt
index 8aee6c89db..3de67fc8f4 100644
--- a/ietf/secr/templates/sreq/session_cancel_notification.txt
+++ b/ietf/templates/meeting/session_cancel_notification.txt
@@ -1,3 +1,4 @@
+{# Copyright The IETF Trust 2025, All Rights Reserved #}
{% autoescape off %}{% load ams_filters %}
A request to cancel a meeting session has just been submitted by {{ requester }}.{% endautoescape %}
diff --git a/ietf/templates/meeting/session_details.html b/ietf/templates/meeting/session_details.html
index 571715b4b4..a4d9ba1090 100644
--- a/ietf/templates/meeting/session_details.html
+++ b/ietf/templates/meeting/session_details.html
@@ -1,5 +1,5 @@
{% extends "base.html" %}
-{# Copyright The IETF Trust 2015, All Rights Reserved #}
+{# Copyright The IETF Trust 2015-2026, All Rights Reserved #}
{% load origin ietf_filters static %}
{% block title %}{{ meeting }} : {{ group.acronym }}{% endblock %}
{% block morecss %}
@@ -31,93 +31,58 @@ Scheduled Sessions
{% include 'meeting/session_details_panel.html' with sessions=scheduled_sessions %}
Unscheduled Sessions
{% include 'meeting/session_details_panel.html' with sessions=unscheduled_sessions %}
- {% if pending_suggestions %}
-
+ {% for s in pending_suggestions %}
+ {% if forloop.first %}
{% if can_manage_materials %}
Proposed slides awaiting your approval
{% else %}
Your proposed slides awaiting chair approval
{% endif %}
-
- {% for s in pending_suggestions %}
- {% if can_manage_materials %}
-
-
- {{ s.submitter }} - {{ s.title }} ({{ s.time }})
-
-
- {% else %}
-
- {{ s.title }} ({{ s.time }})
-
- {% endif %}
- {% endfor %}
+
{% endif %}
+ {% endfor %}
+ {% if user|has_role:"Secretariat" %}
+
{% endif %}
+ {% comment %}
+ The existence of an element with id canManageMaterialsFlag is checked in
+ session_details.js to determine whether it should init the sortable tables.
+ Not the most elegant approach, but it works.
+ {% endcomment %}
+ {% if can_manage_materials %}
{% endif %}
{% endblock %}
{% block js %}
-
{% if can_manage_materials %}
-
{% endif %}
+
{% endblock %}
\ No newline at end of file
diff --git a/ietf/templates/meeting/session_details_form.html b/ietf/templates/meeting/session_details_form.html
index 6b59e7dacd..9cd1b6e85c 100644
--- a/ietf/templates/meeting/session_details_form.html
+++ b/ietf/templates/meeting/session_details_form.html
@@ -1,42 +1,48 @@
-{# Copyright The IETF Trust 2007-2020, All Rights Reserved #}
+{# Copyright The IETF Trust 2007-2025, All Rights Reserved #}
+{% load django_bootstrap5 %}
+
\ No newline at end of file
+ {{ form.attendees.as_hidden }}
+ {{ form.comments.as_hidden }}
+ {{ form.id.as_hidden }}
+ {{ form.on_agenda.as_hidden }}
+ {{ form.DELETE.as_hidden }}
+ {{ form.remote_instructions.as_hidden }}
+ {{ form.short.as_hidden }}
+ {{ form.agenda_note.as_hidden }}
+
diff --git a/ietf/templates/meeting/session_details_panel.html b/ietf/templates/meeting/session_details_panel.html
index a0f5884b9d..7c52ac0b4a 100644
--- a/ietf/templates/meeting/session_details_panel.html
+++ b/ietf/templates/meeting/session_details_panel.html
@@ -109,7 +109,7 @@ Agenda, Minutes, and Bluesheets
{% endif %}
{% if not session.type_counter.minutes %}
- Import minutes from notes.ietf.org
+ Import minutes from notes.ietf.org
Upload minutes
@@ -310,7 +310,7 @@ Notes and recordings
{% endif %}
+
+ {% if can_manage_materials %}
+
+ Link additional recordings to session
+
+ {% endif %}
+
{% endwith %}{% endwith %}
{% endfor %}
diff --git a/ietf/secr/templates/sreq/not_meeting_notification.txt b/ietf/templates/meeting/session_not_meeting_notification.txt
similarity index 83%
rename from ietf/secr/templates/sreq/not_meeting_notification.txt
rename to ietf/templates/meeting/session_not_meeting_notification.txt
index 1120f8480c..0e5c940708 100644
--- a/ietf/secr/templates/sreq/not_meeting_notification.txt
+++ b/ietf/templates/meeting/session_not_meeting_notification.txt
@@ -1,3 +1,4 @@
+{# Copyright The IETF Trust 2025, All Rights Reserved #}
{% load ams_filters %}
{{ login|smart_login }} {{ group.acronym }} working group, indicated that the {{ group.acronym }} working group does not plan to hold a session at IETF {{ meeting.number }}.
diff --git a/ietf/templates/meeting/session_request_confirm.html b/ietf/templates/meeting/session_request_confirm.html
new file mode 100644
index 0000000000..09043d3d0c
--- /dev/null
+++ b/ietf/templates/meeting/session_request_confirm.html
@@ -0,0 +1,38 @@
+{# Copyright The IETF Trust 2025, All Rights Reserved #}
+{% extends "base.html" %}
+{% load static ietf_filters django_bootstrap5 %}
+{% block title %}Confirm Session Request{% endblock %}
+
+{% block content %}
+ Confirm Session Request - IETF {{ meeting.number }}
+
+
+
+
+
+
+
+ {% include "meeting/session_request_view_table.html" %}
+
+
+
+
+
+{% endblock %}
+
+{% block js %}
+
+{% endblock %}
\ No newline at end of file
diff --git a/ietf/templates/meeting/session_request_form.html b/ietf/templates/meeting/session_request_form.html
new file mode 100644
index 0000000000..ecf5cb7268
--- /dev/null
+++ b/ietf/templates/meeting/session_request_form.html
@@ -0,0 +1,206 @@
+{# Copyright The IETF Trust 2025, All Rights Reserved #}
+{% extends "base.html" %}
+{% load static ietf_filters django_bootstrap5 %}
+{% block title %}{% if is_create %}New {% else %}Edit {% endif %}Session Request{% endblock %}
+{% block morecss %}{{ block.super }}
+ .hidden {display: none !important;}
+ div.form-group {display: inline;}
+{% endblock %}
+{% block content %}
+ {% if is_create %}New {% else %}Edit {% endif %}Session Request
+
+ {% if is_create %}
+
+ {% endif %}
+
+
+
+
+
+{% endblock %}
+{% block js %}
+
+ {{ form.media }}
+{% endblock %}
\ No newline at end of file
diff --git a/ietf/templates/meeting/session_request_info.txt b/ietf/templates/meeting/session_request_info.txt
new file mode 100644
index 0000000000..2e96efb31f
--- /dev/null
+++ b/ietf/templates/meeting/session_request_info.txt
@@ -0,0 +1,26 @@
+{# Copyright The IETF Trust 2025, All Rights Reserved #}
+{% load ams_filters %}
+---------------------------------------------------------
+Working Group Name: {{ group.name }}
+Area Name: {{ group.parent }}
+Session Requester: {{ login }}
+{% if session.joint_with_groups %}{{ session.joint_for_session_display }} joint with: {{ session.joint_with_groups }}{% endif %}
+
+Number of Sessions: {{ session.num_session }}
+Length of Session(s): {% for session_length in session_lengths %}{{ session_length.total_seconds|display_duration }}{% if not forloop.last %}, {% endif %}{% endfor %}
+Number of Attendees: {{ session.attendees }}
+Conflicts to Avoid:
+{% for line in session.outbound_conflicts %} {{line}}
+{% endfor %}{% if session.session_time_relation_display %} {{ session.session_time_relation_display }}{% endif %}
+{% if session.adjacent_with_wg %} Adjacent with WG: {{ session.adjacent_with_wg }}{% endif %}
+{% if session.timeranges_display %} Can't meet: {{ session.timeranges_display|join:", " }}{% endif %}
+
+Participants who must be present:
+{% for person in session.bethere %} {{ person.ascii_name }}
+{% endfor %}
+Resources Requested:
+{% for resource in session.resources %} {{ resource.desc }}
+{% endfor %}
+Special Requests:
+ {{ session.comments }}
+---------------------------------------------------------
diff --git a/ietf/templates/meeting/session_request_list.html b/ietf/templates/meeting/session_request_list.html
new file mode 100644
index 0000000000..789b7006e5
--- /dev/null
+++ b/ietf/templates/meeting/session_request_list.html
@@ -0,0 +1,65 @@
+{# Copyright The IETF Trust 2025, All Rights Reserved #}
+{% extends "base.html" %}
+{% load static %}
+{% load ietf_filters %}
+{% load django_bootstrap5 %}
+{% block title %}Session Requests{% endblock %}
+{% block content %}
+ Session Requests IETF {{ meeting.number }}
+
+
+
+
+
+
+
The list below includes those working groups that you currently chair which do not already have a session scheduled. You can click on an acronym to complete a request for a new session at the upcoming IETF meeting. Click "Group will not meet" to send a notification that the group does not plan to meet.
+
+ {% for group in unscheduled_groups %}
+
+ {{ group.acronym }}
+ {% if group.not_meeting %}
+ (Currently, this group does not plan to hold a session at IETF {{ meeting.number }})
+ {% endif %}
+
+ {% empty %}
+ NONE
+ {% endfor %}
+
+
+
+
+
+
+
+
+
The list below includes those working groups for which you or your co-chair has requested sessions at the upcoming IETF meeting. You can click on an acronym to initiate changes to a session, or cancel a session.
+
+
+
+
+{% endblock %}
+
+{% block footer-extras %}
+ {% include "includes/sessions_footer.html" %}
+{% endblock %}
\ No newline at end of file
diff --git a/ietf/templates/meeting/session_request_locked.html b/ietf/templates/meeting/session_request_locked.html
new file mode 100644
index 0000000000..15c023ce33
--- /dev/null
+++ b/ietf/templates/meeting/session_request_locked.html
@@ -0,0 +1,21 @@
+{# Copyright The IETF Trust 2025, All Rights Reserved #}
+{% extends "base.html" %}
+{% load static ietf_filters django_bootstrap5 %}
+{% block title %}Session Request{% endblock %}
+
+{% block content %}
+ Session Request - IETF {{ meeting.number }}
+
+ View list of timeslot requests
+
+
+
+
+
{{ message }}
+
+
+ Back
+
+
+
+{% endblock %}
diff --git a/ietf/secr/templates/sreq/session_request_notification.txt b/ietf/templates/meeting/session_request_notification.txt
similarity index 56%
rename from ietf/secr/templates/sreq/session_request_notification.txt
rename to ietf/templates/meeting/session_request_notification.txt
index 75f2cbbae4..49dbbfc42c 100644
--- a/ietf/secr/templates/sreq/session_request_notification.txt
+++ b/ietf/templates/meeting/session_request_notification.txt
@@ -1,5 +1,6 @@
+{# Copyright The IETF Trust 2025, All Rights Reserved #}
{% autoescape off %}{% load ams_filters %}
{% filter wordwrap:78 %}{{ header }} meeting session request has just been submitted by {{ requester }}.{% endfilter %}
-{% include "includes/session_info.txt" %}{% endautoescape %}
+{% include "meeting/session_request_info.txt" %}{% endautoescape %}
diff --git a/ietf/templates/meeting/session_request_status.html b/ietf/templates/meeting/session_request_status.html
new file mode 100644
index 0000000000..65e98d6d23
--- /dev/null
+++ b/ietf/templates/meeting/session_request_status.html
@@ -0,0 +1,28 @@
+{# Copyright The IETF Trust 2025, All Rights Reserved #}
+{% extends "base.html" %}
+{% load static %}
+{% load ietf_filters %}
+{% load django_bootstrap5 %}
+{% block title %}Session Request Status{% endblock %}
+{% block content %}
+ Session Request Status
+
+
+
+
+
Enter the message that you would like displayed to the WG Chair when this tool is locked.
+
+
+
+
+{% endblock %}
diff --git a/ietf/templates/meeting/session_request_view.html b/ietf/templates/meeting/session_request_view.html
new file mode 100644
index 0000000000..3db16f56cb
--- /dev/null
+++ b/ietf/templates/meeting/session_request_view.html
@@ -0,0 +1,59 @@
+{# Copyright The IETF Trust 2025, All Rights Reserved #}
+{% extends "base.html" %}
+{% load static ietf_filters django_bootstrap5 %}
+{% block title %}Session Request{% endblock %}
+
+{% block content %}
+ Session Request - IETF {{ meeting.number }}
+
+
+
+
+
+
+
+ {% include "meeting/session_request_view_table.html" %}
+
+
+
+
Activities Log
+
+
+
+
+ Date
+ Time
+ Action
+ Name
+
+
+
+ {% for entry in activities %}
+
+ {{ entry.act_date }}
+ {{ entry.act_time }}
+ {{ entry.activity }}
+ {{ entry.act_by }}
+
+ {% endfor %}
+
+
+
+
+
+
Edit
+ {% if show_approve_button %}
+
Approve Third Session
+ {% endif %}
+
Cancel this Request
+
Back
+
+
+
+{% endblock %}
+
+{% block js %}
+
+{% endblock %}
\ No newline at end of file
diff --git a/ietf/templates/meeting/session_request_view_formset.html b/ietf/templates/meeting/session_request_view_formset.html
new file mode 100644
index 0000000000..72811b8c2c
--- /dev/null
+++ b/ietf/templates/meeting/session_request_view_formset.html
@@ -0,0 +1,49 @@
+{# Copyright The IETF Trust 2025, All Rights Reserved #}
+{% load ams_filters %}{# keep this in sync with session_request_view_session_set.html #}
+{% for sess_form in formset %}
+ {% if sess_form.cleaned_data and not sess_form.cleaned_data.DELETE %}
+
+
+ Session {{ forloop.counter }}
+
+
+
+
Length
+
{{ sess_form.cleaned_data.requested_duration.total_seconds|display_duration }}
+
+ {% if sess_form.cleaned_data.name %}
+
+
Name
+
{{ sess_form.cleaned_data.name }}
+
+ {% endif %}
+ {% if sess_form.cleaned_data.purpose.slug != 'regular' %}
+
+
Purpose
+
+ {{ sess_form.cleaned_data.purpose }}
+ {% if sess_form.cleaned_data.purpose.timeslot_types|length > 1 %}({{ sess_form.cleaned_data.type }}
+ ){% endif %}
+
+
+
+
Onsite tool?
+
{{ sess_form.cleaned_data.has_onsite_tool|yesno }}
+
+ {% endif %}
+
+
+
+ {% if group.features.acts_like_wg and forloop.counter == 2 and not is_virtual %}
+
+
+ Time between sessions
+
+
+ {% if session.session_time_relation_display %}{{ session.session_time_relation_display }}{% else %}No
+ preference{% endif %}
+
+
+ {% endif %}
+ {% endif %}
+{% endfor %}
\ No newline at end of file
diff --git a/ietf/templates/meeting/session_request_view_session_set.html b/ietf/templates/meeting/session_request_view_session_set.html
new file mode 100644
index 0000000000..0b8412b04f
--- /dev/null
+++ b/ietf/templates/meeting/session_request_view_session_set.html
@@ -0,0 +1,47 @@
+{# Copyright The IETF Trust 2025, All Rights Reserved #}
+{% load ams_filters %}{# keep this in sync with session_request_view_formset.html #}
+{% for sess in session_set %}
+
+
+ Session {{ forloop.counter }}
+
+
+
+
Length
+
{{ sess.requested_duration.total_seconds|display_duration }}
+
+ {% if sess.name %}
+
+
Name
+
{{ sess.name }}
+
+ {% endif %}
+ {% if sess.purpose.slug != 'regular' %}
+
+
Purpose
+
+ {{ sess.purpose }}
+ {% if sess.purpose.timeslot_types|length > 1 %}({{ sess.type }}){% endif %}
+
+
+
+
Onsite tool?
+
{{ sess.has_onsite_tool|yesno }}
+
+ {% endif %}
+
+
+
+{% if group.features.acts_like_wg and forloop.counter == 2 and not is_virtual %}
+
+
+ Time between sessions
+
+
+ {% if session.session_time_relation_display %}{{ session.session_time_relation_display }}{% else %}No
+ preference{% endif %}
+
+
+{% endif %}
+
+{% endfor %}
\ No newline at end of file
diff --git a/ietf/templates/meeting/session_request_view_table.html b/ietf/templates/meeting/session_request_view_table.html
new file mode 100644
index 0000000000..a5cb85c252
--- /dev/null
+++ b/ietf/templates/meeting/session_request_view_table.html
@@ -0,0 +1,146 @@
+{# Copyright The IETF Trust 2025, All Rights Reserved #}
+{% load ams_filters %}
+
+
+
+ Working Group Name
+
+
+ {{ group.name }} ({{ group.acronym }})
+
+
+
+
+
+ Area Name
+
+
+ {{ group.parent }}
+
+
+
+
+
+ Number of Sessions Requested
+
+
+ {% if session.third_session %}3{% else %}{{ session.num_session }}{% endif %}
+
+
+
+{% if form %}
+ {% include 'meeting/session_request_view_formset.html' with formset=form.session_forms group=group session=session only %}
+{% else %}
+ {% include 'meeting/session_request_view_session_set.html' with session_set=sessions group=group session=session only %}
+{% endif %}
+
+
+
+
+ Number of Attendees
+
+
+ {{ session.attendees }}
+
+
+
+
+
+ Conflicts to Avoid
+
+
+ {% if session_conflicts.outbound %}
+ {% for conflict in session_conflicts.outbound %}
+
+
+ {{ conflict.name|title }}
+
+
+ {{ conflict.groups }}
+
+
+ {% endfor %}
+ {% else %}
None {% endif %}
+
+
+
+
+
+ Other WGs that included {{ group }} in their conflict list
+
+
+ {% if session_conflicts.inbound %}{{ session_conflicts.inbound }}{% else %}None so far {% endif %}
+
+
+
+{% if not is_virtual %}
+
+
+ Resources requested
+
+
+ {% if session.resources %}
{% for resource in session.resources %}{{ resource.desc }} {% endfor %} {% else %}
None so far {% endif %}
+
+
+{% endif %}
+
+
+
+ Participants who must be present
+
+
+ {% if session.bethere %}
{% for person in session.bethere %}{{ person }} {% endfor %} {% else %}
None {% endif %}
+
+
+
+
+
+ Cannot meet on
+
+
+ {% if session.timeranges_display %}{{ session.timeranges_display|join:', ' }}{% else %}No constraints{% endif %}
+
+
+
+{% if not is_virtual %}
+
+
+ Adjacent with WG
+
+
+ {{ session.adjacent_with_wg|default:'No preference' }}
+
+
+
+
+ Joint session
+
+
+ {% if session.joint_with_groups %}
+ {{ session.joint_for_session_display }} with: {{ session.joint_with_groups }}
+ {% else %}
+ Not a joint session
+ {% endif %}
+
+
+{% endif %}
+
+
+
+ Special Requests
+
+
+ {{ session.comments }}
+
+
+
+{% if form and form.notifications_optional %}
+
+
+ {{ form.send_notifications.label }}
+
+
+ {% if form.cleaned_data.send_notifications %}Yes{% else %}No{% endif %}
+
+
+{% endif %}
diff --git a/ietf/templates/meeting/timeslot_edit.html b/ietf/templates/meeting/timeslot_edit.html
index 11691ba6dd..3259dba9da 100644
--- a/ietf/templates/meeting/timeslot_edit.html
+++ b/ietf/templates/meeting/timeslot_edit.html
@@ -11,20 +11,22 @@
{% endcomment %}
.timeslot-edit { overflow: auto; height: max(30rem, calc(100vh - 25rem));}
.tstable { width: 100%; border-collapse: separate; } {# "separate" to ensure sticky cells keep their borders #}
-.tstable thead { position: sticky; top: 0; z-index: 3; background-color: white;}
-.tstable th:first-child, .tstable td:first-child {
- background-color: white; {# needs to match the lighter of the striped-table colors! #}
-position: sticky;
-left: 0;
- z-index: 2; {# render above other cells / borders but below thead (z-index 3, above) #}
-}
-.tstable tbody > tr:nth-of-type(odd) > th:first-child {
- background-color: rgb(249, 249, 249); {# needs to match the darker of the striped-table colors! #}
-}
-.tstable th { white-space: nowrap;}
-.tstable td { white-space: nowrap;}
-.capacity { font-size:80%; font-weight: normal;}
-a.new-timeslot-link { color: lightgray; font-size: large;}
+ .tstable tr th:first-child { min-width: 25rem; max-width: 25rem; overflow: hidden; }
+ .tstable thead { position: sticky; top: 0; z-index: 3; background-color: white;}
+ .tstable thead th span.day { position: sticky; left: 25.5rem; }
+ .tstable th:first-child, .tstable td:first-child {
+ background-color: white; {# needs to match the lighter of the striped-table colors! #}
+ position: sticky;
+ left: 0;
+ z-index: 2; {# render above other cells / borders but below thead (z-index 3, above) #}
+ }
+ .tstable tbody > tr:nth-of-type(odd) > th:first-child {
+ background-color: rgb(249, 249, 249); {# needs to match the darker of the striped-table colors! #}
+ }
+ .tstable th { white-space: nowrap;}
+ .tstable td { white-space: nowrap;}
+ .capacity { font-size:80%; font-weight: normal;}
+ a.new-timeslot-link { color: lightgray; font-size: large;}
{% endblock %}
{% block content %}
{% origin %}
@@ -84,12 +86,14 @@
{% for day in time_slices %}
- {{ day|date:'D' }} ({{ day }})
-
-
+
+ {{ day|date:'D' }} ({{ day }})
+
+
+
{% endfor %}
{% endif %}
diff --git a/ietf/templates/meeting/upcoming.ics b/ietf/templates/meeting/upcoming.ics
deleted file mode 100644
index 5eca7ec81d..0000000000
--- a/ietf/templates/meeting/upcoming.ics
+++ /dev/null
@@ -1,32 +0,0 @@
-{% load humanize tz %}{% autoescape off %}{% load ietf_filters textfilters %}BEGIN:VCALENDAR
-VERSION:2.0
-METHOD:PUBLISH
-PRODID:-//IETF//datatracker.ietf.org ical upcoming//EN
-{{vtimezones}}{% for item in assignments %}BEGIN:VEVENT
-UID:ietf-{{item.session.meeting.number}}-{{item.timeslot.pk}}
-SUMMARY:{% if item.session.name %}{{item.session.group.acronym|lower}} - {{item.session.name|ics_esc}}{% else %}{{item.session.group.acronym|lower}} - {{item.session.group.name}}{%endif%}
-{% if item.schedule.meeting.city %}LOCATION:{{item.schedule.meeting.city}},{{item.schedule.meeting.country}}
-{% endif %}STATUS:{{item.session.ical_status}}
-CLASS:PUBLIC
-DTSTART{% ics_date_time item.timeslot.local_start_time item.schedule.meeting.time_zone %}
-DTEND{% ics_date_time item.timeslot.local_end_time item.schedule.meeting.time_zone %}
-DTSTAMP{% ics_date_time item.timeslot.modified|utc 'utc' %}{% if item.session.agenda %}
-URL:{{item.session.agenda.get_href}}{% endif %}
-DESCRIPTION:{% if item.timeslot.name %}{{item.timeslot.name|ics_esc}}\n{% endif %}{% if item.session.agenda_note %}
- Note: {{item.session.agenda_note|ics_esc}}\n{% endif %}{% for material in item.session.materials.all %}
- \n{{material.type}}{% if material.type.name != "Agenda" %}
- ({{material.title|ics_esc}}){% endif %}:
- {{material.get_href}}\n{% endfor %}{% if item.session.remote_instructions %}
- Remote instructions: {{ item.session.remote_instructions }}\n{% endif %}
-END:VEVENT
-{% endfor %}{% for meeting in ietfs %}BEGIN:VEVENT
-UID:ietf-{{ meeting.number }}
-SUMMARY:IETF {{ meeting.number }}{% if meeting.city %}
-LOCATION:{{ meeting.city }},{{ meeting.country }}{% endif %}
-CLASS:PUBLIC
-DTSTART;VALUE=DATE{% if meeting.time_zone %};TZID={{ meeting.time_zone|ics_esc }}{% endif %}:{{ meeting.date|date:"Ymd" }}
-DTEND;VALUE=DATE{% if meeting.time_zone %};TZID={{ meeting.time_zone|ics_esc }}{% endif %}:{{ meeting.end_date|next_day|date:"Ymd" }}
-DTSTAMP{% ics_date_time meeting.cached_updated|utc 'utc' %}
-URL:{{ request.scheme }}://{{ request.get_host }}{% url 'agenda' num=meeting.number %}
-END:VEVENT
-{% endfor %}END:VCALENDAR{% endautoescape %}
diff --git a/ietf/templates/meeting/upload_session_minutes.html b/ietf/templates/meeting/upload_session_minutes.html
index 30eadda277..324440681f 100644
--- a/ietf/templates/meeting/upload_session_minutes.html
+++ b/ietf/templates/meeting/upload_session_minutes.html
@@ -26,6 +26,11 @@
{% if session_number %}
Session {{ session_number }} : {{ session.official_timeslotassignment.timeslot.time|timezone:session.meeting.time_zone|date:"D M-d-Y Hi" }}
{% endif %}
+ {% if future %}
+
+ Caution: Session has not ended yet
+
+ {% endif %}
{% endblock %}
\ No newline at end of file
diff --git a/ietf/templates/person/merge_request_email.txt b/ietf/templates/person/merge_request_email.txt
new file mode 100644
index 0000000000..0a695f036c
--- /dev/null
+++ b/ietf/templates/person/merge_request_email.txt
@@ -0,0 +1,23 @@
+Hello,
+
+We have identified multiple IETF Datatracker accounts that may represent a single person:
+
+https://datatracker.ietf.org/person/{{ source_account }}
+
+and
+
+https://datatracker.ietf.org/person/{{ target_account }}
+
+If this is so, then it is important that we merge the accounts.
+
+This email is being sent to the primary emails associated with each Datatracker account.
+
+Please respond to this message individually from the email account(s) you control so we can take the appropriate action.
+
+If these should be merged, please identify which account you would like to keep the login credentials from.
+
+If you are associated with but no longer have access to one of the email accounts, then please let us know and we will follow up to determine how to proceed.
+
+
+{{ sender_name }}
+IETF Support
\ No newline at end of file
diff --git a/ietf/templates/person/merge_submit.html b/ietf/templates/person/merge_submit.html
new file mode 100644
index 0000000000..30e1999f81
--- /dev/null
+++ b/ietf/templates/person/merge_submit.html
@@ -0,0 +1,57 @@
+{# Copyright The IETF Trust 2025, All Rights Reserved #}
+{% extends "base.html" %}
+{% load static %}
+{% load django_bootstrap5 %}
+{% block title %}Merge Persons{% endblock %}
+{% block content %}
+ Merge Person Records
+
+ This tool will merge two Person records into one. If both records have logins and you want to retain the one on the left, use the Swap button to swap source and target records.
+
+
+{% endblock %}
\ No newline at end of file
diff --git a/ietf/templates/person/profile.html b/ietf/templates/person/profile.html
index 1424f037a1..a78a90412f 100644
--- a/ietf/templates/person/profile.html
+++ b/ietf/templates/person/profile.html
@@ -50,7 +50,11 @@ Roles
{% for role in person.role_set.all|active_roles %}
- {{ role.name.name }}
+ {{ role.name.name }}
+ {% if role.name.name == 'Reviewer' %}
+ (See reviews )
+ {% endif %}
+
{{ role.group.name }}
({{ role.group.acronym }} )
diff --git a/ietf/templates/person/send_merge_request.html b/ietf/templates/person/send_merge_request.html
new file mode 100644
index 0000000000..f0c6272dca
--- /dev/null
+++ b/ietf/templates/person/send_merge_request.html
@@ -0,0 +1,20 @@
+{# Copyright The IETF Trust 2025, All Rights Reserved #}
+{% extends "base.html" %}
+{% load static %}
+{% load django_bootstrap5 %}
+{% block title %}Send Merge Notice{% endblock %}
+{% block content %}
+ Send Merge Notice
+ {% if form.non_field_errors %}{{ form.non_field_errors }}
{% endif %}
+
+{% endblock %}
diff --git a/ietf/templates/registration/change_password.html b/ietf/templates/registration/change_password.html
index 21c102bd0a..58bc2d2587 100644
--- a/ietf/templates/registration/change_password.html
+++ b/ietf/templates/registration/change_password.html
@@ -34,11 +34,14 @@ Change password
Change password
-
Online attack: This password form uses the
+
Password strength requirements:
+ You must choose a password at least 12 characters long that scores at least a 3 according to the
zxcvbn
- password strength estimator to give an indication of password strength.
- The crack time estimate given above assume online attack without rate
- limiting, at a rate of 10 attempts per second.
+ password strength estimator. A warning will appear if your password does not meet this standard.
+
+
Online attack:
+ The crack time estimate given above assumes an online attack at a rate of 10 attempts per second.
+ It is only a very rough guideline.
Offline cracking:
The datatracker currently uses the
{{ hasher.algorithm }}
diff --git a/ietf/templates/registration/edit_profile.html b/ietf/templates/registration/edit_profile.html
index 1837016b15..1e4ab169e1 100644
--- a/ietf/templates/registration/edit_profile.html
+++ b/ietf/templates/registration/edit_profile.html
@@ -32,12 +32,19 @@
Your account
Change password
- {% if person.photo %}
+ {% if person.photo or person.role_set.exists %}
Photo
-
{% include "person/photo.html" with person=person %}
+
+ {% if person.photo %}
+ {% include "person/photo.html" with person=person %}
+ {% endif %}
+ {% if person.role_set.exists %}
+
Email support@ietf.org
+ to update your photo.
+ {% endif %}
{% endif %}
diff --git a/ietf/templates/review/completed_review.txt b/ietf/templates/review/completed_review.txt
index bdbe321ca1..7d10d8bf13 100644
--- a/ietf/templates/review/completed_review.txt
+++ b/ietf/templates/review/completed_review.txt
@@ -1,7 +1,9 @@
{% load ietf_filters %}{% autoescape off %}{% filter maybewordwrap:80 %}{% if assignment.state_id == "part-completed" %}
Review is partially done. Another assignment may be needed to complete it.
-{% endif %}Reviewer: {{ assignment.reviewer.person }}
+{% endif %}Document: {{ assignment.review_request.doc.name }}
+Title: {{ assignment.review_request.doc.title }}
+Reviewer: {{ assignment.reviewer.person }}
Review result: {{ assignment.result.name }}
{{ content }}
diff --git a/ietf/templates/submit/manual_post.html b/ietf/templates/submit/manual_post.html
index 6e4a2ba42a..0da83e750f 100644
--- a/ietf/templates/submit/manual_post.html
+++ b/ietf/templates/submit/manual_post.html
@@ -1,5 +1,5 @@
{% extends "submit/submit_base.html" %}
-{# Copyright The IETF Trust 2015, All Rights Reserved #}
+{# Copyright The IETF Trust 2015-2026, All Rights Reserved #}
{% load origin static %}
{% block pagehead %}
@@ -27,17 +27,9 @@
Submissions needing manual posting
{% for s in manual %}
- {% if user.is_authenticated %}
-
-
- {{ s.name }}-{{ s.rev }}
-
-
- {% else %}
-
- {{ s.name }}-{{ s.rev }}
-
- {% endif %}
+
+ {{ s.name }}-{{ s.rev }}
+
{{ s.submission_date }}
{% if s.passes_checks %}
diff --git a/ietf/templates/submit/submit_base.html b/ietf/templates/submit/submit_base.html
index 0613cf5d6a..a2c7be1a3b 100644
--- a/ietf/templates/submit/submit_base.html
+++ b/ietf/templates/submit/submit_base.html
@@ -4,7 +4,7 @@
{% block pagehead %}{{ block.super }}{% endblock %}
{% block content %}
{% origin %}
- Internet-Draft submission
+ Submit an Internet-Draft
{% endblock %}
diff --git a/ietf/templates/sync/bcp-index.txt b/ietf/templates/sync/bcp-index.txt
new file mode 100644
index 0000000000..dd19920eba
--- /dev/null
+++ b/ietf/templates/sync/bcp-index.txt
@@ -0,0 +1,52 @@
+
+
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ BCP INDEX
+ -------------
+
+(CREATED ON: {{created_on}}.)
+
+This file contains citations for all BCPs in numeric order. The BCPs
+form a sub-series of the RFC document series, specifically those RFCs
+with the status BEST CURRENT PRACTICE.
+
+BCP citations appear in this format:
+
+ [BCP#] Best Current Practice #,
+ .
+ At the time of writing, this BCP comprises the following:
+
+ Author 1, Author 2, "Title of the RFC", BCP #, RFC №,
+ DOI DOI string, Issue date,
+ .
+
+For example:
+
+ [BCP3] Best Current Practice 3,
+ .
+ At the time of writing, this BCP comprises the following:
+
+ F. Kastenholz, "Variance for The PPP Compression Control Protocol
+ and The PPP Encryption Control Protocol", BCP 3, RFC 1915,
+ DOI 10.17487/RFC1915, February 1996,
+ .
+
+Key to fields:
+
+# is the BCP number.
+
+№ is the RFC number.
+
+BCPs and other RFCs may be obtained from https://www.rfc-editor.org.
+
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ BCP INDEX
+ ---------
+
+
+
+{% for bcp in bcps %}{{bcp|safe}}
+
+{% endfor %}
diff --git a/ietf/templates/sync/fyi-index.txt b/ietf/templates/sync/fyi-index.txt
new file mode 100644
index 0000000000..cf9d57d570
--- /dev/null
+++ b/ietf/templates/sync/fyi-index.txt
@@ -0,0 +1,52 @@
+
+
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ FYI INDEX
+ -------------
+
+(CREATED ON: {{created_on}}.)
+
+This file contains citations for all FYIs in numeric order. The FYIs
+(For Your Information) documents form a sub-series of the RFC series,
+specifically those documents that may be of particular interest
+to Internet users. The corresponding RFCs have status INFORMATIONAL.
+
+FYI citations appear in this format:
+
+ [FYI#] For Your Information #,
+ .
+ At the time of writing, this FYI comprises the following:
+
+ Author 1, Author 2, "Title of the RFC", FYI #, RFC №,
+ DOI DOI string, Issue date,
+ .
+
+For example:
+
+ [FYI8] For Your Information 8,
+ <https://www.rfc-editor.org/info/fyi8>.
+ At the time of writing, this FYI comprises the following:
+
+ B. Fraser, "Site Security Handbook", FYI 8, RFC 2196,
+ DOI 10.17487/RFC2196, September 1997,
+ <https://www.rfc-editor.org/info/rfc2196>.
+
+Key to fields:
+
+# is the FYI number.
+
+№ is the RFC number.
+
+FYIs and other RFCs may be obtained from https://www.rfc-editor.org.
+
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ FYI INDEX
+ ---------
+
+
+
+{% for fyi in fyis %}{{fyi|safe}}
+
+{% endfor %}
diff --git a/ietf/templates/sync/rfc-index.txt b/ietf/templates/sync/rfc-index.txt
new file mode 100644
index 0000000000..0f01ddfa90
--- /dev/null
+++ b/ietf/templates/sync/rfc-index.txt
@@ -0,0 +1,69 @@
+
+
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ RFC INDEX
+ -------------
+
+(CREATED ON: {{created_on}}.)
+
+This file contains citations for all RFCs in numeric order.
+
+RFC citations appear in this format:
+
+ #### Title of RFC. Author 1, Author 2, Author 3. Issue date.
+ (Format: ASCII) (Obsoletes xxx) (Obsoleted by xxx) (Updates xxx)
+ (Updated by xxx) (Also FYI ####) (Status: ssssss) (DOI: ddd)
+
+or
+
+ #### Not Issued.
+
+For example:
+
+ 1129 Internet Time Synchronization: The Network Time Protocol. D.L.
+ Mills. October 1989. (Format: TXT, PS, PDF, HTML) (Also RFC1119)
+ (Status: INFORMATIONAL) (DOI: 10.17487/RFC1129)
+
+Key to citations:
+
+#### is the RFC number.
+
+Following the RFC number are the title, the author(s), and the
+publication date of the RFC. Each of these is terminated by a period.
+
+Following the number are the title (terminated with a period), the
+author, or list of authors (terminated with a period), and the date
+(terminated with a period).
+
+The format follows in parentheses. One or more of the following formats
+are listed: text (TXT), PostScript (PS), Portable Document Format
+(PDF), HTML, XML.
+
+Obsoletes xxxx refers to other RFCs that this one replaces;
+Obsoleted by xxxx refers to RFCs that have replaced this one.
+Updates xxxx refers to other RFCs that this one merely updates (but
+does not replace); Updated by xxxx refers to RFCs that have updated
+(but not replaced) this one. Generally, only immediately succeeding
+and/or preceding RFCs are indicated, not the entire history of each
+related earlier or later RFC in a related series.
+
+The (Also FYI ##) or (Also STD ##) or (Also BCP ##) phrase gives the
+equivalent FYI, STD, or BCP number if the RFC is also in those
+document sub-series. The Status field gives the document's
+current status (see RFC 2026). The (DOI ddd) field gives the
+Digital Object Identifier.
+
+RFCs may be obtained in a number of ways, using HTTP, FTP, or email.
+See the RFC Editor Web page https://www.rfc-editor.org
+
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ RFC INDEX
+ ---------
+
+
+
+{% for rfc in rfcs %}{{rfc|safe}}
+
+{% endfor %}
diff --git a/ietf/templates/sync/std-index.txt b/ietf/templates/sync/std-index.txt
new file mode 100644
index 0000000000..a4a5fba946
--- /dev/null
+++ b/ietf/templates/sync/std-index.txt
@@ -0,0 +1,51 @@
+
+
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ STD INDEX
+ -------------
+
+(CREATED ON: {{created_on}}.)
+
+This file contains citations for all STDs in numeric order. Each
+STD represents a single Internet Standard technical specification,
+composed of one or more RFCs with Internet Standard status.
+
+STD citations appear in this format:
+
+ [STD#] Internet Standard #,
+ .
+ At the time of writing, this STD comprises the following:
+
+ Author 1, Author 2, "Title of the RFC", STD #, RFC №,
+ DOI DOI string, Issue date,
+ .
+
+For example:
+
+ [STD6] Internet Standard 6,
+ <https://www.rfc-editor.org/info/std6>.
+ At the time of writing, this STD comprises the following:
+
+ J. Postel, "User Datagram Protocol", STD 6, RFC 768,
+ DOI 10.17487/RFC0768, August 1980,
+ <https://www.rfc-editor.org/info/rfc768>.
+
+Key to fields:
+
+# is the STD number.
+
+№ is the RFC number.
+
+STDs and other RFCs may be obtained from https://www.rfc-editor.org.
+
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ STD INDEX
+ ---------
+
+
+
+{% for std in stds %}{{std|safe}}
+
+{% endfor %}
diff --git a/ietf/utils/admin.py b/ietf/utils/admin.py
index 6c1c8726e1..cb8841cdc6 100644
--- a/ietf/utils/admin.py
+++ b/ietf/utils/admin.py
@@ -1,58 +1,30 @@
-# Copyright The IETF Trust 2011-2020, All Rights Reserved
-# -*- coding: utf-8 -*-
+# Copyright The IETF Trust 2011-2026, All Rights Reserved
from django.contrib import admin
-from django.utils.encoding import force_str
-
-def name(obj):
- if hasattr(obj, 'abbrev'):
- return obj.abbrev()
- elif hasattr(obj, 'name'):
- if callable(obj.name):
- name = obj.name()
- else:
- name = force_str(obj.name)
- if name:
- return name
- return str(obj)
-
-def admin_link(field, label=None, ordering="", display=name, suffix=""):
- if not label:
- label = field.capitalize().replace("_", " ").strip()
- if ordering == "":
- ordering = field
- def _link(self):
- obj = self
- for attr in field.split("__"):
- obj = getattr(obj, attr)
- if callable(obj):
- obj = obj()
- if hasattr(obj, "all"):
- objects = obj.all()
- elif callable(obj):
- objects = obj()
- if not hasattr(objects, "__iter__"):
- objects = [ objects ]
- elif hasattr(obj, "__iter__"):
- objects = obj
- else:
- objects = [ obj ]
- chunks = []
- for obj in objects:
- app = obj._meta.app_label
- model = obj.__class__.__name__.lower()
- id = obj.pk
- chunks += [ '%(display)s ' %
- {'app':app, "model": model, "id":id, "display": display(obj), "suffix":suffix, } ]
- return ", ".join(chunks)
- _link.allow_tags = True
- _link.short_description = label
- _link.admin_order_field = ordering
- return _link
-
-from .models import DumpInfo
+from .models import DumpInfo, DirtyBits
+
+
+class SaferStackedInline(admin.StackedInline):
+ """StackedInline without delete by default"""
+
+ can_delete = False # no delete button
+ show_change_link = True # show a link to the resource (where it can be deleted)
+
+
+class SaferTabularInline(admin.TabularInline):
+ """TabularInline without delete by default"""
+
+ can_delete = False # no delete button
+ show_change_link = True # show a link to the resource (where it can be deleted)
+
+
+@admin.register(DumpInfo)
class DumpInfoAdmin(admin.ModelAdmin):
- list_display = ['date', 'host', 'tz']
- list_filter = ['date']
-admin.site.register(DumpInfo, DumpInfoAdmin)
+ list_display = ["date", "host", "tz"]
+ list_filter = ["date"]
+
+
+@admin.register(DirtyBits)
+class DirtyBitsAdmin(admin.ModelAdmin):
+ list_display = ["slug", "dirty_time", "processed_time"]
diff --git a/ietf/utils/aiosmtpd.py b/ietf/utils/aiosmtpd.py
new file mode 100644
index 0000000000..3e4cd65dd9
--- /dev/null
+++ b/ietf/utils/aiosmtpd.py
@@ -0,0 +1,73 @@
+# Copyright The IETF Trust 2014-2025, All Rights Reserved
+"""aiosmtpd-related utilities
+
+These are for testing / dev use. If you're using this for production code, think very
+hard about the choices you're making...
+"""
+from aiosmtpd import handlers
+from aiosmtpd.controller import Controller
+from aiosmtpd.smtp import SMTP
+from email.utils import parseaddr
+from typing import Optional, TextIO
+
+
+class SMTPTestHandler:
+
+ def __init__(self, inbox: list):
+ self.inbox = inbox
+
+ async def handle_DATA(self, server, session, envelope):
+ """Handle the DATA command and 'deliver' the message"""
+
+ self.inbox.append(envelope.content)
+ # Per RFC2033: https://datatracker.ietf.org/doc/html/rfc2033.html#section-4.2
+ # ...after the final ".", the server returns one reply
+ # for each previously successful RCPT command in the mail transaction,
+ # in the order that the RCPT commands were issued. Even if there were
+ # multiple successful RCPT commands giving the same forward-path, there
+ # must be one reply for each successful RCPT command.
+ return "\n".join("250 OK" for _ in envelope.rcpt_tos)
+
+ async def handle_RCPT(self, server, session, envelope, address, rcpt_options):
+ """Handle an RCPT command and add the address to the envelope if it is acceptable"""
+ _, address = parseaddr(address)
+ if address == "":
+ return "501 Syntax: RCPT TO: <address>"
+ if "poison" in address:
+ return "550 Error: Not touching that"
+ # At this point the address is acceptable
+ envelope.rcpt_tos.append(address)
+ return "250 OK"
+
+
+class SMTPTestServerDriver:
+
+ def __init__(self, address: str, port: int, inbox: Optional[list] = None):
+ # Allow longer lines than the 1001 that RFC 5321 requires. As of 2025-04-16 the
+ # datatracker emits some non-compliant messages.
+ # See https://aiosmtpd.aio-libs.org/en/latest/smtp.html
+ SMTP.line_length_limit = 4000 # tests start failing between 3000 and 4000
+ self.controller = Controller(
+ hostname=address,
+ port=port,
+ handler=SMTPTestHandler(inbox=[] if inbox is None else inbox),
+ )
+
+ def start(self):
+ self.controller.start()
+
+ def stop(self):
+ self.controller.stop()
+
+
+class DevDebuggingHandler(handlers.Debugging):
+ """Debugging handler for use in dev ONLY"""
+ def __init__(self, stream: Optional[TextIO] = None):
+ # Allow longer lines than the 1001 that RFC 5321 requires. As of 2025-04-16 the
+ # datatracker emits some non-compliant messages.
+ # See https://aiosmtpd.aio-libs.org/en/latest/smtp.html
+ # Doing this in a handler class is a huge hack. Tests all pass with this set
+ # to 4000, but make the limit longer for dev just in case.
+ SMTP.line_length_limit = 10000
+ super().__init__(stream)
+
diff --git a/ietf/utils/coverage.py b/ietf/utils/coverage.py
new file mode 100644
index 0000000000..bd205ce586
--- /dev/null
+++ b/ietf/utils/coverage.py
@@ -0,0 +1,90 @@
+# Copyright The IETF Trust 2025, All Rights Reserved
+from coverage import Coverage, CoverageData, FileReporter
+from coverage.control import override_config as override_coverage_config
+from coverage.results import Numbers
+from coverage.report_core import get_analysis_to_report
+from coverage.results import Analysis
+from django.conf import settings
+
+
+class CoverageManager:
+ checker: Coverage | None = None
+ started = False
+
+ def start(self):
+ if settings.SERVER_MODE != "production" and not self.started:
+ self.checker = Coverage(
+ source=[settings.BASE_DIR],
+ cover_pylib=False,
+ omit=settings.TEST_CODE_COVERAGE_EXCLUDE_FILES,
+ )
+ for exclude_regex in getattr(
+ settings,
+ "TEST_CODE_COVERAGE_EXCLUDE_LINES",
+ [],
+ ):
+ self.checker.exclude(exclude_regex)
+ self.checker.start()
+ self.started = True
+
+ def stop(self):
+ if self.checker is not None:
+ self.checker.stop()
+
+ def save(self):
+ if self.checker is not None:
+ self.checker.save()
+
+ def report(self, include: list[str] | None = None):
+ if self.checker is None:
+ return None
+ reporter = CustomDictReporter()
+ with override_coverage_config(
+ self.checker,
+ report_include=include,
+ ):
+ return reporter.report(self.checker)
+
+
+class CustomDictReporter: # pragma: no cover
+ total = Numbers()
+
+ def report(self, coverage):
+ coverage_data = coverage.get_data()
+ coverage_data.set_query_contexts(None)
+ measured_files = {}
+ for file_reporter, analysis in get_analysis_to_report(coverage, None):
+ measured_files[file_reporter.relative_filename()] = self.report_one_file(
+ coverage_data,
+ analysis,
+ file_reporter,
+ )
+ tot_numer, tot_denom = self.total.ratio_covered
+ return {
+ "coverage": 1 if tot_denom == 0 else tot_numer / tot_denom,
+ "covered": measured_files,
+ "format": 5,
+ }
+
+ def report_one_file(
+ self,
+ coverage_data: CoverageData,
+ analysis: Analysis,
+ file_reporter: FileReporter,
+ ):
+ """Extract the relevant report data for a single file."""
+ nums = analysis.numbers
+ self.total += nums
+ n_statements = nums.n_statements
+ numer, denom = nums.ratio_covered
+ fraction_covered = 1 if denom == 0 else numer / denom
+ missing_line_nums = sorted(analysis.missing)
+ # Extract missing lines from source files
+ source_lines = file_reporter.source().splitlines()
+ missing_lines = [source_lines[num - 1] for num in missing_line_nums]
+ return (
+ n_statements,
+ fraction_covered,
+ missing_line_nums,
+ missing_lines,
+ )
diff --git a/ietf/utils/db.py b/ietf/utils/db.py
index d451f6cfd8..49c89da13a 100644
--- a/ietf/utils/db.py
+++ b/ietf/utils/db.py
@@ -1,28 +1,67 @@
-# Copyright The IETF Trust 2021, All Rights Reserved
-# -*- coding: utf-8 -*-
-
-# Taken from/inspired by
-# https://stackoverflow.com/questions/55147169/django-admin-jsonfield-default-empty-dict-wont-save-in-admin
-#
-# JSONField should recognize {}, (), and [] as valid, non-empty JSON
-# values. However, the base Field class excludes them
+# Copyright The IETF Trust 2021-2025, All Rights Reserved
+
import jsonfield
+from django.db import models
+
+from ietf.utils.fields import (
+ IETFJSONField as FormIETFJSONField,
+ EmptyAwareJSONField as FormEmptyAwareJSONField,
+)
+
+
+class EmptyAwareJSONField(models.JSONField):
+ """JSONField that allows empty JSON values when model specifies empty=False
+
+ Taken from/inspired by
+ https://stackoverflow.com/questions/55147169/django-admin-jsonfield-default-empty-dict-wont-save-in-admin
+
+ JSONField should recognize {}, (), and [] as valid, non-empty JSON values.
-from ietf.utils.fields import IETFJSONField as FormIETFJSONField
+ If customizing the formfield, the field must accept the `empty_values` argument.
+ """
+
+ def __init__(
+ self,
+ *args,
+ empty_values=FormEmptyAwareJSONField.empty_values,
+ accepted_empty_values=None,
+ **kwargs,
+ ):
+ if accepted_empty_values is None:
+ accepted_empty_values = []
+ self.empty_values = [x for x in empty_values if x not in accepted_empty_values]
+ super().__init__(*args, **kwargs)
+
+ def formfield(self, **kwargs):
+ defaults = {
+ "form_class": FormEmptyAwareJSONField,
+ "empty_values": self.empty_values,
+ }
+ defaults.update(kwargs)
+ return super().formfield(**defaults)
-class IETFJSONField(jsonfield.JSONField):
+class IETFJSONField(jsonfield.JSONField): # pragma: no cover
+ # Deprecated - use EmptyAwareJSONField instead (different base class requires a
+ # new field name)
+ # Remove this class when migrations are squashed and it is no longer referenced
form_class = FormIETFJSONField
- def __init__(self, *args, empty_values=FormIETFJSONField.empty_values, accepted_empty_values=None, **kwargs):
+ def __init__(
+ self,
+ *args,
+ empty_values=FormIETFJSONField.empty_values,
+ accepted_empty_values=None,
+ **kwargs,
+ ):
if accepted_empty_values is None:
accepted_empty_values = []
- self.empty_values = [x
- for x in empty_values
- if x not in accepted_empty_values]
+ self.empty_values = [x for x in empty_values if x not in accepted_empty_values]
super().__init__(*args, **kwargs)
def formfield(self, **kwargs):
- if 'form_class' not in kwargs or issubclass(kwargs['form_class'], FormIETFJSONField):
- kwargs.setdefault('empty_values', self.empty_values)
+ if "form_class" not in kwargs or issubclass(
+ kwargs["form_class"], FormIETFJSONField
+ ):
+ kwargs.setdefault("empty_values", self.empty_values)
return super().formfield(**{**kwargs})
diff --git a/ietf/utils/decorators.py b/ietf/utils/decorators.py
index 56c28c4b19..b50e0e7f96 100644
--- a/ietf/utils/decorators.py
+++ b/ietf/utils/decorators.py
@@ -15,21 +15,9 @@
import debug # pyflakes:ignore
-from ietf.utils.test_runner import set_coverage_checking
from ietf.person.models import Person, PersonalApiKey, PersonApiKeyEvent
from ietf.utils import log
-def skip_coverage(f):
- @wraps(f)
- def _wrapper(*args, **kwargs):
- if settings.TEST_CODE_COVERAGE_CHECKER:
- set_coverage_checking(False)
- result = f(*args, **kwargs)
- set_coverage_checking(True)
- return result
- else:
- return f(*args, **kwargs)
- return _wrapper
def person_required(f):
@wraps(f)
@@ -48,7 +36,7 @@ def require_api_key(f):
@wraps(f)
def _wrapper(request, *args, **kwargs):
def err(code, text):
- return HttpResponse(text, status=code, content_type='text/plain')
+ return HttpResponse(text, status=code, content_type=f"text/plain; charset={settings.DEFAULT_CHARSET}")
# Check method and get hash
if request.method == 'POST':
hash = request.POST.get('apikey')
diff --git a/ietf/utils/draft.py b/ietf/utils/draft.py
index 50add5abba..53d3d40811 100755
--- a/ietf/utils/draft.py
+++ b/ietf/utils/draft.py
@@ -65,7 +65,6 @@
opt_debug = False
opt_timestamp = False
opt_trace = False
-opt_authorinfo = False
opt_attributes = False
# Don't forget to add the option variable to the globals list in _main below
@@ -1332,8 +1331,6 @@ def getmeta(fn):
# ----------------------------------------------------------------------
def _output(docname, fields, outfile=sys.stdout):
- global company_domain
-
if opt_attributes:
def outputkey(key, fields):
field = fields[key]
@@ -1373,9 +1370,8 @@ def _printmeta(fn, outfile=sys.stdout):
# Main
# ----------------------------------------------------------------------
-company_domain = {} # type: Dict[str, str]
def _main(outfile=sys.stdout):
- global opt_debug, opt_timestamp, opt_trace, opt_authorinfo, files, company_domain, opt_attributes
+ global opt_debug, opt_timestamp, opt_trace, files, opt_attributes
# set default values, if any
# ----------------------------------------------------------------------
# Option processing
@@ -1423,8 +1419,6 @@ def _main(outfile=sys.stdout):
elif opt in ["-T", "--trace"]: # Emit trace information while working
opt_trace = True
- company_domain = {}
-
if not files:
files = [ "-" ]
diff --git a/ietf/utils/fields.py b/ietf/utils/fields.py
index 3e6f56d45e..6e8765612f 100644
--- a/ietf/utils/fields.py
+++ b/ietf/utils/fields.py
@@ -1,12 +1,11 @@
-# Copyright The IETF Trust 2012-2020, All Rights Reserved
+# Copyright The IETF Trust 2012-2025, All Rights Reserved
# -*- coding: utf-8 -*-
import datetime
import json
import re
-
-import jsonfield
+from email.utils import parseaddr
import debug # pyflakes:ignore
@@ -18,6 +17,7 @@
from django.core.exceptions import ValidationError
from django.utils.dateparse import parse_duration
+
class MultiEmailField(forms.Field):
def to_python(self, value):
"Normalize data to a list of strings."
@@ -40,6 +40,25 @@ def validate(self, value):
for email in value:
validate_email(email)
+
+def validate_name_addr_email(value):
+ "Validate name-addr style email address"
+ name, addr = parseaddr(value)
+ if not addr:
+ raise ValidationError("Invalid email format.")
+ try:
+ validate_email(addr) # validate the actual address part
+ except ValidationError:
+ raise ValidationError("Invalid email address.")
+
+
+class NameAddrEmailField(forms.CharField):
+ def validate(self, value):
+ "Check if value consists only of valid emails."
+ super().validate(value)
+ validate_name_addr_email(value)
+
+
def yyyymmdd_to_strftime_format(fmt):
translation_table = sorted([
("yyyy", "%Y"),
@@ -328,8 +347,21 @@ def has_changed(self, initial, data):
return super().has_changed(initial, data)
-class IETFJSONField(jsonfield.fields.forms.JSONField):
- def __init__(self, *args, empty_values=jsonfield.fields.forms.JSONField.empty_values,
+class IETFJSONField(forms.JSONField): # pragma: no cover
+ # Deprecated - use EmptyAwareJSONField instead
+ def __init__(self, *args, empty_values=forms.JSONField.empty_values,
+ accepted_empty_values=None, **kwargs):
+ if accepted_empty_values is None:
+ accepted_empty_values = []
+ self.empty_values = [x
+ for x in empty_values
+ if x not in accepted_empty_values]
+
+ super().__init__(*args, **kwargs)
+
+
+class EmptyAwareJSONField(forms.JSONField):
+ def __init__(self, *args, empty_values=forms.JSONField.empty_values,
accepted_empty_values=None, **kwargs):
if accepted_empty_values is None:
accepted_empty_values = []
diff --git a/ietf/utils/jstest.py b/ietf/utils/jstest.py
index 215d78d65f..cf242fc4eb 100644
--- a/ietf/utils/jstest.py
+++ b/ietf/utils/jstest.py
@@ -3,6 +3,8 @@
import os
+from django.conf import settings
+from django.contrib.staticfiles.testing import StaticLiveServerTestCase
from django.urls import reverse as urlreverse
from unittest import skipIf
@@ -21,7 +23,11 @@
from ietf.utils.pipe import pipe
-from ietf.utils.test_runner import IetfLiveServerTestCase
+from ietf.utils.test_runner import (
+ set_template_coverage,
+ set_url_coverage,
+ load_and_run_fixtures,
+)
executable_name = 'geckodriver'
code, out, err = pipe('{} --version'.format(executable_name))
@@ -49,17 +55,44 @@ def ifSeleniumEnabled(func):
return skipIf(skip_selenium, skip_message)(func)
-class IetfSeleniumTestCase(IetfLiveServerTestCase):
+class IetfSeleniumTestCase(StaticLiveServerTestCase): # pragma: no cover
login_view = 'ietf.ietfauth.views.login'
+ @classmethod
+ def setUpClass(cls):
+ set_template_coverage(False)
+ set_url_coverage(False)
+ super().setUpClass()
+
+ @classmethod
+ def tearDownClass(cls):
+ super().tearDownClass()
+ set_template_coverage(True)
+ set_url_coverage(True)
+
def setUp(self):
- super(IetfSeleniumTestCase, self).setUp()
+ super().setUp()
+ # LiveServerTestCase uses TransactionTestCase which seems to
+ # somehow interfere with the fixture loading process in
+ # IetfTestRunner when running multiple tests (the first test
+ # is fine, in the next ones the fixtures have been wiped) -
+ # this is no doubt solvable somehow, but until then we simply
+ # recreate them here
+ from ietf.person.models import Person
+ if not Person.objects.exists():
+ load_and_run_fixtures(verbosity=0)
+ self.replaced_settings = dict()
+ if hasattr(settings, 'IDTRACKER_BASE_URL'):
+ self.replaced_settings['IDTRACKER_BASE_URL'] = settings.IDTRACKER_BASE_URL
+ settings.IDTRACKER_BASE_URL = self.live_server_url
self.driver = start_web_driver()
self.driver.set_window_size(1024,768)
def tearDown(self):
- super(IetfSeleniumTestCase, self).tearDown()
self.driver.close()
+ for k, v in self.replaced_settings.items():
+ setattr(settings, k, v)
+ super().tearDown()
def absreverse(self,*args,**kwargs):
return '%s%s'%(self.live_server_url, urlreverse(*args, **kwargs))
diff --git a/ietf/utils/management/tests.py b/ietf/utils/management/tests.py
index e94c39354f..38be464c7f 100644
--- a/ietf/utils/management/tests.py
+++ b/ietf/utils/management/tests.py
@@ -1,7 +1,7 @@
# Copyright The IETF Trust 2013-2020, All Rights Reserved
# -*- coding: utf-8 -*-
-import mock
+from unittest import mock
from django.core.management import call_command, CommandError
from django.test import override_settings
@@ -12,7 +12,7 @@
from ietf.utils.test_utils import TestCase
-@mock.patch.object(EmailOnFailureCommand, 'handle')
+@mock.patch.object(EmailOnFailureCommand, 'handle', return_value=None)
class EmailOnFailureCommandTests(TestCase):
def test_calls_handle(self, handle_method):
call_command(EmailOnFailureCommand())
diff --git a/ietf/utils/meetecho.py b/ietf/utils/meetecho.py
index 0dbf75736a..943f3789ef 100644
--- a/ietf/utils/meetecho.py
+++ b/ietf/utils/meetecho.py
@@ -27,7 +27,7 @@
class MeetechoAPI:
- timezone = datetime.timezone.utc
+ timezone = datetime.UTC
def __init__(
self, api_base: str, client_id: str, client_secret: str, request_timeout=3.01
@@ -504,12 +504,17 @@ def _should_send_update(self, session):
if self.slides_notify_time < datetime.timedelta(0):
return True # < 0 means "always" for a scheduled session
else:
- now = datetime.datetime.now(tz=datetime.timezone.utc)
+ now = datetime.datetime.now(tz=datetime.UTC)
return (timeslot.time - self.slides_notify_time) < now < (timeslot.end_time() + self.slides_notify_time)
def add(self, session: "Session", slides: "Document", order: int):
+ """Add a slide deck to the session
+
+ Returns True if the update was sent, False if it was not sent because the
+ current time is outside the update window for the session.
+ """
if not self._should_send_update(session):
- return
+ return False
# Would like to confirm that session.presentations includes the slides Document, but we can't
# (same problem regarding unsaved Documents discussed in the docstring)
@@ -524,11 +529,16 @@ def add(self, session: "Session", slides: "Document", order: int):
"order": order,
}
)
+ return True
def delete(self, session: "Session", slides: "Document"):
- """Delete a slide deck from the session"""
+ """Delete a slide deck from the session
+
+ Returns True if the update was sent, False if it was not sent because the
+ current time is outside the update window for the session.
+ """
if not self._should_send_update(session):
- return
+ return False
if session.presentations.filter(document=slides).exists():
# "order" problems are very likely to result if we delete slides that are actually still
@@ -543,12 +553,17 @@ def delete(self, session: "Session", slides: "Document"):
id=slides.pk,
)
if session.presentations.filter(document__type_id="slides").exists():
- self.send_update(session) # adjust order to fill in the hole
+ self._send_update(session) # adjust order to fill in the hole
+ return True
def revise(self, session: "Session", slides: "Document"):
- """Replace existing deck with its current state"""
+ """Replace existing deck with its current state
+
+ Returns True if the update was sent, False if it was not sent because the
+ current time is outside the update window for the session.
+ """
if not self._should_send_update(session):
- return
+ return False
sp = session.presentations.filter(document=slides).first()
if sp is None:
@@ -561,11 +576,13 @@ def revise(self, session: "Session", slides: "Document"):
id=slides.pk,
)
self.add(session, slides, order) # fill in the hole
+ return True
- def send_update(self, session: "Session"):
- if not self._should_send_update(session):
- return
-
+ def _send_update(self, session: "Session"):
+ """Notify of the current state of the session's slides (no time window check)
+
+ This is a private helper - use send_update() (no leading underscore) instead.
+ """
self.api.update_slide_decks(
wg_token=self.wg_token(session.group),
session=str(session.pk),
@@ -580,3 +597,14 @@ def send_update(self, session: "Session"):
for deck in session.presentations.filter(document__type="slides")
]
)
+
+ def send_update(self, session: "Session"):
+ """Notify of the current state of the session's slides
+
+ Returns True if the update was sent, False if it was not sent because the
+ current time is outside the update window for the session.
+ """
+ if not self._should_send_update(session):
+ return False
+ self._send_update(session)
+ return True
diff --git a/ietf/utils/migrations/0003_dirtybits.py b/ietf/utils/migrations/0003_dirtybits.py
new file mode 100644
index 0000000000..11f6ed09f6
--- /dev/null
+++ b/ietf/utils/migrations/0003_dirtybits.py
@@ -0,0 +1,37 @@
+# Copyright The IETF Trust 2026, All Rights Reserved
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+ dependencies = [
+ ("utils", "0002_delete_versioninfo"),
+ ]
+
+ operations = [
+ migrations.CreateModel(
+ name="DirtyBits",
+ fields=[
+ (
+ "id",
+ models.AutoField(
+ auto_created=True,
+ primary_key=True,
+ serialize=False,
+ verbose_name="ID",
+ ),
+ ),
+ (
+ "slug",
+ models.CharField(
+ choices=[("rfcindex", "RFC Index")], max_length=40, unique=True
+ ),
+ ),
+ ("dirty_time", models.DateTimeField(blank=True, null=True)),
+ ("processed_time", models.DateTimeField(blank=True, null=True)),
+ ],
+ options={
+ "verbose_name_plural": "dirty bits",
+ },
+ ),
+ ]
diff --git a/ietf/utils/migrations/0004_alter_dirtybits_slug.py b/ietf/utils/migrations/0004_alter_dirtybits_slug.py
new file mode 100644
index 0000000000..e17ea6cadd
--- /dev/null
+++ b/ietf/utils/migrations/0004_alter_dirtybits_slug.py
@@ -0,0 +1,21 @@
+# Copyright The IETF Trust 2026, All Rights Reserved
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+ dependencies = [
+ ("utils", "0003_dirtybits"),
+ ]
+
+ operations = [
+ migrations.AlterField(
+ model_name="dirtybits",
+ name="slug",
+ field=models.CharField(
+ choices=[("rfcindex", "RFC Index"), ("errata", "Errata Tags")],
+ max_length=40,
+ unique=True,
+ ),
+ ),
+ ]
diff --git a/ietf/utils/mime.py b/ietf/utils/mime.py
index ab21cfe5c6..1f9b75b4df 100644
--- a/ietf/utils/mime.py
+++ b/ietf/utils/mime.py
@@ -5,6 +5,7 @@
import magic
import re
+
def get_mime_type(content):
# try to fixup encoding
if hasattr(magic, "open"):
@@ -13,15 +14,17 @@ def get_mime_type(content):
filetype = m.buffer(content)
else:
m = magic.Magic()
- m.cookie = magic.magic_open(magic.MAGIC_NONE | magic.MAGIC_MIME | magic.MAGIC_MIME_ENCODING)
+ m.cookie = magic.magic_open(
+ magic.MAGIC_NONE | magic.MAGIC_MIME | magic.MAGIC_MIME_ENCODING
+ )
magic.magic_load(m.cookie, None)
filetype = m.from_buffer(content)
# Work around silliness in libmagic on OpenSUSE 15.1
- filetype = filetype.replace('text/x-Algol68;', 'text/plain;')
- if ';' in filetype and 'charset=' in filetype:
- mimetype, charset = re.split('; *charset=', filetype)
+ filetype = filetype.replace("text/x-Algol68;", "text/plain;")
+ filetype = filetype.replace("application/vnd.hp-HPGL;", "text/plain;")
+ if ";" in filetype and "charset=" in filetype:
+ mimetype, charset = re.split("; *charset=", filetype)
else:
- mimetype = re.split(';', filetype)[0]
- charset = 'utf-8'
+ mimetype = re.split(";", filetype)[0]
+ charset = "utf-8"
return mimetype, charset
-
diff --git a/ietf/utils/models.py b/ietf/utils/models.py
index 21af5766e9..64f7f253f2 100644
--- a/ietf/utils/models.py
+++ b/ietf/utils/models.py
@@ -1,14 +1,36 @@
-# Copyright The IETF Trust 2015-2020, All Rights Reserved
+# Copyright The IETF Trust 2015-2026, All Rights Reserved
import itertools
from django.db import models
+
+class DirtyBits(models.Model):
+ """A weak semaphore mechanism for coordination with celery beat tasks
+
+ Web workers will set the "dirty_time" value for a given dirtybit slug.
+ Celery workers will do work if "processed_time" < "dirty_time" and update
+ "processed_time".
+ """
+
+ class Slugs(models.TextChoices):
+ RFCINDEX = "rfcindex", "RFC Index"
+ ERRATA = "errata", "Errata Tags"
+
+ # next line can become `...choices=Slugs)` when we get to Django 5.x
+ slug = models.CharField(max_length=40, blank=False, choices=Slugs.choices, unique=True)
+ dirty_time = models.DateTimeField(null=True, blank=True)
+ processed_time = models.DateTimeField(null=True, blank=True)
+
+ class Meta:
+ verbose_name_plural = "dirty bits"
+
+
class DumpInfo(models.Model):
date = models.DateTimeField()
host = models.CharField(max_length=128)
tz = models.CharField(max_length=32, default='UTC')
-
+
class ForeignKey(models.ForeignKey):
"A local ForeignKey proxy which provides the on_delete value required under Django 2.0."
def __init__(self, to, on_delete=models.CASCADE, **kwargs):
diff --git a/ietf/utils/patch.py b/ietf/utils/patch.py
index 9de2270ebb..fd3e4a165d 100644
--- a/ietf/utils/patch.py
+++ b/ietf/utils/patch.py
@@ -87,8 +87,7 @@ def createLock(self):
debugmode = False
def setdebug():
- global debugmode, streamhandler
-
+ global debugmode
debugmode = True
loglevel = logging.DEBUG
logformat = "%(levelname)8s %(message)s"
diff --git a/ietf/utils/resources.py b/ietf/utils/resources.py
index 1252cfef14..63206eb33a 100644
--- a/ietf/utils/resources.py
+++ b/ietf/utils/resources.py
@@ -1,6 +1,4 @@
-# Copyright The IETF Trust 2014-2019, All Rights Reserved
-# -*- coding: utf-8 -*-
-# Autogenerated by the mkresources management command 2014-11-13 05:39
+# Copyright The IETF Trust 2014-2026, All Rights Reserved
from ietf.api import ModelResource
@@ -12,7 +10,7 @@
from django.contrib.contenttypes.models import ContentType
from ietf import api
-from ietf.utils.models import DumpInfo
+from ietf.utils.models import DirtyBits, DumpInfo
class UserResource(ModelResource):
@@ -43,3 +41,9 @@ class Meta:
"host": ALL,
}
api.utils.register(DumpInfoResource())
+
+
+class DirtyBitsResource(ModelResource):
+ class Meta:
+ queryset = DirtyBits.objects.none()
+api.utils.register(DirtyBitsResource())
diff --git a/ietf/utils/searchindex.py b/ietf/utils/searchindex.py
new file mode 100644
index 0000000000..87951abb60
--- /dev/null
+++ b/ietf/utils/searchindex.py
@@ -0,0 +1,372 @@
+# Copyright The IETF Trust 2026, All Rights Reserved
+"""Search indexing utilities"""
+
+import re
+from itertools import batched
+from math import floor
+from typing import Iterable
+
+import httpx # just for exceptions
+import typesense
+import typesense.exceptions
+from django.conf import settings
+from typesense.types.document import DocumentSchema
+
+from ietf.doc.models import Document, StoredObject
+from ietf.doc.storage_utils import retrieve_str
+from ietf.utils.log import log
+
+# Error classes that might succeed just by retrying a failed attempt.
+# Must be a tuple for use with isinstance()
+RETRYABLE_ERROR_CLASSES = (
+ httpx.ConnectError,
+ httpx.ConnectTimeout,
+ typesense.exceptions.Timeout,
+ typesense.exceptions.ServerError,
+ typesense.exceptions.ServiceUnavailable,
+)
+
+
+DEFAULT_SETTINGS = {
+ "TYPESENSE_API_URL": "",
+ "TYPESENSE_API_KEY": "",
+ "TYPESENSE_COLLECTION_NAME": "docs",
+ "TASK_RETRY_DELAY": 10,
+ "TASK_MAX_RETRIES": 12,
+}
+
+
+def get_settings():
+ return DEFAULT_SETTINGS | getattr(settings, "SEARCHINDEX_CONFIG", {})
+
+
+def enabled():
+ _settings = get_settings()
+ return _settings["TYPESENSE_API_URL"] != ""
+
+
+def get_typesense_client() -> typesense.Client:
+ _settings = get_settings()
+ client = typesense.Client(
+ {
+ "api_key": _settings["TYPESENSE_API_KEY"],
+ "nodes": [_settings["TYPESENSE_API_URL"]],
+ }
+ )
+ return client
+
+
+def get_collection_name() -> str:
+ _settings = get_settings()
+ collection_name = _settings["TYPESENSE_COLLECTION_NAME"]
+ assert isinstance(collection_name, str)
+ return collection_name
+
+
+def _sanitize_text(content):
+ """Sanitize content or abstract text for search"""
+ # REs (with approximate names)
+ RE_DOT_OR_BANG_SPACE = r"\. |! " # -> " " (space)
+ RE_COMMENT_OR_TOC_CRUD = r"<--|-->|--+|\+|\.\.+" # -> ""
+ RE_BRACKETED_REF = r"\[[a-zA-Z0-9 -]+\]" # -> ""
+ RE_DOTTED_NUMBERS = r"[0-9]+\.[0-9]+(\.[0-9]+)?" # -> ""
+ RE_MULTIPLE_WHITESPACE = r"\s+" # -> " " (space)
+ # Replacement values (for clarity of intent)
+ SPACE = " "
+ EMPTY = ""
+ # Sanitizing begins here, order is significant!
+ content = re.sub(RE_DOT_OR_BANG_SPACE, SPACE, content.strip())
+ content = re.sub(RE_COMMENT_OR_TOC_CRUD, EMPTY, content)
+ content = re.sub(RE_BRACKETED_REF, EMPTY, content)
+ content = re.sub(RE_DOTTED_NUMBERS, EMPTY, content)
+ content = re.sub(RE_MULTIPLE_WHITESPACE, SPACE, content)
+ return content.strip()
+
+
+def typesense_doc_from_rfc(rfc: Document) -> DocumentSchema:
+ assert rfc.type_id == "rfc"
+ assert rfc.rfc_number is not None
+ assert rfc.pages is not None
+
+ keywords: list[str] = rfc.keywords # help type checking
+
+ subseries = rfc.part_of()
+ if len(subseries) > 1:
+ log(
+ f"RFC {rfc.rfc_number} is in multiple subseries. "
+ f"Indexing as {subseries[0].name}"
+ )
+ subseries = subseries[0] if len(subseries) > 0 else None
+ obsoleted_by = rfc.related_that("obs")
+ updated_by = rfc.related_that("updates")
+
+ stored_txt = (
+ StoredObject.objects.exclude_deleted()
+ .filter(store="rfc", doc_name=rfc.name, name__startswith="txt/")
+ .first()
+ )
+ content = ""
+ if stored_txt is not None:
+ # Should be available in the blobdb, but be cautious...
+ try:
+ content = retrieve_str(kind=stored_txt.store, name=stored_txt.name)
+ except Exception as err:
+ log(f"Unable to retrieve {stored_txt} from storage: {err}")
+
+ ts_document = {
+ "id": f"doc-{rfc.pk}",
+ "rfcNumber": rfc.rfc_number,
+ "rfc": str(rfc.rfc_number),
+ "filename": rfc.name,
+ "title": rfc.title,
+ "abstract": _sanitize_text(rfc.abstract),
+ "pages": rfc.pages,
+ "keywords": keywords,
+ "type": "rfc",
+ "state": [state.name for state in rfc.states.all()],
+ "status": {"slug": rfc.std_level.slug, "name": rfc.std_level.name},
+ "date": floor(rfc.time.timestamp()),
+ "publicationDate": floor(rfc.pub_datetime().timestamp()),
+ "stream": {"slug": rfc.stream.slug, "name": rfc.stream.name},
+ "authors": [
+ {"name": rfc_author.titlepage_name, "affiliation": rfc_author.affiliation}
+ for rfc_author in rfc.rfcauthor_set.all()
+ ],
+ "flags": {
+ "hiddenDefault": False,
+ "obsoleted": len(obsoleted_by) > 0,
+ "updated": len(updated_by) > 0,
+ },
+ "obsoletedBy": [str(doc.rfc_number) for doc in obsoleted_by],
+ "updatedBy": [str(doc.rfc_number) for doc in updated_by],
+ "ranking": rfc.rfc_number,
+ }
+ if subseries is not None:
+ ts_document["subseries"] = {
+ "acronym": subseries.type.slug,
+ "number": int(subseries.name[len(subseries.type.slug) :]),
+ "total": len(subseries.contains()),
+ }
+ if rfc.group is not None:
+ ts_document["group"] = {
+ "acronym": rfc.group.acronym,
+ "name": rfc.group.name,
+ "full": f"{rfc.group.acronym} - {rfc.group.name}",
+ }
+ if (
+ rfc.group.parent is not None
+ and rfc.stream_id not in ["ise", "irtf", "iab"] # exclude editorial?
+ ):
+ ts_document["area"] = {
+ "acronym": rfc.group.parent.acronym,
+ "name": rfc.group.parent.name,
+ "full": f"{rfc.group.parent.acronym} - {rfc.group.parent.name}",
+ }
+ if rfc.ad is not None:
+ ts_document["adName"] = rfc.ad.name
+ if content != "":
+ ts_document["content"] = _sanitize_text(content)
+ return ts_document
+
+
+def update_or_create_rfc_entry(rfc: Document):
+ """Update/create index entries for one RFC"""
+ ts_document = typesense_doc_from_rfc(rfc)
+ client = get_typesense_client()
+ client.collections[get_collection_name()].documents.upsert(ts_document)
+
+
+def update_or_create_rfc_entries(
+ rfcs: Iterable[Document], batchsize: int | None = None
+):
+ """Update/create index entries for RFCs in bulk
+
+ If batchsize is set, computes index data in batches of batchsize and adds to the
+ index. Will make a total of (len(rfcs) // batchsize) + 1 API calls.
+
+ N.b. that typesense has a server-side batch size that defaults to 40, which should
+ "almost never be changed from the default." This does not change that. Further,
+ the python client library's import_ method has a batch_size parameter that does
+ client-side batching. We don't use that, either.
+ """
+ success_count = 0
+ fail_count = 0
+ client = get_typesense_client()
+ batches = [rfcs] if batchsize is None else batched(rfcs, batchsize)
+ for batch in batches:
+ tdoc_batch = [typesense_doc_from_rfc(rfc) for rfc in batch]
+ results = client.collections[get_collection_name()].documents.import_(
+ tdoc_batch, {"action": "upsert"}
+ )
+ for tdoc, result in zip(tdoc_batch, results):
+ if result["success"]:
+ success_count += 1
+ else:
+ fail_count += 1
+ log(f"Failed to index RFC {tdoc['rfcNumber']}: {result['error']}")
+ log(f"Added {success_count} RFCs to the index, failed to add {fail_count}")
+
+
+DOCS_SCHEMA = {
+ "enable_nested_fields": True,
+ "default_sorting_field": "ranking",
+ "fields": [
+ # RFC number in integer form, for sorting asc/desc in search results
+ # Omit field for drafts
+ {
+ "name": "rfcNumber",
+ "type": "int32",
+ "facet": False,
+ "optional": True,
+ "sort": True,
+ },
+ # RFC number in string form, for direct matching with ranking
+ # Omit field for drafts
+ {"name": "rfc", "type": "string", "facet": False, "optional": True},
+ # For drafts that correspond to an RFC, insert the RFC number
+ # Omit field for rfcs or if not relevant
+ {"name": "ref", "type": "string", "facet": False, "optional": True},
+ # Filename of the document (without the extension, e.g. "rfc1234"
+ # or "draft-ietf-abc-def-02")
+ {"name": "filename", "type": "string", "facet": False, "infix": True},
+ # Title of the draft / rfc
+ {"name": "title", "type": "string", "facet": False},
+ # Abstract of the draft / rfc
+ {"name": "abstract", "type": "string", "facet": False},
+ # Number of pages
+ {"name": "pages", "type": "int32", "facet": False},
+ # A list of search keywords if relevant, set to empty array otherwise
+ {"name": "keywords", "type": "string[]", "facet": True},
+ # Type of the document
+ # Accepted values: "draft" or "rfc"
+ {"name": "type", "type": "string", "facet": True},
+ # State(s) of the document (e.g. "Published", "Adopted by a WG", etc.)
+ # Use the full name, not the slug
+ {"name": "state", "type": "string[]", "facet": True, "optional": True},
+ # Status (Standard Level Name)
+ # Object with properties "slug" and "name"
+ # e.g.: { slug: "std", "name": "Internet Standard" }
+ {"name": "status", "type": "object", "facet": True, "optional": True},
+ # The subseries it is part of. (e.g. "BCP")
+ # Omit otherwise.
+ {
+ "name": "subseries.acronym",
+ "type": "string",
+ "facet": True,
+ "optional": True,
+ },
+ # The subseries number it is part of. (e.g. 123)
+ # Omit otherwise.
+ {
+ "name": "subseries.number",
+ "type": "int32",
+ "facet": True,
+ "sort": True,
+ "optional": True,
+ },
+ # The total of RFCs in the subseries
+ # Omit if not part of a subseries
+ {
+ "name": "subseries.total",
+ "type": "int32",
+ "facet": False,
+ "sort": False,
+ "optional": True,
+ },
+ # Date of the document, in unix epoch seconds (can be negative for < 1970)
+ {"name": "date", "type": "int64", "facet": False},
+ # Expiration date of the document, in unix epoch seconds (can be negative
+ # for < 1970). Omit field for RFCs
+ {"name": "expires", "type": "int64", "facet": False, "optional": True},
+ # Publication date of the RFC, in unix epoch seconds (can be negative
+ # for < 1970). Omit field for drafts
+ {
+ "name": "publicationDate",
+ "type": "int64",
+ "facet": True,
+ "optional": True,
+ },
+ # Working Group
+ # Object with properties "acronym", "name" and "full"
+ # e.g.:
+ # {
+ # "acronym": "ntp",
+ # "name": "Network Time Protocols",
+ # "full": "ntp - Network Time Protocols",
+ # }
+ {"name": "group", "type": "object", "facet": True, "optional": True},
+ # Area
+ # Object with properties "acronym", "name" and "full"
+ # e.g.:
+ # {
+ # "acronym": "mpls",
+ # "name": "Multiprotocol Label Switching",
+ # "full": "mpls - Multiprotocol Label Switching",
+ # }
+ {"name": "area", "type": "object", "facet": True, "optional": True},
+ # Stream
+ # Object with properties "slug" and "name"
+ # e.g.: { slug: "ietf", "name": "IETF" }
+ {"name": "stream", "type": "object", "facet": True, "optional": True},
+ # List of authors
+ # Array of objects with properties "name" and "affiliation"
+ # e.g.:
+ # [
+ # {"name": "John Doe", "affiliation": "ACME Inc."},
+ # {"name": "Ada Lovelace", "affiliation": "Babbage Corps."},
+ # ]
+ {"name": "authors", "type": "object[]", "facet": True, "optional": True},
+ # Area Director Name (e.g. "Leonardo DaVinci")
+ {"name": "adName", "type": "string", "facet": True, "optional": True},
+ # Whether the document should be hidden by default in search results or not.
+ {"name": "flags.hiddenDefault", "type": "bool", "facet": True},
+ # Whether the document is obsoleted by another document or not.
+ {"name": "flags.obsoleted", "type": "bool", "facet": True},
+ # Whether the document is updated by another document or not.
+ {"name": "flags.updated", "type": "bool", "facet": True},
+ # List of documents that obsolete this document.
+ # Array of strings. Use RFC number for RFCs. (e.g. ["123", "456"])
+ # Omit if none. Must be provided if "flags.obsoleted" is set to True.
+ {
+ "name": "obsoletedBy",
+ "type": "string[]",
+ "facet": False,
+ "optional": True,
+ },
+ # List of documents that update this document.
+ # Array of strings. Use RFC number for RFCs. (e.g. ["123", "456"])
+ # Omit if none. Must be provided if "flags.updated" is set to True.
+ {"name": "updatedBy", "type": "string[]", "facet": False, "optional": True},
+ # Sanitized content of the document.
+ # Make sure to remove newlines, double whitespaces, symbols and tags.
+ {
+ "name": "content",
+ "type": "string",
+ "facet": False,
+ "optional": True,
+ "store": False,
+ },
+ # Ranking value to use when no explicit sorting is used during search
+ # Set to the RFC number for RFCs and the revision number for drafts
+ # This ensures newer RFCs get listed first in the default search results
+ # (without a query)
+ {"name": "ranking", "type": "int32", "facet": False},
+ ],
+}
+
+
+def create_collection():
+ collection_name = get_collection_name()
+ log(f"Creating '{collection_name}' collection")
+ client = get_typesense_client()
+ client.collections.create({"name": get_collection_name()} | DOCS_SCHEMA)
+
+
+def delete_collection():
+ collection_name = get_collection_name()
+ log(f"Deleting '{collection_name}' collection")
+ client = get_typesense_client()
+ try:
+ client.collections[collection_name].delete()
+ except typesense.exceptions.ObjectNotFound:
+ pass
diff --git a/ietf/utils/serialize.py b/ietf/utils/serialize.py
index 342d211cf5..77f97942cb 100644
--- a/ietf/utils/serialize.py
+++ b/ietf/utils/serialize.py
@@ -16,7 +16,7 @@ def object_as_shallow_dict(obj):
if isinstance(f, models.ManyToManyField):
v = list(v.values_list("pk", flat=True))
elif isinstance(f, models.DateTimeField):
- v = v.astimezone(datetime.timezone.utc).isoformat()
+ v = v.astimezone(datetime.UTC).isoformat()
elif isinstance(f, models.DateField):
v = v.strftime('%Y-%m-%d')
diff --git a/ietf/utils/storage.py b/ietf/utils/storage.py
index 0aa02cab86..bad5af5178 100644
--- a/ietf/utils/storage.py
+++ b/ietf/utils/storage.py
@@ -1,8 +1,95 @@
+# Copyright The IETF Trust 2020-2025, All Rights Reserved
+"""Django Storage classes"""
+import datetime
+from hashlib import sha384
+from pathlib import Path
+from typing import Optional
+
+from django.conf import settings
+from django.core.files.base import File
from django.core.files.storage import FileSystemStorage
+from ietf.doc.storage_utils import store_file
+from .log import log
+
class NoLocationMigrationFileSystemStorage(FileSystemStorage):
- def deconstruct(obj): # pylint: disable=no-self-argument
- path, args, kwargs = FileSystemStorage.deconstruct(obj)
- kwargs["location"] = None
- return (path, args, kwargs)
+ def deconstruct(self):
+ path, args, kwargs = super().deconstruct()
+ kwargs["location"] = None # don't record location in migrations
+ return path, args, kwargs
+
+
+class BlobShadowFileSystemStorage(NoLocationMigrationFileSystemStorage):
+ """FileSystemStorage that shadows writes to the blob store as well
+
+ Strips directories from the filename when naming the blob.
+ """
+
+ def __init__(
+ self,
+ *, # disallow positional arguments
+ kind: str,
+ location=None,
+ base_url=None,
+ file_permissions_mode=None,
+ directory_permissions_mode=None,
+ ):
+ self.kind = kind
+ super().__init__(
+ location, base_url, file_permissions_mode, directory_permissions_mode
+ )
+
+ def save(self, name, content, max_length=None):
+ # Write content to the filesystem - this deals with chunks, etc...
+ saved_name = super().save(name, content, max_length)
+
+ if settings.ENABLE_BLOBSTORAGE:
+ try:
+ # Retrieve the content and write to the blob store
+ blob_name = Path(saved_name).name # strips path
+ with self.open(saved_name, "rb") as f:
+ store_file(self.kind, blob_name, f, allow_overwrite=True)
+ except Exception as err:
+ log(f"Blobstore Error: Failed to shadow {saved_name} at {self.kind}:{blob_name}: {repr(err)}")
+ if settings.SERVER_MODE == "development":
+ raise
+ return saved_name # includes the path!
+
+ def deconstruct(self):
+ path, args, kwargs = super().deconstruct()
+ kwargs["kind"] = "" # don't record "kind" in migrations
+ return path, args, kwargs
+
+
+class MetadataFile(File):
+ """File that includes metadata"""
+
+ def __init__(self, file, name=None, mtime: Optional[datetime.datetime]=None, content_type=""):
+ super().__init__(file=file, name=name)
+ self.mtime = mtime
+ self.content_type = content_type
+ self._custom_metadata = None
+
+ @property
+ def custom_metadata(self):
+ if self._custom_metadata is None:
+ self._custom_metadata = self._compute_custom_metadata()
+ return self._custom_metadata
+
+ def _compute_custom_metadata(self):
+ try:
+ self.file.seek(0)
+ except AttributeError: # TODO-BLOBSTORE
+ raise NotImplementedError("cannot handle unseekable content")
+ content_bytes = self.file.read()
+ if not isinstance(
+ content_bytes, bytes
+        ):  # TODO-BLOBSTORE: This is sketch-development only - remove before committing
+ raise Exception(f"Expected bytes - got {type(content_bytes)}")
+ self.file.seek(0)
+ return {
+ "len": f"{len(content_bytes)}",
+ "sha384": f"{sha384(content_bytes).hexdigest()}",
+ "mtime": None if self.mtime is None else self.mtime.isoformat(),
+ }
diff --git a/ietf/utils/templatetags/tests.py b/ietf/utils/templatetags/tests.py
index a93bf2d94d..859319be3d 100644
--- a/ietf/utils/templatetags/tests.py
+++ b/ietf/utils/templatetags/tests.py
@@ -3,6 +3,7 @@
from django.template import Context, Origin, Template
from django.test import override_settings
+from ietf.utils.templatetags.textfilters import linkify
from ietf.utils.test_utils import TestCase
import debug # pyflakes: ignore
@@ -39,3 +40,68 @@ def test_origin_outside_base_dir(self):
output = template.render(Context())
self.assertNotIn(component, output,
'Full path components should not be revealed in html')
+
+
+class TextfiltersTests(TestCase):
+ def test_linkify(self):
+ # Cases with autoescape = True (the default)
+ self.assertEqual(
+ linkify("plain string"),
+ "plain string",
+ )
+ self.assertEqual(
+ linkify("https://www.ietf.org"),
+ 'https://www.ietf.org ',
+ )
+ self.assertEqual(
+ linkify('IETF '),
+ (
+ '<a href="https://www.ietf.org ">IETF</a>'
+ ),
+ )
+ self.assertEqual(
+ linkify("somebody@example.com"),
+ 'somebody@example.com ',
+ )
+ self.assertEqual(
+ linkify("Some Body "),
+ (
+ 'Some Body <'
+ 'somebody@example.com >'
+ ),
+ )
+ self.assertEqual(
+ linkify(""),
+ "<script>alert('h4x0r3d');</script>",
+ )
+
+ # Cases with autoescape = False (these are dangerous and assume the caller
+ # has sanitized already)
+ self.assertEqual(
+ linkify("plain string", autoescape=False),
+ "plain string",
+ )
+ self.assertEqual(
+ linkify("https://www.ietf.org", autoescape=False),
+ 'https://www.ietf.org ',
+ )
+ self.assertEqual(
+ linkify('IETF ', autoescape=False),
+ 'IETF ',
+ )
+ self.assertEqual(
+ linkify("somebody@example.com", autoescape=False),
+ 'somebody@example.com ',
+ )
+ # bleach.Linkifier translates the < -> < and > -> > on this one
+ self.assertEqual(
+ linkify("Some Body ", autoescape=False),
+ (
+ 'Some Body <'
+ 'somebody@example.com >'
+ ),
+ )
+ self.assertEqual(
+ linkify("", autoescape=False),
+ "",
+ )
diff --git a/ietf/utils/templatetags/textfilters.py b/ietf/utils/templatetags/textfilters.py
index 3b240740e0..e3bfbe0c56 100644
--- a/ietf/utils/templatetags/textfilters.py
+++ b/ietf/utils/templatetags/textfilters.py
@@ -7,6 +7,7 @@
from django import template
from django.conf import settings
from django.template.defaultfilters import stringfilter
+from django.utils.html import conditional_escape
from django.utils.safestring import mark_safe
import debug # pyflakes:ignore
@@ -71,10 +72,13 @@ def texescape_filter(value):
"A TeX escape filter"
return texescape(value)
-@register.filter
+@register.filter(needs_autoescape=True)
@stringfilter
-def linkify(value):
- text = mark_safe(_linkify(value))
+def linkify(value, autoescape=True):
+ if autoescape:
+ # Escape unless the input was already a SafeString
+ value = conditional_escape(value)
+ text = mark_safe(_linkify(value)) # _linkify is a safe operation
return text
@register.filter
diff --git a/ietf/utils/test_runner.py b/ietf/utils/test_runner.py
index 49d53e1e1d..a23416e87f 100644
--- a/ietf/utils/test_runner.py
+++ b/ietf/utils/test_runner.py
@@ -1,4 +1,4 @@
-# Copyright The IETF Trust 2009-2020, All Rights Reserved
+# Copyright The IETF Trust 2009-2025, All Rights Reserved
# -*- coding: utf-8 -*-
#
# Portion Copyright (C) 2009 Nokia Corporation and/or its subsidiary(-ies).
@@ -48,16 +48,16 @@
import subprocess
import tempfile
import copy
+from contextlib import contextmanager
+
+import boto3
+import botocore.config
import factory.random
import urllib3
import warnings
-from urllib.parse import urlencode
-from fnmatch import fnmatch
-
-from coverage.report import Reporter
-from coverage.results import Numbers
-from coverage.misc import NotPython
+from typing import Callable, Optional
+from urllib.parse import urlencode
import django
from django.conf import settings
@@ -70,7 +70,7 @@
from django.template.loaders.filesystem import Loader as BaseLoader
from django.test.runner import DiscoverRunner
from django.core.management import call_command
-from django.urls import URLResolver # type: ignore
+from django.urls import URLResolver, resolve, Resolver404 # type: ignore
from django.template.backends.django import DjangoTemplates
from django.template.backends.django import Template # type: ignore[attr-defined]
from django.utils import timezone
@@ -82,21 +82,47 @@
import ietf
import ietf.utils.mail
from ietf.utils.management.commands import pyflakes
-from ietf.utils.test_smtpserver import SMTPTestServerDriver
+from ietf.utils.aiosmtpd import SMTPTestServerDriver
from ietf.utils.test_utils import TestCase
+from mypy_boto3_s3.service_resource import Bucket
+
+
+class UrlCoverageWarning(UserWarning):
+ """Warning category for URL coverage-related warnings"""
+ # URLs for which we don't expect patterns to match
+ IGNORE_URLS = (
+ "/_doesnotexist/",
+ "/sitemap.xml.",
+ )
-loaded_templates = set()
-visited_urls = set()
-test_database_name = None
-old_destroy = None
-old_create = None
-template_coverage_collection = None
-code_coverage_collection = None
-url_coverage_collection = None
+class UninterestingPatternWarning(UrlCoverageWarning):
+ """Warning category for unexpected URL match patterns
+
+ These are common, caused by tests that hit a URL that is not selected for
+ coverage checking. The warning is in place to help with a putative future
+ review of whether we're selecting the right patterns to check for coverage.
+ """
+ pass
+
+
+# Configure warnings for reasonable output quantity
+warnings.simplefilter("once", UrlCoverageWarning)
+warnings.simplefilter("ignore", UninterestingPatternWarning)
+
+
+loaded_templates: set[str] = set()
+visited_urls: set[str] = set()
+test_database_name: Optional[str] = None
+old_destroy: Optional[Callable] = None
+old_create: Optional[Callable] = None
+
+template_coverage_collection = False
+url_coverage_collection = False
validation_settings = {"validate_html": None, "validate_html_harder": None, "show_logging": False}
+
def start_vnu_server(port=8888):
"Start a vnu validation server on the indicated port"
vnu = subprocess.Popen(
@@ -226,10 +252,12 @@ def load_and_run_fixtures(verbosity):
fn()
def safe_create_test_db(self, verbosity, *args, **kwargs):
- global test_database_name, old_create
+ if old_create is None:
+ raise RuntimeError("old_create has not been set, cannot proceed")
keepdb = kwargs.get('keepdb', False)
if not keepdb:
print(" Creating test database...")
+ global test_database_name
test_database_name = old_create(self, 0, *args, **kwargs)
if settings.GLOBAL_TEST_FIXTURES:
@@ -239,8 +267,9 @@ def safe_create_test_db(self, verbosity, *args, **kwargs):
return test_database_name
def safe_destroy_test_db(*args, **kwargs):
+ if old_destroy is None:
+ raise RuntimeError("old_destroy has not been set, cannot proceed")
sys.stdout.write('\n')
- global test_database_name, old_destroy
keepdb = kwargs.get('keepdb', False)
if not keepdb:
if settings.DATABASES["default"]["NAME"] != test_database_name:
@@ -259,7 +288,14 @@ def pyflakes_test(self):
path = os.path.join(settings.BASE_DIR)
warnings = []
warnings = pyflakes.checkPaths([path], verbosity=0)
- self.assertEqual([], [str(w) for w in warnings])
+
+ # Filter out warnings about unused global variables
+ filtered_warnings = [
+ w for w in warnings
+ if not re.search(r"`global \w+` is unused: name is never assigned in scope", str(w))
+ ]
+
+ self.assertEqual([], [str(w) for w in filtered_warnings])
class MyPyTest(TestCase):
@@ -347,15 +383,13 @@ class TemplateCoverageLoader(BaseLoader):
is_usable = True
def get_template(self, template_name, skip=None):
- global template_coverage_collection, loaded_templates
- if template_coverage_collection == True:
+ if template_coverage_collection:
loaded_templates.add(str(template_name))
raise TemplateDoesNotExist(template_name)
def record_urls_middleware(get_response):
def record_urls(request):
- global url_coverage_collection, visited_urls
- if url_coverage_collection == True:
+ if url_coverage_collection:
visited_urls.add(request.path)
return get_response(request)
return record_urls
@@ -401,8 +435,9 @@ def do_append(res, p0, p1, item):
res.append((str(item.pattern), item))
return res
+
_all_templates = None
-def get_template_paths(apps=None):
+def get_template_paths(apps=None) -> list[str]:
global _all_templates
if not _all_templates:
# TODO: Add app templates to the full list, if we are using
@@ -411,25 +446,30 @@ def get_template_paths(apps=None):
templatepaths = settings.TEMPLATES[0]['DIRS']
for templatepath in templatepaths:
for dirpath, dirs, files in os.walk(templatepath):
- if ".svn" in dirs:
- dirs.remove(".svn")
- relative_path = dirpath[len(templatepath)+1:]
- for file in files:
- ignore = False
- for pattern in settings.TEST_TEMPLATE_IGNORE:
- if fnmatch(file, pattern):
- ignore = True
- break
- if ignore:
- continue
- if relative_path != "":
- file = os.path.join(relative_path, file)
- templates.add(file)
- if apps:
- templates = [ t for t in templates if t.split(os.path.sep)[0] in apps ]
- _all_templates = templates
+ # glob against path from PROJECT_DIR
+ project_path = pathlib.Path(
+ dirpath.removeprefix(settings.PROJECT_DIR).lstrip("/")
+ )
+ # label entries with name relative to templatepath
+ relative_path = pathlib.Path(
+ dirpath.removeprefix(templatepath).lstrip("/")
+ )
+ if (
+ apps
+ and len(relative_path.parts) > 0
+ and relative_path.parts[0] not in apps
+ ):
+ continue # skip uninteresting apps
+ for filename in files:
+ file_path = project_path / filename
+ if not any(
+ file_path.match(pat) for pat in settings.TEST_TEMPLATE_IGNORE
+ ):
+ templates.add(relative_path / filename)
+ _all_templates = [str(t) for t in templates]
return _all_templates
+
def save_test_results(failures, test_labels):
# Record the test result in a file, in order to be able to check the
# results and avoid re-running tests if we've already run them with OK
@@ -445,50 +485,29 @@ def save_test_results(failures, test_labels):
tfile.write("%s OK\n" % (timestr, ))
tfile.close()
-def set_coverage_checking(flag=True):
+
+def set_template_coverage(flag):
global template_coverage_collection
- global code_coverage_collection
+ orig = template_coverage_collection
+ template_coverage_collection = flag
+ return orig
+
+
+def set_url_coverage(flag):
global url_coverage_collection
- if settings.SERVER_MODE == 'test':
- if flag:
- settings.TEST_CODE_COVERAGE_CHECKER.collector.resume()
- template_coverage_collection = True
- code_coverage_collection = True
- url_coverage_collection = True
- else:
- settings.TEST_CODE_COVERAGE_CHECKER.collector.pause()
- template_coverage_collection = False
- code_coverage_collection = False
- url_coverage_collection = False
-
-class CoverageReporter(Reporter):
- def report(self):
- self.find_file_reporters(None)
-
- total = Numbers()
- result = {"coverage": 0.0, "covered": {}, "format": 5, }
- for fr in self.file_reporters:
- try:
- analysis = self.coverage._analyze(fr)
- nums = analysis.numbers
- missing_nums = sorted(analysis.missing)
- with io.open(analysis.filename, encoding='utf-8') as file:
- lines = file.read().splitlines()
- missing_lines = [ lines[l-1] for l in missing_nums ]
- result["covered"][fr.relative_filename()] = (nums.n_statements, nums.pc_covered/100.0, missing_nums, missing_lines)
- total += nums
- except KeyboardInterrupt: # pragma: not covered
- raise
- except Exception:
- report_it = not self.config.ignore_errors
- if report_it:
- typ, msg = sys.exc_info()[:2]
- if typ is NotPython and not fr.should_be_python():
- report_it = False
- if report_it:
- raise
- result["coverage"] = total.pc_covered/100.0
- return result
+ orig = url_coverage_collection
+ url_coverage_collection = flag
+ return orig
+
+
+@contextmanager
+def disable_coverage():
+ """Context manager/decorator that disables template/url coverage"""
+ orig_template = set_template_coverage(False)
+ orig_url = set_url_coverage(False)
+ yield
+ set_template_coverage(orig_template)
+ set_url_coverage(orig_url)
class CoverageTest(unittest.TestCase):
@@ -521,7 +540,6 @@ def report_test_result(self, test):
( test, test_coverage*100, latest_coverage_version, master_coverage*100, ))
def template_coverage_test(self):
- global loaded_templates
if self.runner.check_coverage:
apps = [ app.split('.')[-1] for app in self.runner.test_apps ]
all = get_template_paths(apps)
@@ -556,44 +574,62 @@ def ignore_pattern(regex, pattern):
)
or pattern.callback == django.views.static.serve)
- patterns = [(regex, re.compile(regex, re.U), obj) for regex, obj in url_patterns
- if not ignore_pattern(regex, obj)]
+            patterns = {
+ regex: obj
+ for regex, obj in url_patterns
+ if not ignore_pattern(regex, obj)
+ }
covered = set()
for url in visited_urls:
- for regex, compiled, obj in patterns:
- if regex not in covered and compiled.match(url[1:]): # strip leading /
- covered.add(regex)
- break
+ try:
+ resolved = resolve(url) # let Django resolve the URL for us
+ except Resolver404:
+ if url not in UrlCoverageWarning.IGNORE_URLS:
+ warnings.warn(
+ f"Unable to resolve visited URL {url}", UrlCoverageWarning
+ )
+ continue
+ if resolved.route not in patterns:
+ warnings.warn(
+ f"WARNING: url resolved to an unexpected pattern (url='{url}', "
+                        f"resolved to r'{resolved.route}')",
+ UninterestingPatternWarning,
+ )
+ continue
+ covered.add(resolved.route)
self.runner.coverage_data["url"] = {
- "coverage": 1.0*len(covered)/len(patterns),
- "covered": dict( (k, (o.lookup_str, k in covered)) for k,p,o in patterns ),
+ "coverage": 1.0 * len(covered) / len(patterns),
+ "covered": dict(
+ (k, (o.lookup_str, k in covered)) for k, o in patterns.items()
+ ),
"format": 4,
- }
+ }
self.report_test_result("url")
else:
self.skipTest("Coverage switched off with --skip-coverage")
def code_coverage_test(self):
- if self.runner.check_coverage:
- include = [ os.path.join(path, '*') for path in self.runner.test_paths ]
- checker = self.runner.code_coverage_checker
- checker.stop()
+ if (
+ self.runner.check_coverage
+ and settings.TEST_CODE_COVERAGE_CHECKER is not None
+ ):
+ coverage_manager = settings.TEST_CODE_COVERAGE_CHECKER
+ coverage_manager.stop()
# Save to the .coverage file
- checker.save()
+ coverage_manager.save()
# Apply the configured and requested omit and include data
- checker.config.from_args(ignore_errors=None, omit=settings.TEST_CODE_COVERAGE_EXCLUDE_FILES,
- include=include, file=None)
- for pattern in settings.TEST_CODE_COVERAGE_EXCLUDE_LINES:
- checker.exclude(pattern)
# Maybe output an HTML report
if self.runner.run_full_test_suite and self.runner.html_report:
- checker.html_report(directory=settings.TEST_CODE_COVERAGE_REPORT_DIR)
- # In any case, build a dictionary with per-file data for this run
- reporter = CoverageReporter(checker, checker.config)
- self.runner.coverage_data["code"] = reporter.report()
+ coverage_manager.checker.html_report(
+ directory=settings.TEST_CODE_COVERAGE_REPORT_DIR
+ )
+ # Generate the output report data
+ self.runner.coverage_data["code"] = coverage_manager.report(
+ include=[str(pathlib.Path(p) / "*") for p in self.runner.test_paths]
+ )
self.report_test_result("code")
else:
self.skipTest("Coverage switched off with --skip-coverage")
@@ -722,9 +758,25 @@ def add_arguments(cls, parser):
parser.add_argument('--rerun-until-failure',
action='store_true', dest='rerun', default=False,
help='Run the indicated tests in a loop until a failure occurs. ' )
-
- def __init__(self, ignore_lower_coverage=False, skip_coverage=False, save_version_coverage=None, html_report=None, permit_mixed_migrations=None, show_logging=None, validate_html=None, validate_html_harder=None, rerun=None, **kwargs):
- #
+ parser.add_argument('--no-manage-blobstore', action='store_false', dest='manage_blobstore',
+ help='Disable creating/deleting test buckets in the blob store.'
+ 'When this argument is used, a set of buckets with "test-" prefixed to their '
+ 'names must already exist.')
+
+ def __init__(
+ self,
+ ignore_lower_coverage=False,
+ skip_coverage=False,
+ save_version_coverage=None,
+ html_report=None,
+ permit_mixed_migrations=None,
+ show_logging=None,
+ validate_html=None,
+ validate_html_harder=None,
+ rerun=None,
+ manage_blobstore=True,
+ **kwargs
+ ): #
self.ignore_lower_coverage = ignore_lower_coverage
self.check_coverage = not skip_coverage
self.save_version_coverage = save_version_coverage
@@ -733,7 +785,6 @@ def __init__(self, ignore_lower_coverage=False, skip_coverage=False, save_versio
self.show_logging = show_logging
self.rerun = rerun
self.test_labels = None
- global validation_settings
validation_settings["validate_html"] = self if validate_html else None
validation_settings["validate_html_harder"] = self if validate_html and validate_html_harder else None
validation_settings["show_logging"] = show_logging
@@ -752,11 +803,10 @@ def __init__(self, ignore_lower_coverage=False, skip_coverage=False, save_versio
# contains parent classes to later subclasses, the parent classes will determine the ordering, so use the most
# specific classes necessary to get the right ordering:
self.reorder_by = (PyFlakesTestCase, MyPyTest,) + self.reorder_by + (StaticLiveServerTestCase, TemplateTagTest, CoverageTest,)
+ #self.buckets = set()
+ self.blobstoremanager = TestBlobstoreManager() if manage_blobstore else None
def setup_test_environment(self, **kwargs):
- global template_coverage_collection
- global url_coverage_collection
-
ietf.utils.mail.test_mode = True
ietf.utils.mail.SMTP_ADDR['ip4'] = '127.0.0.1'
ietf.utils.mail.SMTP_ADDR['port'] = 2025
@@ -793,23 +843,12 @@ def setup_test_environment(self, **kwargs):
"covered": {},
"format": 1,
},
- "migration": {
- "present": {},
- "format": 3,
- }
}
settings.TEMPLATES[0]['OPTIONS']['loaders'] = ('ietf.utils.test_runner.TemplateCoverageLoader',) + settings.TEMPLATES[0]['OPTIONS']['loaders']
settings.MIDDLEWARE = ('ietf.utils.test_runner.record_urls_middleware',) + tuple(settings.MIDDLEWARE)
- self.code_coverage_checker = settings.TEST_CODE_COVERAGE_CHECKER
- if not self.code_coverage_checker._started:
- sys.stderr.write(" ** Warning: In %s: Expected the coverage checker to have\n"
- " been started already, but it wasn't. Doing so now. Coverage numbers\n"
- " will be off, though.\n" % __name__)
- self.code_coverage_checker.start()
-
if settings.SITE_ID != 1:
print(" Changing SITE_ID to '1' during testing.")
settings.SITE_ID = 1
@@ -837,7 +876,7 @@ def setup_test_environment(self, **kwargs):
try:
# remember the value so ietf.utils.mail.send_smtp() will use the same
ietf.utils.mail.SMTP_ADDR['port'] = base + offset
- self.smtpd_driver = SMTPTestServerDriver((ietf.utils.mail.SMTP_ADDR['ip4'],ietf.utils.mail.SMTP_ADDR['port']),None)
+ self.smtpd_driver = SMTPTestServerDriver(ietf.utils.mail.SMTP_ADDR['ip4'],ietf.utils.mail.SMTP_ADDR['port'], None)
self.smtpd_driver.start()
print((" Running an SMTP test server on %(ip4)s:%(port)s to catch outgoing email." % ietf.utils.mail.SMTP_ADDR))
break
@@ -936,6 +975,9 @@ def setup_test_environment(self, **kwargs):
print(" (extra pedantically)")
self.vnu = start_vnu_server()
+ if self.blobstoremanager is not None:
+ self.blobstoremanager.createTestBlobstores()
+
super(IetfTestRunner, self).setup_test_environment(**kwargs)
def teardown_test_environment(self, **kwargs):
@@ -966,6 +1008,9 @@ def teardown_test_environment(self, **kwargs):
if self.vnu:
self.vnu.terminate()
+ if self.blobstoremanager is not None:
+ self.blobstoremanager.destroyTestBlobstores()
+
super(IetfTestRunner, self).teardown_test_environment(**kwargs)
def validate(self, testcase):
@@ -1103,9 +1148,8 @@ def _extra_tests(self):
),
]
if self.check_coverage:
- global template_coverage_collection, code_coverage_collection, url_coverage_collection
+ global template_coverage_collection, url_coverage_collection
template_coverage_collection = True
- code_coverage_collection = True
url_coverage_collection = True
tests += [
PyFlakesTestCase(test_runner=self, methodName='pyflakes_test'),
@@ -1189,34 +1233,43 @@ def run_tests(self, test_labels, extra_tests=None, **kwargs):
return failures
-class IetfLiveServerTestCase(StaticLiveServerTestCase):
- @classmethod
- def setUpClass(cls):
- set_coverage_checking(False)
- super(IetfLiveServerTestCase, cls).setUpClass()
-
- def setUp(self):
- super(IetfLiveServerTestCase, self).setUp()
- # LiveServerTestCase uses TransactionTestCase which seems to
- # somehow interfere with the fixture loading process in
- # IetfTestRunner when running multiple tests (the first test
- # is fine, in the next ones the fixtures have been wiped) -
- # this is no doubt solvable somehow, but until then we simply
- # recreate them here
- from ietf.person.models import Person
- if not Person.objects.exists():
- load_and_run_fixtures(verbosity=0)
- self.replaced_settings = dict()
- if hasattr(settings, 'IDTRACKER_BASE_URL'):
- self.replaced_settings['IDTRACKER_BASE_URL'] = settings.IDTRACKER_BASE_URL
- settings.IDTRACKER_BASE_URL = self.live_server_url
- @classmethod
- def tearDownClass(cls):
- super(IetfLiveServerTestCase, cls).tearDownClass()
- set_coverage_checking(True)
-
- def tearDown(self):
- for k, v in self.replaced_settings.items():
- setattr(settings, k, v)
- super().tearDown()
+class TestBlobstoreManager():
+ # N.B. buckets and blobstore are intentional Class-level attributes
+ buckets: set[Bucket] = set()
+
+ blobstore = boto3.resource("s3",
+ endpoint_url="http://blobstore:9000",
+ aws_access_key_id="minio_root",
+ aws_secret_access_key="minio_pass",
+ aws_session_token=None,
+ config = botocore.config.Config(
+ request_checksum_calculation="when_required",
+ response_checksum_validation="when_required",
+ signature_version="s3v4",
+ ),
+ #config=botocore.config.Config(signature_version=botocore.UNSIGNED),
+ verify=False
+ )
+
+ def createTestBlobstores(self):
+ for storagename in settings.ARTIFACT_STORAGE_NAMES:
+ bucketname = f"test-{storagename}"
+ try:
+ bucket = self.blobstore.create_bucket(Bucket=bucketname)
+ self.buckets.add(bucket)
+ except self.blobstore.meta.client.exceptions.BucketAlreadyOwnedByYou:
+ bucket = self.blobstore.Bucket(bucketname)
+ self.buckets.add(bucket)
+
+ def destroyTestBlobstores(self):
+ self.emptyTestBlobstores(destroy=True)
+
+ def emptyTestBlobstores(self, destroy=False):
+ # debug.show('f"Asked to empty test blobstores with destroy={destroy}"')
+ for bucket in self.buckets:
+ bucket.objects.delete()
+ if destroy:
+ bucket.delete()
+ if destroy:
+ self.buckets = set()
diff --git a/ietf/utils/test_smtpserver.py b/ietf/utils/test_smtpserver.py
deleted file mode 100644
index 66675aa0b1..0000000000
--- a/ietf/utils/test_smtpserver.py
+++ /dev/null
@@ -1,92 +0,0 @@
-# Copyright The IETF Trust 2014-2020, All Rights Reserved
-# -*- coding: utf-8 -*-
-
-
-import smtpd
-import threading
-import asyncore
-
-import debug # pyflakes:ignore
-
-class AsyncCoreLoopThread(object):
-
- def wrap_loop(self, exit_condition, timeout=1.0, use_poll=False, map=None):
- if map is None:
- map = asyncore.socket_map
- while map and not exit_condition:
- asyncore.loop(timeout=1.0, use_poll=False, map=map, count=1)
-
- def start(self):
- """Start the listening service"""
- self.exit_condition = []
- kwargs={'exit_condition':self.exit_condition,'timeout':1.0}
- self.thread = threading.Thread(target=self.wrap_loop, kwargs=kwargs)
- self.thread.daemon = True
- self.thread.daemon = True
- self.thread.start()
-
- def stop(self):
- """Stop the listening service"""
- self.exit_condition.append(True)
- self.thread.join()
-
-
-class SMTPTestChannel(smtpd.SMTPChannel):
-
-# mail_options = ['BODY=8BITMIME', 'SMTPUTF8']
-
- def smtp_RCPT(self, arg):
- if not self.mailfrom:
- self.push(str('503 Error: need MAIL command'))
- return
- arg = self._strip_command_keyword('TO:', arg)
- address, __ = self._getaddr(arg)
- if not address:
- self.push(str('501 Syntax: RCPT TO: '))
- return
- if "poison" in address:
- self.push(str('550 Error: Not touching that'))
- return
- self.rcpt_options = []
- self.rcpttos.append(address)
- self.push(str('250 Ok'))
-
-class SMTPTestServer(smtpd.SMTPServer):
-
- def __init__(self,localaddr,remoteaddr,inbox):
- if inbox is not None:
- self.inbox=inbox
- else:
- self.inbox = []
- smtpd.SMTPServer.__init__(self,localaddr,remoteaddr)
-
- def handle_accept(self):
- pair = self.accept()
- if pair is not None:
- conn, addr = pair
- #channel = SMTPTestChannel(self, conn, addr)
- SMTPTestChannel(self, conn, addr)
-
- def process_message(self, peer, mailfrom, rcpttos, data, mail_options=None, rcpt_options=None):
- self.inbox.append(data)
-
-
-class SMTPTestServerDriver(object):
- def __init__(self, localaddr, remoteaddr, inbox=None):
- self.localaddr=localaddr
- self.remoteaddr=remoteaddr
- if inbox is not None:
- self.inbox = inbox
- else:
- self.inbox = []
- self.thread_driver = None
-
- def start(self):
- self.smtpserver = SMTPTestServer(self.localaddr,self.remoteaddr,self.inbox)
- self.thread_driver = AsyncCoreLoopThread()
- self.thread_driver.start()
-
- def stop(self):
- if self.thread_driver:
- self.thread_driver.stop()
-
diff --git a/ietf/utils/test_utils.py b/ietf/utils/test_utils.py
index 86c5a0c1c3..5faf83d93f 100644
--- a/ietf/utils/test_utils.py
+++ b/ietf/utils/test_utils.py
@@ -38,6 +38,7 @@
import re
import email
import html5lib
+import rest_framework.test
import requests_mock
import shutil
import sys
@@ -312,3 +313,11 @@ def tearDown(self):
shutil.rmtree(dir)
self.requests_mock.stop()
super().tearDown()
+
+
+class APITestCase(TestCase):
+ """Test case that uses rest_framework's APIClient
+
+ This is equivalent to rest_framework.test.APITestCase, but picks up our TestCase customizations.
+ """
+ client_class = rest_framework.test.APIClient
diff --git a/ietf/utils/tests.py b/ietf/utils/tests.py
index 0a1986a608..99c33f34b3 100644
--- a/ietf/utils/tests.py
+++ b/ietf/utils/tests.py
@@ -1,4 +1,4 @@
-# Copyright The IETF Trust 2014-2020, All Rights Reserved
+# Copyright The IETF Trust 2014-2025, All Rights Reserved
# -*- coding: utf-8 -*-
@@ -11,7 +11,7 @@
import shutil
import types
-from mock import call, patch
+from unittest.mock import call, patch
from pyquery import PyQuery
from typing import Dict, List # pyflakes:ignore
@@ -19,10 +19,11 @@
from email.mime.image import MIMEImage
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
-from fnmatch import fnmatch
from importlib import import_module
from textwrap import dedent
from tempfile import mkdtemp
+from xml2rfc import log as xml2rfc_log
+from xml2rfc.util.date import extract_date as xml2rfc_extract_date
from django.apps import apps
from django.contrib.auth.models import User
@@ -53,11 +54,14 @@
decode_header_value,
show_that_mail_was_sent,
)
-from ietf.utils.test_runner import get_template_paths, set_coverage_checking
+from ietf.utils.test_runner import (
+ get_template_paths,
+ set_template_coverage,
+ set_url_coverage,
+)
from ietf.utils.test_utils import TestCase, unicontent
-from ietf.utils.text import parse_unicode
from ietf.utils.timezone import timezone_not_near_midnight
-from ietf.utils.xmldraft import XMLDraft
+from ietf.utils.xmldraft import XMLDraft, InvalidMetadataError, capture_xml2rfc_output
class SendingMail(TestCase):
@@ -310,15 +314,16 @@ def qualified(name):
return list(callbacks)
-class TemplateChecksTestCase(TestCase):
+class TemplateChecksTestCase(TestCase): # pragma: no cover
paths = [] # type: List[str]
templates = {} # type: Dict[str, Template]
def setUp(self):
super().setUp()
- set_coverage_checking(False)
- self.paths = list(get_template_paths())
+ set_template_coverage(False)
+ set_url_coverage(False)
+ self.paths = get_template_paths() # already filtered ignores
self.paths.sort()
for path in self.paths:
try:
@@ -327,17 +332,14 @@ def setUp(self):
pass
def tearDown(self):
- set_coverage_checking(True)
+ set_template_coverage(True)
+ set_url_coverage(True)
super().tearDown()
def test_parse_templates(self):
errors = []
for path in self.paths:
- for pattern in settings.TEST_TEMPLATE_IGNORE:
- if fnmatch(path, pattern):
- continue
- if not path in self.templates:
-
+ if path not in self.templates:
try:
get_template(path)
except Exception as e:
@@ -544,7 +546,7 @@ def test_get_refs_v2(self):
def test_parse_creation_date(self):
# override date_today to avoid skew when test runs around midnight
today = datetime.date.today()
- with patch("ietf.utils.xmldraft.date_today", return_value=today):
+ with capture_xml2rfc_output(), patch("ietf.utils.xmldraft.date_today", return_value=today):
# Note: using a dict as a stand-in for XML elements, which rely on the get() method
self.assertEqual(
XMLDraft.parse_creation_date({"year": "2022", "month": "11", "day": "24"}),
@@ -590,6 +592,74 @@ def test_parse_creation_date(self):
),
datetime.date(today.year, 1 if today.month != 1 else 2, 15),
)
+ # Some exception-inducing conditions
+ with self.assertRaises(
+ InvalidMetadataError,
+ msg="raise an InvalidMetadataError if a year-only date is not current",
+ ):
+ XMLDraft.parse_creation_date(
+ {
+ "year": str(today.year - 1),
+ "month": "",
+ "day": "",
+ }
+ )
+ with self.assertRaises(
+ InvalidMetadataError,
+ msg="raise an InvalidMetadataError for a non-numeric year"
+ ):
+ XMLDraft.parse_creation_date(
+ {
+ "year": "two thousand twenty-five",
+ "month": "2",
+ "day": "28",
+ }
+ )
+ with self.assertRaises(
+ InvalidMetadataError,
+ msg="raise an InvalidMetadataError for an invalid month"
+ ):
+ XMLDraft.parse_creation_date(
+ {
+ "year": "2024",
+ "month": "13",
+ "day": "28",
+ }
+ )
+ with self.assertRaises(
+ InvalidMetadataError,
+ msg="raise an InvalidMetadataError for a misspelled month"
+ ):
+ XMLDraft.parse_creation_date(
+ {
+ "year": "2024",
+ "month": "Oktobur",
+ "day": "28",
+ }
+ )
+ with self.assertRaises(
+ InvalidMetadataError,
+ msg="raise an InvalidMetadataError for an invalid day"
+ ):
+ XMLDraft.parse_creation_date(
+ {
+ "year": "2024",
+ "month": "feb",
+ "day": "31",
+ }
+ )
+ with self.assertRaises(
+ InvalidMetadataError,
+ msg="raise an InvalidMetadataError for a non-numeric day"
+ ):
+ XMLDraft.parse_creation_date(
+ {
+ "year": "2024",
+ "month": "feb",
+ "day": "twenty-four",
+ }
+ )
+
def test_parse_docname(self):
with self.assertRaises(ValueError) as cm:
@@ -639,6 +709,14 @@ def test_render_author_name(self):
)),
"Joanna Q. Public",
)
+ self.assertEqual(
+ XMLDraft.render_author_name(lxml.etree.Element(
+ "author",
+ fullname=chr(340)+"ich",
+ asciiFullname="Rich UTF-8",
+ )),
+ chr(340)+"ich (Rich UTF-8)",
+ )
self.assertEqual(
XMLDraft.render_author_name(lxml.etree.Element(
"author",
@@ -671,6 +749,56 @@ def test_render_author_name(self):
"J. Q.",
)
+ @patch("ietf.utils.xmldraft.XMLDraft.__init__", return_value=None)
+ def test_get_title(self, mock_init):
+ xmldraft = XMLDraft("fake")
+ self.assertTrue(mock_init.called)
+ # Stub XML that does not have a front/title element
+ xmldraft.xmlroot = lxml.etree.XML(
+ " " # no title
+ )
+ self.assertEqual(xmldraft.get_title(), "")
+
+ # Stub XML that has a front/title element
+ xmldraft.xmlroot = lxml.etree.XML(
+ "This Is the Title "
+ )
+ self.assertEqual(xmldraft.get_title(), "This Is the Title")
+
+
+ def test_capture_xml2rfc_output(self):
+ """capture_xml2rfc_output reroutes and captures xml2rfc logs"""
+ orig_write_out = xml2rfc_log.write_out
+ orig_write_err = xml2rfc_log.write_err
+ with capture_xml2rfc_output() as outer_log_streams: # ensure no output
+ # such meta! very Inception!
+ with capture_xml2rfc_output() as inner_log_streams:
+ # arbitrary xml2rfc method that triggers a log, nothing special otherwise
+ xml2rfc_extract_date({"year": "fish"}, datetime.date(2025,3,1))
+ self.assertNotEqual(inner_log_streams, outer_log_streams)
+ self.assertEqual(xml2rfc_log.write_out, outer_log_streams["stdout"], "out stream should be restored")
+ self.assertEqual(xml2rfc_log.write_err, outer_log_streams["stderr"], "err stream should be restored")
+ self.assertEqual(xml2rfc_log.write_out, orig_write_out, "original out stream should be restored")
+ self.assertEqual(xml2rfc_log.write_err, orig_write_err, "original err stream should be restored")
+
+ # don't happen to get any output on stdout and not paranoid enough to force some, just test stderr
+ self.assertGreater(len(inner_log_streams["stderr"].getvalue()), 0, "want output on inner streams")
+ self.assertEqual(len(outer_log_streams["stdout"].getvalue()), 0, "no output on outer streams")
+ self.assertEqual(len(outer_log_streams["stderr"].getvalue()), 0, "no output on outer streams")
+
+ def test_capture_xml2rfc_output_exception_handling(self):
+ """capture_xml2rfc_output restores streams after an exception"""
+ orig_write_out = xml2rfc_log.write_out
+ orig_write_err = xml2rfc_log.write_err
+ with capture_xml2rfc_output() as outer_log_streams: # ensure no output
+ with self.assertRaises(RuntimeError), capture_xml2rfc_output() as inner_log_streams:
+ raise RuntimeError("nooo")
+ self.assertNotEqual(inner_log_streams, outer_log_streams)
+ self.assertEqual(xml2rfc_log.write_out, outer_log_streams["stdout"], "out stream should be restored")
+ self.assertEqual(xml2rfc_log.write_err, outer_log_streams["stderr"], "err stream should be restored")
+ self.assertEqual(xml2rfc_log.write_out, orig_write_out, "original out stream should be restored")
+ self.assertEqual(xml2rfc_log.write_err, orig_write_err, "original err stream should be restored")
+
class NameTests(TestCase):
@@ -735,24 +863,6 @@ def test_assertion(self):
assertion('False')
settings.SERVER_MODE = 'test'
-class TestRFC2047Strings(TestCase):
- def test_parse_unicode(self):
- names = (
- ('=?utf-8?b?4Yuz4YuK4Ym1IOGJoOGJgOGIiA==?=', 'ዳዊት በቀለ'),
- ('=?utf-8?b?5Li9IOmDnA==?=', '丽 郜'),
- ('=?utf-8?b?4KSV4KSu4KWN4KSs4KWL4KScIOCkoeCkvuCksA==?=', 'कम्बोज डार'),
- ('=?utf-8?b?zpfPgc6szrrOu861zrnOsSDOm865z4zOvc+Ezrc=?=', 'Ηράκλεια Λιόντη'),
- ('=?utf-8?b?15nXqdeo15DXnCDXqNeV15bXoNek15zXkw==?=', 'ישראל רוזנפלד'),
- ('=?utf-8?b?5Li95Y2OIOeahw==?=', '丽华 皇'),
- ('=?utf-8?b?77ul77qu766V77qzIO+tlu+7ru+vvu+6ju+7pw==?=', 'ﻥﺮﮕﺳ ﭖﻮﯾﺎﻧ'),
- ('=?utf-8?b?77uh77uu77qz77uu76++IO+6su+7tO+7p++6jSDvurDvu6Pvuo7vu6jvr74=?=', 'ﻡﻮﺳﻮﯾ ﺲﻴﻧﺍ ﺰﻣﺎﻨﯾ'),
- ('=?utf-8?b?ScOxaWdvIFNhbsOnIEliw6HDsWV6IGRlIGxhIFBlw7Fh?=', 'Iñigo Sanç Ibáñez de la Peña'),
- ('Mart van Oostendorp', 'Mart van Oostendorp'),
- ('', ''),
- )
- for encoded_str, unicode in names:
- self.assertEqual(unicode, parse_unicode(encoded_str))
-
class TestAndroidSiteManifest(TestCase):
def test_manifest(self):
r = self.client.get(urlreverse('site.webmanifest'))
diff --git a/ietf/utils/tests_coverage.py b/ietf/utils/tests_coverage.py
new file mode 100644
index 0000000000..68795994a7
--- /dev/null
+++ b/ietf/utils/tests_coverage.py
@@ -0,0 +1,56 @@
+# Copyright The IETF Trust 2025, All Rights Reserved
+"""Tests of the coverage.py module"""
+
+from unittest import mock
+
+from django.test import override_settings
+
+from .coverage import CoverageManager
+from .test_utils import TestCase
+
+
+class CoverageManagerTests(TestCase):
+ @override_settings(
+ BASE_DIR="/path/to/project/ietf",
+ TEST_CODE_COVERAGE_EXCLUDE_FILES=["a.py"],
+ TEST_CODE_COVERAGE_EXCLUDE_LINES=["some-regex"],
+ )
+ @mock.patch("ietf.utils.coverage.Coverage")
+ def test_coverage_manager(self, mock_coverage):
+ """CoverageManager managed coverage correctly in non-production mode
+
+ Presumes we're not running tests in production mode.
+ """
+ cm = CoverageManager()
+ self.assertFalse(cm.started)
+
+ cm.start()
+ self.assertTrue(cm.started)
+ self.assertEqual(cm.checker, mock_coverage.return_value)
+ self.assertTrue(mock_coverage.called)
+ coverage_kwargs = mock_coverage.call_args.kwargs
+ self.assertEqual(coverage_kwargs["source"], ["/path/to/project/ietf"])
+ self.assertEqual(coverage_kwargs["omit"], ["a.py"])
+ self.assertTrue(isinstance(cm.checker.exclude, mock.Mock))
+ assert isinstance(cm.checker.exclude, mock.Mock) # for type checker
+ self.assertEqual(cm.checker.exclude.call_count, 1)
+ cm.checker.exclude.assert_called_with("some-regex")
+
+ @mock.patch("ietf.utils.coverage.Coverage")
+ def test_coverage_manager_is_defanged_in_production(self, mock_coverage):
+ """CoverageManager is a no-op in production mode"""
+ # Be careful faking settings.SERVER_MODE, but there's really no other way to
+ # test this.
+ with override_settings(SERVER_MODE="production"):
+ cm = CoverageManager()
+ cm.start()
+
+ # Check that nothing actually happened
+ self.assertFalse(mock_coverage.called)
+ self.assertIsNone(cm.checker)
+ self.assertFalse(cm.started)
+
+ # Check that other methods are guarded appropriately
+ cm.stop()
+ cm.save()
+ self.assertIsNone(cm.report())
diff --git a/ietf/utils/tests_meetecho.py b/ietf/utils/tests_meetecho.py
index a10ac68c27..c076a3df74 100644
--- a/ietf/utils/tests_meetecho.py
+++ b/ietf/utils/tests_meetecho.py
@@ -98,7 +98,7 @@ def test_schedule_meeting(self):
api_response = api.schedule_meeting(
wg_token='my-token',
room_id=18,
- start_time=datetime.datetime(2021, 9, 14, 10, 0, 0, tzinfo=datetime.timezone.utc),
+ start_time=datetime.datetime(2021, 9, 14, 10, 0, 0, tzinfo=datetime.UTC),
duration=datetime.timedelta(minutes=130),
description='interim-2021-wgname-01',
extrainfo='message for staff',
@@ -127,7 +127,7 @@ def test_schedule_meeting(self):
)
# same time in different time zones
for start_time in [
- datetime.datetime(2021, 9, 14, 10, 0, 0, tzinfo=datetime.timezone.utc),
+ datetime.datetime(2021, 9, 14, 10, 0, 0, tzinfo=datetime.UTC),
datetime.datetime(2021, 9, 14, 7, 0, 0, tzinfo=ZoneInfo('America/Halifax')),
datetime.datetime(2021, 9, 14, 13, 0, 0, tzinfo=ZoneInfo('Europe/Kiev')),
datetime.datetime(2021, 9, 14, 5, 0, 0, tzinfo=ZoneInfo('Pacific/Easter')),
@@ -198,7 +198,7 @@ def test_fetch_meetings(self):
'3d55bce0-535e-4ba8-bb8e-734911cf3c32': {
'room': {
'id': 18,
- 'start_time': datetime.datetime(2021, 9, 14, 10, 0, 0, tzinfo=datetime.timezone.utc),
+ 'start_time': datetime.datetime(2021, 9, 14, 10, 0, 0, tzinfo=datetime.UTC),
'duration': datetime.timedelta(minutes=130),
'description': 'interim-2021-wgname-01',
},
@@ -208,7 +208,7 @@ def test_fetch_meetings(self):
'e68e96d4-d38f-475b-9073-ecab46ca96a5': {
'room': {
'id': 23,
- 'start_time': datetime.datetime(2021, 9, 15, 14, 30, 0, tzinfo=datetime.timezone.utc),
+ 'start_time': datetime.datetime(2021, 9, 15, 14, 30, 0, tzinfo=datetime.UTC),
'duration': datetime.timedelta(minutes=30),
'description': 'interim-2021-wgname-02',
},
@@ -386,7 +386,7 @@ def test_request_helper_exception(self):
def test_time_serialization(self):
"""Time de/serialization should be consistent"""
- time = timezone.now().astimezone(datetime.timezone.utc).replace(microsecond=0) # cut off to 0 microseconds
+ time = timezone.now().astimezone(datetime.UTC).replace(microsecond=0) # cut off to 0 microseconds
api = MeetechoAPI(API_BASE, CLIENT_ID, CLIENT_SECRET)
self.assertEqual(api._deserialize_time(api._serialize_time(time)), time)
@@ -400,7 +400,7 @@ def test_conference_from_api_dict(self):
'session-1-uuid': {
'room': {
'id': 1,
- 'start_time': datetime.datetime(2022,2,4,1,2,3, tzinfo=datetime.timezone.utc),
+ 'start_time': datetime.datetime(2022,2,4,1,2,3, tzinfo=datetime.UTC),
'duration': datetime.timedelta(minutes=45),
'description': 'some-description',
},
@@ -410,7 +410,7 @@ def test_conference_from_api_dict(self):
'session-2-uuid': {
'room': {
'id': 2,
- 'start_time': datetime.datetime(2022,2,5,4,5,6, tzinfo=datetime.timezone.utc),
+ 'start_time': datetime.datetime(2022,2,5,4,5,6, tzinfo=datetime.UTC),
'duration': datetime.timedelta(minutes=90),
'description': 'another-description',
},
@@ -427,7 +427,7 @@ def test_conference_from_api_dict(self):
id=1,
public_id='session-1-uuid',
description='some-description',
- start_time=datetime.datetime(2022, 2, 4, 1, 2, 3, tzinfo=datetime.timezone.utc),
+ start_time=datetime.datetime(2022, 2, 4, 1, 2, 3, tzinfo=datetime.UTC),
duration=datetime.timedelta(minutes=45),
url='https://example.com/some/url',
deletion_token='delete-me',
@@ -437,7 +437,7 @@ def test_conference_from_api_dict(self):
id=2,
public_id='session-2-uuid',
description='another-description',
- start_time=datetime.datetime(2022, 2, 5, 4, 5, 6, tzinfo=datetime.timezone.utc),
+ start_time=datetime.datetime(2022, 2, 5, 4, 5, 6, tzinfo=datetime.UTC),
duration=datetime.timedelta(minutes=90),
url='https://example.com/another/url',
deletion_token='delete-me-too',
@@ -453,7 +453,7 @@ def test_fetch(self, mock_fetch, _):
'session-1-uuid': {
'room': {
'id': 1,
- 'start_time': datetime.datetime(2022,2,4,1,2,3, tzinfo=datetime.timezone.utc),
+ 'start_time': datetime.datetime(2022,2,4,1,2,3, tzinfo=datetime.UTC),
'duration': datetime.timedelta(minutes=45),
'description': 'some-description',
},
@@ -472,7 +472,7 @@ def test_fetch(self, mock_fetch, _):
id=1,
public_id='session-1-uuid',
description='some-description',
- start_time=datetime.datetime(2022,2,4,1,2,3, tzinfo=datetime.timezone.utc),
+ start_time=datetime.datetime(2022,2,4,1,2,3, tzinfo=datetime.UTC),
duration=datetime.timedelta(minutes=45),
url='https://example.com/some/url',
deletion_token='delete-me',
@@ -488,7 +488,7 @@ def test_create(self, mock_schedule, _):
'session-1-uuid': {
'room': {
'id': 1, # value should match session_id param to cm.create() below
- 'start_time': datetime.datetime(2022,2,4,1,2,3, tzinfo=datetime.timezone.utc),
+ 'start_time': datetime.datetime(2022,2,4,1,2,3, tzinfo=datetime.UTC),
'duration': datetime.timedelta(minutes=45),
'description': 'some-description',
},
@@ -506,7 +506,7 @@ def test_create(self, mock_schedule, _):
id=1,
public_id='session-1-uuid',
description='some-description',
- start_time=datetime.datetime(2022,2,4,1,2,3, tzinfo=datetime.timezone.utc),
+ start_time=datetime.datetime(2022,2,4,1,2,3, tzinfo=datetime.UTC),
duration=datetime.timedelta(minutes=45),
url='https://example.com/some/url',
deletion_token='delete-me',
@@ -547,7 +547,8 @@ def test_add(self, mock_add, mock_wg_token):
sm = SlidesManager(settings.MEETECHO_API_CONFIG)
session = SessionFactory()
slides_doc = DocumentFactory(type_id="slides")
- sm.add(session, slides_doc, 13)
+ retval = sm.add(session, slides_doc, 13)
+ self.assertIs(retval, True)
self.assertTrue(mock_wg_token.called)
self.assertTrue(mock_add.called)
self.assertEqual(
@@ -565,6 +566,14 @@ def test_add(self, mock_add, mock_wg_token):
),
)
+ # Test return value when no update is sent. Really ought to do a more
+ # careful test of the _should_send_update() method.
+ sm = SlidesManager(
+ settings.MEETECHO_API_CONFIG | {"slides_notify_time": None}
+ )
+ retval = sm.add(session, slides_doc, 14)
+ self.assertIs(retval, False)
+
@patch("ietf.utils.meetecho.MeetechoAPI.update_slide_decks")
@patch("ietf.utils.meetecho.MeetechoAPI.delete_slide_deck")
def test_delete(self, mock_delete, mock_update, mock_wg_token):
@@ -580,7 +589,8 @@ def test_delete(self, mock_delete, mock_update, mock_wg_token):
sm.delete(session, slides_doc) # can't remove slides still attached to the session
self.assertFalse(any([mock_wg_token.called, mock_delete.called, mock_update.called]))
- sm.delete(session, removed_slides_doc)
+ retval = sm.delete(session, removed_slides_doc)
+ self.assertIs(retval, True)
self.assertTrue(mock_wg_token.called)
self.assertTrue(mock_delete.called)
self.assertEqual(
@@ -609,9 +619,18 @@ def test_delete(self, mock_delete, mock_update, mock_wg_token):
# Delete the other session and check that we don't make the update call
slides.delete()
- sm.delete(session, slides_doc)
+ retval = sm.delete(session, slides_doc)
+ self.assertIs(retval, True)
self.assertTrue(mock_delete.called)
self.assertFalse(mock_update.called)
+
+ # Test return value when no update is sent. Really ought to do a more
+ # careful test of the _should_send_update() method.
+ sm = SlidesManager(
+ settings.MEETECHO_API_CONFIG | {"slides_notify_time": None}
+ )
+ retval = sm.delete(session, slides_doc)
+ self.assertIs(retval, False)
@patch("ietf.utils.meetecho.MeetechoAPI.delete_slide_deck")
@patch("ietf.utils.meetecho.MeetechoAPI.add_slide_deck")
@@ -619,7 +638,8 @@ def test_revise(self, mock_add, mock_delete, mock_wg_token):
sm = SlidesManager(settings.MEETECHO_API_CONFIG)
slides = SessionPresentationFactory(document__type_id="slides", order=23)
slides_doc = slides.document
- sm.revise(slides.session, slides.document)
+ retval = sm.revise(slides.session, slides_doc)
+ self.assertIs(retval, True)
self.assertTrue(mock_wg_token.called)
self.assertTrue(mock_delete.called)
self.assertEqual(
@@ -642,13 +662,22 @@ def test_revise(self, mock_add, mock_delete, mock_wg_token):
),
)
+ # Test return value when no update is sent. Really ought to do a more
+ # careful test of the _should_send_update() method.
+ sm = SlidesManager(
+ settings.MEETECHO_API_CONFIG | {"slides_notify_time": None}
+ )
+ retval = sm.revise(slides.session, slides_doc)
+ self.assertIs(retval, False)
+
@patch("ietf.utils.meetecho.MeetechoAPI.update_slide_decks")
def test_send_update(self, mock_send_update, mock_wg_token):
sm = SlidesManager(settings.MEETECHO_API_CONFIG)
slides = SessionPresentationFactory(document__type_id="slides")
SessionPresentationFactory(session=slides.session, document__type_id="agenda")
- sm.send_update(slides.session)
+ retval = sm.send_update(slides.session)
+ self.assertIs(retval, True)
self.assertTrue(mock_wg_token.called)
self.assertTrue(mock_send_update.called)
self.assertEqual(
@@ -667,3 +696,11 @@ def test_send_update(self, mock_send_update, mock_wg_token):
]
)
)
+
+ # Test return value when no update is sent. Really ought to do a more
+ # careful test of the _should_send_update() method.
+ sm = SlidesManager(
+ settings.MEETECHO_API_CONFIG | {"slides_notify_time": None}
+ )
+ retval = sm.send_update(slides.session)
+ self.assertIs(retval, False)
diff --git a/ietf/utils/tests_searchindex.py b/ietf/utils/tests_searchindex.py
new file mode 100644
index 0000000000..e9fbf52020
--- /dev/null
+++ b/ietf/utils/tests_searchindex.py
@@ -0,0 +1,213 @@
+# Copyright The IETF Trust 2026, All Rights Reserved
+from unittest import mock
+
+import typesense.exceptions
+from django.conf import settings
+from django.test.utils import override_settings
+
+from . import searchindex
+from .test_utils import TestCase
+from ..blobdb.models import Blob
+from ..doc.factories import (
+ WgDraftFactory,
+ WgRfcFactory,
+ PublishedRfcDocEventFactory,
+ BcpFactory,
+ StdFactory,
+)
+from ..doc.models import Document
+from ..doc.storage_utils import store_str
+from ..person.factories import PersonFactory
+
+
+class SearchindexTests(TestCase):
+ def test_enabled(self):
+ with override_settings():
+ try:
+ del settings.SEARCHINDEX_CONFIG
+ except AttributeError:
+ pass
+ self.assertFalse(searchindex.enabled())
+ with override_settings(
+ SEARCHINDEX_CONFIG={"TYPESENSE_API_KEY": "this-is-not-a-key"}
+ ):
+ self.assertFalse(searchindex.enabled())
+ with override_settings(
+ SEARCHINDEX_CONFIG={"TYPESENSE_API_URL": "http://example.com"}
+ ):
+ self.assertTrue(searchindex.enabled())
+
+ def test_sanitize_text(self):
+ dirty_text = """
+
+ This is text. It + is <---- full of \tprobl.....ems! Fix it.
+ """
+ sanitized = "This is text It is full of problems Fix it."
+ self.assertEqual(searchindex._sanitize_text(dirty_text), sanitized)
+
+ @override_settings(
+ SEARCHINDEX_CONFIG={
+ "TYPESENSE_API_URL": "http://ts.example.com",
+ "TYPESENSE_API_KEY": "test-api-key",
+ "TYPESENSE_COLLECTION_NAME": "frogs",
+ }
+ )
+ def test_typesense_doc_from_rfc(self):
+ not_rfc = WgDraftFactory()
+ assert isinstance(not_rfc, Document)
+ with self.assertRaises(AssertionError):
+ searchindex.typesense_doc_from_rfc(not_rfc)
+
+ invalid_rfc = WgRfcFactory(name="rfc1000000", rfc_number=None)
+ assert isinstance(invalid_rfc, Document)
+ with self.assertRaises(AssertionError):
+ searchindex.typesense_doc_from_rfc(invalid_rfc)
+
+ rfc = PublishedRfcDocEventFactory().doc
+ assert isinstance(rfc, Document)
+ result = searchindex.typesense_doc_from_rfc(rfc)
+ # Check a few values, not exhaustive
+ self.assertEqual(result["id"], f"doc-{rfc.pk}")
+ self.assertEqual(result["rfcNumber"], rfc.rfc_number)
+ self.assertEqual(result["abstract"], searchindex._sanitize_text(rfc.abstract))
+ self.assertEqual(result["pages"], rfc.pages)
+ self.assertNotIn("adName", result)
+ self.assertNotIn("content", result) # no blob
+ self.assertNotIn("subseries", result)
+
+ # repeat, this time with contents, an AD, and subseries docs
+ store_str(
+ kind="rfc",
+ name=f"txt/{rfc.name}.txt",
+ content="The contents of this RFC",
+ doc_name=rfc.name,
+ doc_rev=rfc.rev, # expected to be None
+ )
+ rfc.ad = PersonFactory(name="Alfred D. Rector")
+ # Put it in two Subseries docs to be sure this does not break things
+ # (the typesense schema does not support this for real at the moment)
+ BcpFactory(contains=[rfc], name="bcp1234")
+ StdFactory(contains=[rfc], name="std1234")
+ result = searchindex.typesense_doc_from_rfc(rfc)
+ # Check a few values, not exhaustive
+ self.assertEqual(
+ result["content"],
+ searchindex._sanitize_text("The contents of this RFC"),
+ )
+ self.assertEqual(result["adName"], "Alfred D. Rector")
+ self.assertIn("subseries", result)
+ ss_dict = result["subseries"]
+ # We should get one of the two subseries docs, but neither is more correct
+ # than the other...
+ self.assertTrue(
+ any(
+ ss_dict == {"acronym": ss_type, "number": 1234, "total": 1}
+ for ss_type in ["bcp", "std"]
+ )
+ )
+
+ # Finally, delete the contents blob and make sure things don't blow up
+ Blob.objects.get(bucket="rfc", name=f"txt/{rfc.name}.txt").delete()
+ result = searchindex.typesense_doc_from_rfc(rfc)
+ self.assertNotIn("content", result)
+
+ @override_settings(
+ SEARCHINDEX_CONFIG={
+ "TYPESENSE_API_URL": "http://ts.example.com",
+ "TYPESENSE_API_KEY": "test-api-key",
+ "TYPESENSE_COLLECTION_NAME": "frogs",
+ }
+ )
+ @mock.patch("ietf.utils.searchindex.typesense_doc_from_rfc")
+ @mock.patch("ietf.utils.searchindex.typesense.Client")
+ def test_update_or_create_rfc_entry(
+ self, mock_ts_client_constructor, mock_tdoc_from_rfc
+ ):
+ fake_tdoc = object()
+ mock_tdoc_from_rfc.return_value = fake_tdoc
+ rfc = WgRfcFactory()
+ assert isinstance(rfc, Document)
+ searchindex.update_or_create_rfc_entry(rfc)
+ self.assertTrue(mock_ts_client_constructor.called)
+ # walk the tree down to the method we expected to be called...
+ mock_upsert = mock_ts_client_constructor.return_value.collections[
+ "frogs" # matches value in override_settings above
+ ].documents.upsert
+ self.assertTrue(mock_upsert.called)
+ self.assertEqual(mock_upsert.call_args, mock.call(fake_tdoc))
+
+ @override_settings(
+ SEARCHINDEX_CONFIG={
+ "TYPESENSE_API_URL": "http://ts.example.com",
+ "TYPESENSE_API_KEY": "test-api-key",
+ "TYPESENSE_COLLECTION_NAME": "frogs",
+ }
+ )
+ @mock.patch("ietf.utils.searchindex.typesense_doc_from_rfc")
+ @mock.patch("ietf.utils.searchindex.typesense.Client")
+ def test_update_or_create_rfc_entries(
+ self, mock_ts_client_constructor, mock_tdoc_from_rfc
+ ):
+ fake_tdoc = object()
+ mock_tdoc_from_rfc.return_value = fake_tdoc
+ rfc = WgRfcFactory()
+ assert isinstance(rfc, Document)
+ searchindex.update_or_create_rfc_entries([rfc] * 50) # list of docs...
+ self.assertEqual(mock_ts_client_constructor.call_count, 1)
+ # walk the tree down to the method we expected to be called...
+ mock_import_ = mock_ts_client_constructor.return_value.collections[
+ "frogs" # matches value in override_settings above
+ ].documents.import_
+ self.assertEqual(mock_import_.call_count, 1)
+ self.assertEqual(
+ mock_import_.call_args, mock.call([fake_tdoc] * 50, {"action": "upsert"})
+ )
+
+ mock_import_.reset_mock()
+ searchindex.update_or_create_rfc_entries([rfc] * 50, batchsize=20)
+ self.assertEqual(mock_ts_client_constructor.call_count, 2) # one more
+ # walk the tree down to the method we expected to be called...
+ mock_import_ = mock_ts_client_constructor.return_value.collections[
+ "frogs" # matches value in override_settings above
+ ].documents.import_
+ self.assertEqual(mock_import_.call_count, 3)
+ self.assertEqual(
+ mock_import_.call_args_list,
+ [
+ mock.call([fake_tdoc] * 20, {"action": "upsert"}),
+ mock.call([fake_tdoc] * 20, {"action": "upsert"}),
+ mock.call([fake_tdoc] * 10, {"action": "upsert"}),
+ ],
+ )
+
+ @override_settings(
+ SEARCHINDEX_CONFIG={
+ "TYPESENSE_API_URL": "http://ts.example.com",
+ "TYPESENSE_API_KEY": "test-api-key",
+ "TYPESENSE_COLLECTION_NAME": "frogs",
+ }
+ )
+ @mock.patch("ietf.utils.searchindex.typesense.Client")
+ def test_create_collection(self, mock_ts_client_constructor):
+ searchindex.create_collection()
+ self.assertEqual(mock_ts_client_constructor.call_count, 1)
+ mock_collections = mock_ts_client_constructor.return_value.collections
+ self.assertTrue(mock_collections.create.called)
+ self.assertEqual(mock_collections.create.call_args[0][0]["name"], "frogs")
+
+ @override_settings(
+ SEARCHINDEX_CONFIG={
+ "TYPESENSE_API_URL": "http://ts.example.com",
+ "TYPESENSE_API_KEY": "test-api-key",
+ "TYPESENSE_COLLECTION_NAME": "frogs",
+ }
+ )
+ @mock.patch("ietf.utils.searchindex.typesense.Client")
+ def test_delete_collection(self, mock_ts_client_constructor):
+ searchindex.delete_collection()
+ self.assertEqual(mock_ts_client_constructor.call_count, 1)
+ mock_collections = mock_ts_client_constructor.return_value.collections
+ self.assertTrue(mock_collections["frogs"].delete.called)
+
+        mock_collections["frogs"].delete.side_effect = typesense.exceptions.ObjectNotFound
+ searchindex.delete_collection() # should ignore the exception
diff --git a/ietf/utils/tests_text.py b/ietf/utils/tests_text.py
new file mode 100644
index 0000000000..51aa2eff13
--- /dev/null
+++ b/ietf/utils/tests_text.py
@@ -0,0 +1,71 @@
+# Copyright The IETF Trust 2021-2026, All Rights Reserved
+from ietf.utils.test_utils import TestCase
+from ietf.utils.text import parse_unicode, decode_document_content
+
+
+class TestDecoders(TestCase):
+ def test_parse_unicode(self):
+ names = (
+ ("=?utf-8?b?4Yuz4YuK4Ym1IOGJoOGJgOGIiA==?=", "ዳዊት በቀለ"),
+ ("=?utf-8?b?5Li9IOmDnA==?=", "丽 郜"),
+ ("=?utf-8?b?4KSV4KSu4KWN4KSs4KWL4KScIOCkoeCkvuCksA==?=", "कम्बोज डार"),
+ ("=?utf-8?b?zpfPgc6szrrOu861zrnOsSDOm865z4zOvc+Ezrc=?=", "Ηράκλεια Λιόντη"),
+ ("=?utf-8?b?15nXqdeo15DXnCDXqNeV15bXoNek15zXkw==?=", "ישראל רוזנפלד"),
+ ("=?utf-8?b?5Li95Y2OIOeahw==?=", "丽华 皇"),
+ ("=?utf-8?b?77ul77qu766V77qzIO+tlu+7ru+vvu+6ju+7pw==?=", "ﻥﺮﮕﺳ ﭖﻮﯾﺎﻧ"),
+ (
+ "=?utf-8?b?77uh77uu77qz77uu76++IO+6su+7tO+7p++6jSDvurDvu6Pvuo7vu6jvr74=?=",
+ "ﻡﻮﺳﻮﯾ ﺲﻴﻧﺍ ﺰﻣﺎﻨﯾ",
+ ),
+ (
+ "=?utf-8?b?ScOxaWdvIFNhbsOnIEliw6HDsWV6IGRlIGxhIFBlw7Fh?=",
+ "Iñigo Sanç Ibáñez de la Peña",
+ ),
+ ("Mart van Oostendorp", "Mart van Oostendorp"),
+ ("", ""),
+ )
+ for encoded_str, unicode in names:
+ self.assertEqual(unicode, parse_unicode(encoded_str))
+
+ def test_decode_document_content(self):
+ utf8_bytes = "𒀭𒊩𒌆𒄈𒋢".encode("utf-8") # ends with 4-byte character
+ latin1_bytes = "àéîøü".encode("latin-1")
+ other_bytes = "àéîøü".encode("macintosh") # different from its latin-1 encoding
+ assert other_bytes.decode("macintosh") != other_bytes.decode("latin-1"),\
+ "test broken: other_bytes must decode differently as latin-1"
+
+ # simplest case
+ self.assertEqual(
+ decode_document_content(utf8_bytes),
+ utf8_bytes.decode(),
+ )
+ # losing 1-4 bytes from the end leave the last character incomplete; the
+ # decoder should decode all but that last character
+ self.assertEqual(
+ decode_document_content(utf8_bytes[:-1]),
+ utf8_bytes.decode()[:-1],
+ )
+ self.assertEqual(
+ decode_document_content(utf8_bytes[:-2]),
+ utf8_bytes.decode()[:-1],
+ )
+ self.assertEqual(
+ decode_document_content(utf8_bytes[:-3]),
+ utf8_bytes.decode()[:-1],
+ )
+ self.assertEqual(
+ decode_document_content(utf8_bytes[:-4]),
+ utf8_bytes.decode()[:-1],
+ )
+
+ # latin-1 is also simple
+ self.assertEqual(
+ decode_document_content(latin1_bytes),
+ latin1_bytes.decode("latin-1"),
+ )
+
+ # other character sets are just treated as latin1 (bug? feature? you decide)
+ self.assertEqual(
+ decode_document_content(other_bytes),
+ other_bytes.decode("latin-1"),
+ )
diff --git a/ietf/utils/text.py b/ietf/utils/text.py
index 4e5d5b6cd5..2763056e1a 100644
--- a/ietf/utils/text.py
+++ b/ietf/utils/text.py
@@ -60,6 +60,12 @@ def check_url_validity(attrs, new=False):
def linkify(text):
+ """Convert URL-ish substrings into HTML links
+
+ This does no sanitization whatsoever. Caller must sanitize the input or output as
+ contextually appropriate. Do not call `mark_safe()` on the output if the input is
+ user-provided unless it has been sanitized or escaped.
+ """
return _bleach_linker.linkify(text)
@@ -257,3 +263,21 @@ def parse_unicode(text):
else:
text = decoded_string
return text
+
+
+def decode_document_content(content: bytes) -> str:
+ """Decode document contents as utf-8 or latin1
+
+ Method was developed in DocumentInfo.text() where it gave acceptable results
+ for existing documents / RFCs.
+ """
+ try:
+ return content.decode("utf-8")
+ except UnicodeDecodeError:
+ pass
+ for back in range(1, 4):
+ try:
+ return content[:-back].decode("utf-8")
+ except UnicodeDecodeError:
+ pass
+ return content.decode("latin-1") # everything is legal in latin-1
diff --git a/ietf/utils/timezone.py b/ietf/utils/timezone.py
index a396b5e82d..e08dfa02f2 100644
--- a/ietf/utils/timezone.py
+++ b/ietf/utils/timezone.py
@@ -26,7 +26,7 @@ def _tzinfo(tz: Union[str, datetime.tzinfo, None]):
Accepts a tzinfo or string containing a timezone name. Defaults to UTC if tz is None.
"""
if tz is None:
- return datetime.timezone.utc
+ return datetime.UTC
elif isinstance(tz, datetime.tzinfo):
return tz
else:
diff --git a/ietf/utils/unicodenormalize.py b/ietf/utils/unicodenormalize.py
new file mode 100644
index 0000000000..8644dbdb79
--- /dev/null
+++ b/ietf/utils/unicodenormalize.py
@@ -0,0 +1,9 @@
+# Copyright The IETF Trust 2025, All Rights Reserved
+import unicodedata
+
+def normalize_for_sorting(text):
+ """Normalize text for proper accent-aware sorting."""
+ # Normalize the text to NFD (decomposed form)
+ decomposed = unicodedata.normalize('NFD', text)
+ # Filter out combining diacritical marks
+ return ''.join(char for char in decomposed if not unicodedata.combining(char))
diff --git a/ietf/utils/validators.py b/ietf/utils/validators.py
index 8fe989df99..a99de72724 100644
--- a/ietf/utils/validators.py
+++ b/ietf/utils/validators.py
@@ -4,6 +4,8 @@
import os
import re
+from email.utils import parseaddr
+
from pyquery import PyQuery
from urllib.parse import urlparse, urlsplit, urlunsplit
@@ -11,7 +13,13 @@
from django.apps import apps
from django.conf import settings
from django.core.exceptions import ObjectDoesNotExist, ValidationError
-from django.core.validators import RegexValidator, URLValidator, EmailValidator, BaseValidator
+from django.core.validators import (
+ RegexValidator,
+ URLValidator,
+ BaseValidator,
+ validate_email,
+ ProhibitNullCharactersValidator,
+)
from django.template.defaultfilters import filesizeformat
from django.utils.deconstruct import deconstructible
from django.utils.ipv6 import is_valid_ipv6_address
@@ -25,8 +33,9 @@
# Note that this is an instantiation of the regex validator, _not_ the
# regex-string validator defined right below
validate_no_control_chars = RegexValidator(
- regex="^[^\x00-\x1f]*$",
- message="Please enter a string without control characters." )
+ regex="^[^\x01-\x1f]*$",
+ message="Please enter a string without control characters.",
+)
@deconstructible
@@ -136,8 +145,17 @@ def validate_no_html_frame(file):
# instantiations of sub-validiators used by the external_resource validator
validate_url = URLValidator()
-validate_http_url = URLValidator(schemes=['http','https'])
-validate_email = EmailValidator()
+validate_http_url = URLValidator(schemes=["http", "https"])
+validate_no_nulls = ProhibitNullCharactersValidator()
+
+
+def validate_mailbox_address(s):
+    """Validate an RFC 5322 'mailbox' (e.g., "Some Person" <person@example.com>)"""
+ # parseaddr() returns ("", "") on err; validate_email() will reject that for us
+ name, addr = parseaddr(s)
+ validate_no_nulls(name) # could be stricter...
+ validate_email(addr)
+
def validate_ipv6_address(value):
if not is_valid_ipv6_address(value):
diff --git a/ietf/utils/xmldraft.py b/ietf/utils/xmldraft.py
index c39c4d0a06..325b8499a9 100644
--- a/ietf/utils/xmldraft.py
+++ b/ietf/utils/xmldraft.py
@@ -1,4 +1,4 @@
-# Copyright The IETF Trust 2022, All Rights Reserved
+# Copyright The IETF Trust 2022-2025, All Rights Reserved
# -*- coding: utf-8 -*-
import datetime
import io
@@ -7,7 +7,7 @@
import debug # pyflakes: ignore
-from contextlib import ExitStack
+from contextlib import contextmanager
from lxml.etree import XMLSyntaxError
from xml2rfc.util.date import augment_date, extract_date
from ietf.utils.timezone import date_today
@@ -15,6 +15,21 @@
from .draft import Draft
+@contextmanager
+def capture_xml2rfc_output():
+ orig_write_out = xml2rfc.log.write_out
+ orig_write_err = xml2rfc.log.write_err
+ parser_out = io.StringIO()
+ parser_err = io.StringIO()
+ xml2rfc.log.write_out = parser_out
+ xml2rfc.log.write_err = parser_err
+ try:
+ yield {"stdout": parser_out, "stderr": parser_err}
+ finally:
+ xml2rfc.log.write_out = orig_write_out
+ xml2rfc.log.write_err = orig_write_err
+
+
class XMLDraft(Draft):
"""Draft from XML source
@@ -38,27 +53,18 @@ def parse_xml(filename):
Converts to xml2rfc v3 schema, then returns the root of the v3 tree and the original
xml version.
"""
- orig_write_out = xml2rfc.log.write_out
- orig_write_err = xml2rfc.log.write_err
- parser_out = io.StringIO()
- parser_err = io.StringIO()
-
- with ExitStack() as stack:
- @stack.callback
- def cleanup(): # called when context exited, even if there's an exception
- xml2rfc.log.write_out = orig_write_out
- xml2rfc.log.write_err = orig_write_err
-
- xml2rfc.log.write_out = parser_out
- xml2rfc.log.write_err = parser_err
+ with capture_xml2rfc_output() as parser_logs:
parser = xml2rfc.XmlRfcParser(filename, quiet=True)
try:
tree = parser.parse()
except XMLSyntaxError:
raise InvalidXMLError()
except Exception as e:
- raise XMLParseError(parser_out.getvalue(), parser_err.getvalue()) from e
+ raise XMLParseError(
+ parser_logs["stdout"].getvalue(),
+ parser_logs["stderr"].getvalue(),
+ ) from e
xml_version = tree.getroot().get('version', '2')
if xml_version == '2':
@@ -96,6 +102,17 @@ def _document_name(self, ref):
number = int(maybe_number)
return f"{label}{number}"
+ target = ref.get("target")
+ if isinstance(target, str):
+ target = target.lower()
+ if target.startswith("https://datatracker.ietf.org/doc/"):
+ # len("https://datatracker.ietf.org/doc/")==33
+                m = re.match(r"^(draft-[a-z0-9-]*[a-z0-9])([/-]\d{2})?/?$", target[33:])
+ if m:
+ name = m.group(1)
+ return name
+
+
# if we couldn't find a match so far, try the seriesInfo
series_query = " or ".join(f"@name='{x.upper()}'" for x in series)
for info in ref.xpath(
@@ -141,16 +158,38 @@ def parse_docname(xmlroot):
return revmatch.group('filename'), revmatch.group('rev')
def get_title(self):
- return self.xmlroot.findtext('front/title').strip()
+ title_text = self.xmlroot.findtext('front/title')
+ return "" if title_text is None else title_text.strip()
@staticmethod
def parse_creation_date(date_elt):
if date_elt is None:
return None
+
today = date_today()
- # ths mimics handling of date elements in the xml2rfc text/html writers
- year, month, day = extract_date(date_elt, today)
- year, month, day = augment_date(year, month, day, today)
+
+ # Outright reject non-numeric year / day (xml2rfc's extract_date does not do this)
+        # (n.b., "year" can be non-numeric in a <reference> per RFC 7991)
+ year = date_elt.get("year")
+ day = date_elt.get("day")
+ non_numeric_year = year and not year.isdigit()
+ non_numeric_day = day and not day.isdigit()
+ if non_numeric_day or non_numeric_year:
+ raise InvalidMetadataError(
+                "Unable to parse the <date> element in the <front> section: "
+ "year and day must be numeric values if specified."
+ )
+
+ try:
+        # this mimics handling of date elements in the xml2rfc text/html writers
+ year, month, day = extract_date(date_elt, today)
+ year, month, day = augment_date(year, month, day, today)
+ except Exception as err:
+ # Give a generic error if anything goes wrong so far...
+ raise InvalidMetadataError(
+                "Unable to parse the <date> element in the <front> section."
+ ) from err
+
if not day:
# Must choose a day for a datetime.date. Per RFC 7991 sect 2.17, we use
# today's date if it is consistent with the rest of the date. Otherwise,
@@ -159,7 +198,19 @@ def parse_creation_date(date_elt):
day = today.day
else:
day = 15
- return datetime.date(year, month, day)
+
+ try:
+ creation_date = datetime.date(year, month, day)
+ except Exception:
+ # If everything went well, we should have had a valid datetime, but we didn't.
+ # The parsing _worked_ but not in a way that we can go forward with.
+ raise InvalidMetadataError(
+            "The <date> element in the <front> section specified an incomplete date "
+ "that was not consistent with today's date. If you specify only a year, "
+ "it must be the four-digit current year. To use today's date, omit the "
+            "<date> tag or use <date/>."
+ )
+ return creation_date
def get_creation_date(self):
return self.parse_creation_date(self.xmlroot.find("front/date"))
@@ -193,6 +244,12 @@ def render_author_name(author_elt):
# Use fullname attribute, if present
fullname = author_elt.attrib.get("fullname", "").strip()
if fullname:
+ # If any 8bit chars in the fullname, try to append the author's
+ # name in ascii.
+ if any([x >= 0x80 for x in fullname.encode('utf8')]):
+ asciifullname = author_elt.attrib.get("asciiFullname", "").strip()
+ if asciifullname:
+ fullname = fullname + ' (' + asciifullname + ')'
return fullname
surname = author_elt.attrib.get("surname", "").strip()
initials = author_elt.attrib.get("initials", "").strip()
@@ -269,3 +326,7 @@ def parser_msgs(self):
class InvalidXMLError(Exception):
"""File is not valid XML"""
pass
+
+
+class InvalidMetadataError(Exception):
+ """XML is well-formed but has invalid metadata"""
diff --git a/k8s/auth.yaml b/k8s/auth.yaml
index 392e306b54..2bdb064447 100644
--- a/k8s/auth.yaml
+++ b/k8s/auth.yaml
@@ -15,16 +15,6 @@ spec:
labels:
app: auth
spec:
- affinity:
- podAffinity:
- requiredDuringSchedulingIgnoredDuringExecution:
- - labelSelector:
- matchExpressions:
- - key: app
- operator: In
- values:
- - datatracker
- topologyKey: "kubernetes.io/hostname"
securityContext:
runAsNonRoot: true
containers:
diff --git a/k8s/beat.yaml b/k8s/beat.yaml
index 9a8fe2f0a4..b4291c7e31 100644
--- a/k8s/beat.yaml
+++ b/k8s/beat.yaml
@@ -17,16 +17,6 @@ spec:
labels:
app: beat
spec:
- affinity:
- podAffinity:
- requiredDuringSchedulingIgnoredDuringExecution:
- - labelSelector:
- matchExpressions:
- - key: app
- operator: In
- values:
- - datatracker
- topologyKey: "kubernetes.io/hostname"
securityContext:
runAsNonRoot: true
containers:
@@ -36,10 +26,6 @@ spec:
- name: beat
image: "ghcr.io/ietf-tools/datatracker:$APP_IMAGE_TAG"
imagePullPolicy: Always
- ports:
- - containerPort: 8000
- name: http
- protocol: TCP
volumeMounts:
- name: dt-vol
mountPath: /a
@@ -63,7 +49,7 @@ spec:
runAsUser: 1000
runAsGroup: 1000
volumes:
- # To be overriden with the actual shared volume
+ # To be overridden with the actual shared volume
- name: dt-vol
- name: dt-tmp
emptyDir:
@@ -73,4 +59,4 @@ spec:
name: files-cfgmap
dnsPolicy: ClusterFirst
restartPolicy: Always
- terminationGracePeriodSeconds: 600
+ terminationGracePeriodSeconds: 10
diff --git a/k8s/celery.yaml b/k8s/celery.yaml
index 15f3bf0c7c..2f4c0fd439 100644
--- a/k8s/celery.yaml
+++ b/k8s/celery.yaml
@@ -17,16 +17,6 @@ spec:
labels:
app: celery
spec:
- affinity:
- podAffinity:
- requiredDuringSchedulingIgnoredDuringExecution:
- - labelSelector:
- matchExpressions:
- - key: app
- operator: In
- values:
- - datatracker
- topologyKey: "kubernetes.io/hostname"
securityContext:
runAsNonRoot: true
containers:
@@ -36,10 +26,6 @@ spec:
- name: celery
image: "ghcr.io/ietf-tools/datatracker:$APP_IMAGE_TAG"
imagePullPolicy: Always
- ports:
- - containerPort: 8000
- name: http
- protocol: TCP
volumeMounts:
- name: dt-vol
mountPath: /a
@@ -92,7 +78,7 @@ spec:
runAsUser: 65534 # "nobody" user by default
runAsGroup: 65534 # "nogroup" group by default
volumes:
- # To be overriden with the actual shared volume
+ # To be overridden with the actual shared volume
- name: dt-vol
- name: dt-tmp
emptyDir:
diff --git a/k8s/datatracker.yaml b/k8s/datatracker.yaml
index 3d9e86a29d..50a2c69687 100644
--- a/k8s/datatracker.yaml
+++ b/k8s/datatracker.yaml
@@ -115,6 +115,7 @@ spec:
initContainers:
- name: migration
image: "ghcr.io/ietf-tools/datatracker:$APP_IMAGE_TAG"
+ imagePullPolicy: Always
env:
- name: "CONTAINER_ROLE"
value: "migrations"
diff --git a/k8s/kustomization.yaml b/k8s/kustomization.yaml
index 2b623da2bd..769cb03517 100644
--- a/k8s/kustomization.yaml
+++ b/k8s/kustomization.yaml
@@ -14,3 +14,4 @@ resources:
- datatracker.yaml
- memcached.yaml
- rabbitmq.yaml
+ - replicator.yaml
diff --git a/k8s/memcached.yaml b/k8s/memcached.yaml
index 8f73f3d0d5..5a4c9f0aed 100644
--- a/k8s/memcached.yaml
+++ b/k8s/memcached.yaml
@@ -13,16 +13,6 @@ spec:
labels:
app: memcached
spec:
- affinity:
- podAffinity:
- requiredDuringSchedulingIgnoredDuringExecution:
- - labelSelector:
- matchExpressions:
- - key: app
- operator: In
- values:
- - datatracker
- topologyKey: "kubernetes.io/hostname"
securityContext:
runAsNonRoot: true
containers:
diff --git a/k8s/rabbitmq.yaml b/k8s/rabbitmq.yaml
index 3cab7ff565..346b54c93e 100644
--- a/k8s/rabbitmq.yaml
+++ b/k8s/rabbitmq.yaml
@@ -13,23 +13,13 @@ spec:
labels:
app: rabbitmq
spec:
- affinity:
- podAffinity:
- requiredDuringSchedulingIgnoredDuringExecution:
- - labelSelector:
- matchExpressions:
- - key: app
- operator: In
- values:
- - datatracker
- topologyKey: "kubernetes.io/hostname"
securityContext:
runAsNonRoot: true
containers:
# -----------------------------------------------------
# RabbitMQ Container
# -----------------------------------------------------
- - image: "ghcr.io/ietf-tools/datatracker-mq:3.12-alpine"
+ - image: "ghcr.io/ietf-tools/datatracker-mq:3.13-alpine"
imagePullPolicy: Always
name: rabbitmq
ports:
@@ -52,17 +42,17 @@ spec:
key: CELERY_PASSWORD
livenessProbe:
exec:
- command: ["rabbitmq-diagnostics", "-q", "ping"]
+ command: ["rabbitmq-diagnostics", "-q", "ping", "-t", "30"]
periodSeconds: 30
- timeoutSeconds: 5
+ timeoutSeconds: 35 # slightly longer than ping "-t" option
startupProbe:
initialDelaySeconds: 15
periodSeconds: 5
- timeoutSeconds: 5
+ timeoutSeconds: 35 # slightly longer than ping "-t" option
successThreshold: 1
failureThreshold: 60
exec:
- command: ["rabbitmq-diagnostics", "-q", "ping"]
+ command: ["rabbitmq-diagnostics", "-q", "ping", "-t", "30"]
securityContext:
allowPrivilegeEscalation: false
capabilities:
diff --git a/k8s/replicator.yaml b/k8s/replicator.yaml
new file mode 100644
index 0000000000..a28d9e8a16
--- /dev/null
+++ b/k8s/replicator.yaml
@@ -0,0 +1,72 @@
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+ name: replicator
+ labels:
+    deleteBeforeUpgrade: "yes"
+spec:
+ replicas: 1
+ revisionHistoryLimit: 2
+ selector:
+ matchLabels:
+ app: replicator
+ strategy:
+ type: Recreate
+ template:
+ metadata:
+ labels:
+ app: replicator
+ spec:
+ securityContext:
+ runAsNonRoot: true
+ containers:
+ # -----------------------------------------------------
+ # Celery Container
+ # -----------------------------------------------------
+ - name: celery
+ image: "ghcr.io/ietf-tools/datatracker:$APP_IMAGE_TAG"
+ imagePullPolicy: Always
+ volumeMounts:
+ - name: dt-vol
+ mountPath: /a
+ - name: dt-tmp
+ mountPath: /tmp
+ - name: dt-home
+ mountPath: /home/datatracker
+ - name: dt-xml2rfc-cache
+ mountPath: /var/cache/xml2rfc
+ - name: dt-cfg
+ mountPath: /workspace/ietf/settings_local.py
+ subPath: settings_local.py
+ env:
+ - name: "CONTAINER_ROLE"
+ value: "replicator"
+ envFrom:
+ - secretRef:
+ name: dt-secrets-env
+ securityContext:
+ allowPrivilegeEscalation: false
+ capabilities:
+ drop:
+ - ALL
+ readOnlyRootFilesystem: true
+ runAsUser: 1000
+ runAsGroup: 1000
+ volumes:
+ # To be overridden with the actual shared volume
+ - name: dt-vol
+ - name: dt-tmp
+ emptyDir:
+ sizeLimit: "2Gi"
+ - name: dt-xml2rfc-cache
+ emptyDir:
+ sizeLimit: "2Gi"
+ - name: dt-home
+ emptyDir:
+ sizeLimit: "2Gi"
+ - name: dt-cfg
+ configMap:
+ name: files-cfgmap
+ dnsPolicy: ClusterFirst
+ restartPolicy: Always
+ terminationGracePeriodSeconds: 600
diff --git a/k8s/settings_local.py b/k8s/settings_local.py
index f266ffcd62..251f11234f 100644
--- a/k8s/settings_local.py
+++ b/k8s/settings_local.py
@@ -1,4 +1,4 @@
-# Copyright The IETF Trust 2007-2024, All Rights Reserved
+# Copyright The IETF Trust 2007-2026, All Rights Reserved
# -*- coding: utf-8 -*-
from base64 import b64decode
@@ -6,30 +6,31 @@
import json
from ietf import __release_hash__
-from ietf.settings import * # pyflakes:ignore
+from ietf.settings import * # pyflakes:ignore
+from ietf.settings import (
+ STORAGES,
+ ARTIFACT_STORAGE_NAMES,
+ BLOBSTORAGE_CONNECT_TIMEOUT,
+ BLOBSTORAGE_READ_TIMEOUT,
+ BLOBSTORAGE_MAX_ATTEMPTS,
+)
+import botocore.config
def _multiline_to_list(s):
- """Helper to split at newlines and conver to list"""
+ """Helper to split at newlines and convert to list"""
return [item.strip() for item in s.split("\n")]
# Default to "development". Production _must_ set DATATRACKER_SERVER_MODE="production" in the env!
SERVER_MODE = os.environ.get("DATATRACKER_SERVER_MODE", "development")
-# Use X-Forwarded-Proto to determine request.is_secure(). This relies on CloudFlare overwriting the
-# value of the header if an incoming request sets it, which it does:
-# https://developers.cloudflare.com/fundamentals/reference/http-request-headers/#x-forwarded-proto
-# See also, especially the warnings:
-# https://docs.djangoproject.com/en/dev/ref/settings/#secure-proxy-ssl-header
-SECURE_PROXY_SSL_HEADER = ("HTTP_X_FORWARDED_PROTO", "https")
-
# Secrets
_SECRET_KEY = os.environ.get("DATATRACKER_DJANGO_SECRET_KEY", None)
if _SECRET_KEY is not None:
SECRET_KEY = _SECRET_KEY
else:
- raise RuntimeError("DATATRACKER_DJANGO_SECRET_KEY must be set")
+ raise RuntimeError("DATATRACKER_DJANGO_SECRET_KEY must be set")
_NOMCOM_APP_SECRET_B64 = os.environ.get("DATATRACKER_NOMCOM_APP_SECRET_B64", None)
if _NOMCOM_APP_SECRET_B64 is not None:
@@ -41,7 +42,7 @@ def _multiline_to_list(s):
if _IANA_SYNC_PASSWORD is not None:
IANA_SYNC_PASSWORD = _IANA_SYNC_PASSWORD
else:
- raise RuntimeError("DATATRACKER_IANA_SYNC_PASSWORD must be set")
+ raise RuntimeError("DATATRACKER_IANA_SYNC_PASSWORD must be set")
_RFC_EDITOR_SYNC_PASSWORD = os.environ.get("DATATRACKER_RFC_EDITOR_SYNC_PASSWORD", None)
if _RFC_EDITOR_SYNC_PASSWORD is not None:
@@ -59,25 +60,41 @@ def _multiline_to_list(s):
if _GITHUB_BACKUP_API_KEY is not None:
GITHUB_BACKUP_API_KEY = _GITHUB_BACKUP_API_KEY
else:
- raise RuntimeError("DATATRACKER_GITHUB_BACKUP_API_KEY must be set")
+ raise RuntimeError("DATATRACKER_GITHUB_BACKUP_API_KEY must be set")
_API_KEY_TYPE = os.environ.get("DATATRACKER_API_KEY_TYPE", None)
if _API_KEY_TYPE is not None:
API_KEY_TYPE = _API_KEY_TYPE
else:
- raise RuntimeError("DATATRACKER_API_KEY_TYPE must be set")
+ raise RuntimeError("DATATRACKER_API_KEY_TYPE must be set")
_API_PUBLIC_KEY_PEM_B64 = os.environ.get("DATATRACKER_API_PUBLIC_KEY_PEM_B64", None)
if _API_PUBLIC_KEY_PEM_B64 is not None:
API_PUBLIC_KEY_PEM = b64decode(_API_PUBLIC_KEY_PEM_B64)
else:
- raise RuntimeError("DATATRACKER_API_PUBLIC_KEY_PEM_B64 must be set")
+ raise RuntimeError("DATATRACKER_API_PUBLIC_KEY_PEM_B64 must be set")
_API_PRIVATE_KEY_PEM_B64 = os.environ.get("DATATRACKER_API_PRIVATE_KEY_PEM_B64", None)
if _API_PRIVATE_KEY_PEM_B64 is not None:
API_PRIVATE_KEY_PEM = b64decode(_API_PRIVATE_KEY_PEM_B64)
else:
- raise RuntimeError("DATATRACKER_API_PRIVATE_KEY_PEM_B64 must be set")
+ raise RuntimeError("DATATRACKER_API_PRIVATE_KEY_PEM_B64 must be set")
+
+_RED_PRECOMPUTER_TRIGGER_RETRY_DELAY = os.environ.get(
+ "DATATRACKER_RED_PRECOMPUTER_TRIGGER_RETRY_DELAY", None
+)
+if _RED_PRECOMPUTER_TRIGGER_RETRY_DELAY is not None:
+ RED_PRECOMPUTER_TRIGGER_RETRY_DELAY = _RED_PRECOMPUTER_TRIGGER_RETRY_DELAY
+_RED_PRECOMPUTER_TRIGGER_MAX_RETRIES = os.environ.get(
+ "DATATRACKER_RED_PRECOMPUTER_TRIGGER_MAX_RETRIES", None
+)
+if _RED_PRECOMPUTER_TRIGGER_MAX_RETRIES is not None:
+ RED_PRECOMPUTER_TRIGGER_MAX_RETRIES = _RED_PRECOMPUTER_TRIGGER_MAX_RETRIES
+_TRIGGER_RED_PRECOMPUTE_MULTIPLE_URL = os.environ.get(
+ "DATATRACKER_TRIGGER_RED_PRECOMPUTE_MULTIPLE_URL", None
+)
+if _TRIGGER_RED_PRECOMPUTE_MULTIPLE_URL is not None:
+ TRIGGER_RED_PRECOMPUTE_MULTIPLE_URL = _TRIGGER_RED_PRECOMPUTE_MULTIPLE_URL
# Set DEBUG if DATATRACKER_DEBUG env var is the word "true"
DEBUG = os.environ.get("DATATRACKER_DEBUG", "false").lower() == "true"
@@ -97,26 +114,44 @@ def _multiline_to_list(s):
"PASSWORD": os.environ.get("DATATRACKER_DB_PASS", ""),
"OPTIONS": json.loads(os.environ.get("DATATRACKER_DB_OPTS_JSON", "{}")),
},
+ "blobdb": {
+ "HOST": os.environ.get("BLOBDB_DB_HOST", "blobdb"),
+ "PORT": os.environ.get("BLOBDB_DB_PORT", "5432"),
+ "NAME": os.environ.get("BLOBDB_DB_NAME", "blob"),
+ "ENGINE": "django.db.backends.postgresql",
+ "USER": os.environ.get("BLOBDB_DB_USER", "django"),
+ "PASSWORD": os.environ.get("BLOBDB_DB_PASS", ""),
+ "OPTIONS": json.loads(os.environ.get("BLOBDB_DB_OPTS_JSON", "{}")),
+ },
}
+DATABASE_ROUTERS = ["ietf.blobdb.routers.BlobdbStorageRouter"]
+BLOBDB_DATABASE = "blobdb"
+
# Configure persistent connections. A setting of 0 is Django's default.
_conn_max_age = os.environ.get("DATATRACKER_DB_CONN_MAX_AGE", "0")
-# A string "none" means unlimited age.
-DATABASES["default"]["CONN_MAX_AGE"] = None if _conn_max_age.lower() == "none" else int(_conn_max_age)
+for dbname in ["default", "blobdb"]:
+ # A string "none" means unlimited age.
+ DATABASES[dbname]["CONN_MAX_AGE"] = (
+ None if _conn_max_age.lower() == "none" else int(_conn_max_age)
+ )
# Enable connection health checks if DATATRACKER_DB_CONN_HEALTH_CHECK is the string "true"
_conn_health_checks = bool(
os.environ.get("DATATRACKER_DB_CONN_HEALTH_CHECKS", "false").lower() == "true"
)
-DATABASES["default"]["CONN_HEALTH_CHECKS"] = _conn_health_checks
+for dbname in ["default", "blobdb"]:
+ DATABASES[dbname]["CONN_HEALTH_CHECKS"] = _conn_health_checks
# DATATRACKER_ADMINS is a newline-delimited list of addresses parseable by email.utils.parseaddr
_admins_str = os.environ.get("DATATRACKER_ADMINS", None)
if _admins_str is not None:
ADMINS = [parseaddr(admin) for admin in _multiline_to_list(_admins_str)]
else:
- raise RuntimeError("DATATRACKER_ADMINS must be set")
+ raise RuntimeError("DATATRACKER_ADMINS must be set")
-USING_DEBUG_EMAIL_SERVER = os.environ.get("DATATRACKER_EMAIL_DEBUG", "false").lower() == "true"
+USING_DEBUG_EMAIL_SERVER = (
+ os.environ.get("DATATRACKER_EMAIL_DEBUG", "false").lower() == "true"
+)
EMAIL_HOST = os.environ.get("DATATRACKER_EMAIL_HOST", "localhost")
EMAIL_PORT = int(os.environ.get("DATATRACKER_EMAIL_PORT", "2025"))
@@ -126,8 +161,16 @@ def _multiline_to_list(s):
CELERY_BROKER_URL = "amqp://datatracker:{password}@{host}/{queue}".format(
host=os.environ.get("RABBITMQ_HOSTNAME", "dt-rabbitmq"),
password=_celery_password,
- queue=os.environ.get("RABBITMQ_QUEUE", "dt")
+ queue=os.environ.get("RABBITMQ_QUEUE", "dt"),
+)
+
+# mailarchive API key
+_mailing_list_archive_api_key = os.environ.get(
+ "DATATRACKER_MAILING_LIST_ARCHIVE_API_KEY", None
)
+if _mailing_list_archive_api_key is None:
+ raise RuntimeError("DATATRACKER_MAILING_LIST_ARCHIVE_API_KEY must be set")
+MAILING_LIST_ARCHIVE_API_KEY = _mailing_list_archive_api_key
IANA_SYNC_USERNAME = "ietfsync"
IANA_SYNC_CHANGES_URL = "https://datatracker.iana.org:4443/data-tracker/changes"
@@ -140,10 +183,25 @@ def _multiline_to_list(s):
raise RuntimeError("DATATRACKER_REGISTRATION_API_KEY must be set")
STATS_REGISTRATION_ATTENDEES_JSON_URL = f"https://registration.ietf.org/{{number}}/attendees/?apikey={_registration_api_key}"
-#FIRST_CUTOFF_DAYS = 12
-#SECOND_CUTOFF_DAYS = 12
-#SUBMISSION_CUTOFF_DAYS = 26
-#SUBMISSION_CORRECTION_DAYS = 57
+# Registration Participants API config - key must be set, but the URL can be left
+# to the default in settings.py
+_registration_participants_api_key = os.environ.get(
+ "DATATRACKER_REGISTRATION_PARTICIPANTS_API_KEY", None
+)
+if _registration_participants_api_key is None:
+ raise RuntimeError("DATATRACKER_REGISTRATION_PARTICIPANTS_API_KEY must be set")
+REGISTRATION_PARTICIPANTS_API_KEY = _registration_participants_api_key
+
+_registration_participants_api_url = os.environ.get(
+ "DATATRACKER_REGISTRATION_PARTICIPANTS_API_URL", None
+)
+if _registration_participants_api_url is not None:
+ REGISTRATION_PARTICIPANTS_API_URL = _registration_participants_api_url
+
+# FIRST_CUTOFF_DAYS = 12
+# SECOND_CUTOFF_DAYS = 12
+# SUBMISSION_CUTOFF_DAYS = 26
+# SUBMISSION_CORRECTION_DAYS = 57
MEETING_MATERIALS_SUBMISSION_CUTOFF_DAYS = 26
MEETING_MATERIALS_SUBMISSION_CORRECTION_DAYS = 54
@@ -155,7 +213,7 @@ def _multiline_to_list(s):
if _MEETECHO_CLIENT_ID is not None and _MEETECHO_CLIENT_SECRET is not None:
MEETECHO_API_CONFIG = {
"api_base": os.environ.get(
- "DATATRACKER_MEETECHO_API_BASE",
+ "DATATRACKER_MEETECHO_API_BASE",
"https://meetings.conf.meetecho.com/api/v1/",
),
"client_id": _MEETECHO_CLIENT_ID,
@@ -167,13 +225,21 @@ def _multiline_to_list(s):
"DATATRACKER_MEETECHO_CLIENT_ID and DATATRACKER_MEETECHO_CLIENT_SECRET must be set"
)
-# For APP_API_TOKENS, ccept either base64-encoded JSON or raw JSON, but not both
+# For APP_API_TOKENS, accept either base64-encoded JSON or raw JSON, but not both.
+# To decode / pretty-print the encoded form, run:
+# base64 -d | jq .
+# paste the encoded secret into stdin. Copy/paste that into an editor you trust not
+# to leave a copy lying around. When done editing, copy/paste the final JSON through
+# jq -c | base64
+# and copy/paste the output into the secret store.
if "DATATRACKER_APP_API_TOKENS_JSON_B64" in os.environ:
if "DATATRACKER_APP_API_TOKENS_JSON" in os.environ:
raise RuntimeError(
"Only one of DATATRACKER_APP_API_TOKENS_JSON and DATATRACKER_APP_API_TOKENS_JSON_B64 may be set"
)
- _APP_API_TOKENS_JSON = b64decode(os.environ.get("DATATRACKER_APP_API_TOKENS_JSON_B64"))
+ _APP_API_TOKENS_JSON = b64decode(
+ os.environ.get("DATATRACKER_APP_API_TOKENS_JSON_B64")
+ )
else:
_APP_API_TOKENS_JSON = os.environ.get("DATATRACKER_APP_API_TOKENS_JSON", None)
@@ -189,7 +255,9 @@ def _multiline_to_list(s):
# Leave DATATRACKER_MATOMO_SITE_ID unset to disable Matomo reporting
if "DATATRACKER_MATOMO_SITE_ID" in os.environ:
- MATOMO_DOMAIN_PATH = os.environ.get("DATATRACKER_MATOMO_DOMAIN_PATH", "analytics.ietf.org")
+ MATOMO_DOMAIN_PATH = os.environ.get(
+ "DATATRACKER_MATOMO_DOMAIN_PATH", "analytics.ietf.org"
+ )
MATOMO_SITE_ID = os.environ.get("DATATRACKER_MATOMO_SITE_ID")
MATOMO_DISABLE_COOKIES = True
@@ -197,9 +265,13 @@ def _multiline_to_list(s):
_SCOUT_KEY = os.environ.get("DATATRACKER_SCOUT_KEY", None)
if _SCOUT_KEY is not None:
if SERVER_MODE == "production":
- PROD_PRE_APPS = ["scout_apm.django", ]
+ PROD_PRE_APPS = [
+ "scout_apm.django",
+ ]
else:
- DEV_PRE_APPS = ["scout_apm.django", ]
+ DEV_PRE_APPS = [
+ "scout_apm.django",
+ ]
SCOUT_MONITOR = True
SCOUT_KEY = _SCOUT_KEY
SCOUT_NAME = os.environ.get("DATATRACKER_SCOUT_NAME", "Datatracker")
@@ -216,16 +288,19 @@ def _multiline_to_list(s):
STATIC_URL = os.environ.get("DATATRACKER_STATIC_URL", None)
if STATIC_URL is None:
from ietf import __version__
+
STATIC_URL = f"https://static.ietf.org/dt/{__version__}/"
# Set these to the same as "production" in settings.py, whether production mode or not
MEDIA_ROOT = "/a/www/www6s/lib/dt/media/"
-MEDIA_URL = "https://www.ietf.org/lib/dt/media/"
+MEDIA_URL = "https://www.ietf.org/lib/dt/media/"
PHOTOS_DIRNAME = "photo"
PHOTOS_DIR = MEDIA_ROOT + PHOTOS_DIRNAME
# Normally only set for debug, but needed until we have a real FS
-DJANGO_VITE_MANIFEST_PATH = os.path.join(BASE_DIR, 'static/dist-neue/manifest.json')
+DJANGO_VITE["default"]["manifest_path"] = os.path.join(
+ BASE_DIR, "static/dist-neue/manifest.json"
+)
# Binaries that are different in the docker image
DE_GFM_BINARY = "/usr/local/bin/de-gfm"
@@ -235,12 +310,34 @@ def _multiline_to_list(s):
MEMCACHED_HOST = os.environ.get("DT_MEMCACHED_SERVICE_HOST", "127.0.0.1")
MEMCACHED_PORT = os.environ.get("DT_MEMCACHED_SERVICE_PORT", "11211")
from ietf import __version__
+
CACHES = {
"default": {
"BACKEND": "ietf.utils.cache.LenientMemcacheCache",
"LOCATION": f"{MEMCACHED_HOST}:{MEMCACHED_PORT}",
"VERSION": __version__,
"KEY_PREFIX": "ietf:dt",
+ # Key function is default except with sha384-encoded key
+ "KEY_FUNCTION": lambda key, key_prefix, version: (
+ f"{key_prefix}:{version}:{sha384(str(key).encode('utf8')).hexdigest()}"
+ ),
+ },
+ "agenda": {
+ "BACKEND": "ietf.utils.cache.LenientMemcacheCache",
+ "LOCATION": f"{MEMCACHED_HOST}:{MEMCACHED_PORT}",
+ # No release-specific VERSION setting.
+ "KEY_PREFIX": "ietf:dt:agenda",
+ # Key function is default except with sha384-encoded key
+ "KEY_FUNCTION": lambda key, key_prefix, version: (
+ f"{key_prefix}:{version}:{sha384(str(key).encode('utf8')).hexdigest()}"
+ ),
+ },
+ "proceedings": {
+ "BACKEND": "ietf.utils.cache.LenientMemcacheCache",
+ "LOCATION": f"{MEMCACHED_HOST}:{MEMCACHED_PORT}",
+ # No release-specific VERSION setting.
+ "KEY_PREFIX": "ietf:dt:proceedings",
+ # Key function is default except with sha384-encoded key
"KEY_FUNCTION": lambda key, key_prefix, version: (
f"{key_prefix}:{version}:{sha384(str(key).encode('utf8')).hexdigest()}"
),
@@ -285,3 +382,137 @@ def _multiline_to_list(s):
# Console logs as JSON instead of plain when running in k8s
LOGGING["handlers"]["console"]["formatter"] = "json"
+
+# Configure storages for the replica blob store
+_blob_store_endpoint_url = os.environ.get("DATATRACKER_BLOB_STORE_ENDPOINT_URL")
+_blob_store_access_key = os.environ.get("DATATRACKER_BLOB_STORE_ACCESS_KEY")
+_blob_store_secret_key = os.environ.get("DATATRACKER_BLOB_STORE_SECRET_KEY")
+if None in (_blob_store_endpoint_url, _blob_store_access_key, _blob_store_secret_key):
+ raise RuntimeError(
+ "All of DATATRACKER_BLOB_STORE_ENDPOINT_URL, DATATRACKER_BLOB_STORE_ACCESS_KEY, "
+ "and DATATRACKER_BLOB_STORE_SECRET_KEY must be set"
+ )
+_blob_store_bucket_prefix = os.environ.get("DATATRACKER_BLOB_STORE_BUCKET_PREFIX", "")
+_blob_store_bucket_suffix = os.environ.get("DATATRACKER_BLOB_STORE_BUCKET_SUFFIX", "")
+_blob_store_enable_profiling = (
+ os.environ.get("DATATRACKER_BLOB_STORE_ENABLE_PROFILING", "false").lower() == "true"
+)
+_blob_store_max_attempts = int(
+ os.environ.get("DATATRACKER_BLOB_STORE_MAX_ATTEMPTS", BLOBSTORAGE_MAX_ATTEMPTS)
+)
+_blob_store_connect_timeout = float(
+ os.environ.get(
+ "DATATRACKER_BLOB_STORE_CONNECT_TIMEOUT", BLOBSTORAGE_CONNECT_TIMEOUT
+ )
+)
+_blob_store_read_timeout = float(
+ os.environ.get("DATATRACKER_BLOB_STORE_READ_TIMEOUT", BLOBSTORAGE_READ_TIMEOUT)
+)
+
+for storagename in ARTIFACT_STORAGE_NAMES:
+ if storagename in ["staging"]:
+ continue
+ replica_storagename = f"r2-{storagename}"
+ adjusted_bucket_name = (
+ _blob_store_bucket_prefix + storagename + _blob_store_bucket_suffix
+ ).strip()
+ STORAGES[replica_storagename] = {
+ "BACKEND": "ietf.doc.storage.MetadataS3Storage",
+ "OPTIONS": dict(
+ endpoint_url=_blob_store_endpoint_url,
+ access_key=_blob_store_access_key,
+ secret_key=_blob_store_secret_key,
+ security_token=None,
+ client_config=botocore.config.Config(
+ request_checksum_calculation="when_required",
+ response_checksum_validation="when_required",
+ signature_version="s3v4",
+ connect_timeout=_blob_store_connect_timeout,
+ read_timeout=_blob_store_read_timeout,
+ retries={"total_max_attempts": _blob_store_max_attempts},
+ ),
+ verify=False,
+ bucket_name=adjusted_bucket_name,
+ ietf_log_blob_timing=_blob_store_enable_profiling,
+ ),
+ }
+
+# Configure storage for the red bucket - assume it uses the same credentials as
+# other blobs
+_red_bucket_name = os.environ.get("DATATRACKER_BLOB_STORE_RED_BUCKET_NAME", "").strip()
+if _red_bucket_name == "":
+ raise RuntimeError("DATATRACKER_BLOB_STORE_RED_BUCKET_NAME must be set")
+
+STORAGES["red_bucket"] = {
+ "BACKEND": "storages.backends.s3.S3Storage",
+ "OPTIONS": dict(
+ endpoint_url=_blob_store_endpoint_url,
+ access_key=_blob_store_access_key,
+ secret_key=_blob_store_secret_key,
+ security_token=None,
+ client_config=botocore.config.Config(
+ request_checksum_calculation="when_required",
+ response_checksum_validation="when_required",
+ signature_version="s3v4",
+ connect_timeout=_blob_store_connect_timeout,
+ read_timeout=_blob_store_read_timeout,
+ retries={"total_max_attempts": _blob_store_max_attempts},
+ ),
+ verify=False,
+ bucket_name=_red_bucket_name,
+ ),
+}
+RFCINDEX_DELETE_THEN_WRITE = False # S3Storage allows file_overwrite by default
+RFCINDEX_OUTPUT_PATH = os.environ.get("DATATRACKER_RFCINDEX_OUTPUT_PATH", "other/")
+RFCINDEX_INPUT_PATH = os.environ.get("DATATRACKER_RFCINDEX_INPUT_PATH", "")
+
+# Configure the blobdb app for artifact storage
+_blobdb_replication_enabled = (
+ os.environ.get("DATATRACKER_BLOBDB_REPLICATION_ENABLED", "true").lower() == "true"
+)
+_blobdb_replication_verbose_logging = (
+ os.environ.get("DATATRACKER_BLOBDB_REPLICATION_VERBOSE_LOGGING", "false").lower()
+ == "true"
+)
+
+BLOBDB_REPLICATION = {
+ "ENABLED": _blobdb_replication_enabled,
+ "DEST_STORAGE_PATTERN": "r2-{bucket}",
+ "INCLUDE_BUCKETS": ARTIFACT_STORAGE_NAMES,
+ "EXCLUDE_BUCKETS": ["staging"],
+ "VERBOSE_LOGGING": _blobdb_replication_verbose_logging,
+}
+
+# Optionally disable password strength enforcement at login (on by default)
+PASSWORD_POLICY_ENFORCE_AT_LOGIN = (
+ os.environ.get("DATATRACKER_ENFORCE_PW_POLICY", "true").lower() != "false"
+)
+
+# Typesense search indexing
+SEARCHINDEX_CONFIG = {
+ "TYPESENSE_API_URL": os.environ.get("DATATRACKER_TYPESENSE_API_URL", ""),
+ "TYPESENSE_API_KEY": os.environ.get("DATATRACKER_TYPESENSE_API_KEY", ""),
+ "TASK_RETRY_DELAY": os.environ.get("DATATRACKER_SEARCHINDEX_TASK_RETRY_DELAY", 10),
+ "TASK_MAX_RETRIES": os.environ.get(
+ "DATATRACKER_SEARCHINDEX_TASK_MAX_RETRIES", "12"
+ ),
+}
+
+# Errata system api configuration
+ERRATA_METADATA_NOTIFICATION_API_KEY = os.environ.get(
+ "DATATRACKER_ERRATA_METADATA_NOTIFICATION_API_KEY", None
+)
+if ERRATA_METADATA_NOTIFICATION_API_KEY is not None:
+ ERRATA_METADATA_NOTIFICATION_URL = os.environ.get(
+ "DATATRACKER_ERRATA_METADATA_NOTIFICATION_URL", None
+ )
+ if ERRATA_METADATA_NOTIFICATION_URL is None:
+ raise RuntimeError(
+ "DATATRACKER_ERRATA_METADATA_NOTIFICATION_URL must be set if "
+ "DATATRACKER_ERRATA_METADATA_NOTIFICATION_API_KEY is provided"
+ )
+
+# name (with path) of errata.json in the red bucket
+ERRATA_JSON_BLOB_NAME = os.environ.get(
+ "DATATRACKER_ERRATA_JSON_BLOB_NAME", "other/errata.json"
+)
diff --git a/mypy.ini b/mypy.ini
index 19df7ec9b0..4acaf98c95 100644
--- a/mypy.ini
+++ b/mypy.ini
@@ -2,6 +2,9 @@
ignore_missing_imports = True
+# allow PEP 695 type aliases (flag needed until mypy >= 1.13)
+enable_incomplete_feature = NewGenericSyntax
+
plugins =
mypy_django_plugin.main
diff --git a/package.json b/package.json
index b3d36b349c..57642d7860 100644
--- a/package.json
+++ b/package.json
@@ -111,11 +111,15 @@
"ietf/static/images/irtf-logo-card.png",
"ietf/static/images/irtf-logo-white.svg",
"ietf/static/images/irtf-logo.svg",
+ "ietf/static/js/add_session_recordings.js",
+ "ietf/static/js/attendees-chart.js",
"ietf/static/js/agenda_filter.js",
"ietf/static/js/agenda_materials.js",
+ "ietf/static/js/announcement.js",
"ietf/static/js/complete-review.js",
"ietf/static/js/create_timeslot.js",
"ietf/static/js/create_timeslot.js",
+ "ietf/static/js/custom_striped.js",
"ietf/static/js/d3.js",
"ietf/static/js/datepicker.js",
"ietf/static/js/doc-search.js",
@@ -143,9 +147,13 @@
"ietf/static/js/manage-review-requests.js",
"ietf/static/js/meeting-interim-request.js",
"ietf/static/js/moment.js",
+ "ietf/static/js/navbar-doc-search.js",
"ietf/static/js/password_strength.js",
"ietf/static/js/select2.js",
+ "ietf/static/js/session_details.js",
"ietf/static/js/session_details_form.js",
+ "ietf/static/js/session_form.js",
+ "ietf/static/js/session_request.js",
"ietf/static/js/sortable.js",
"ietf/static/js/stats.js",
"ietf/static/js/status-change-edit-relations.js",
@@ -206,8 +214,6 @@
"ietf/secr/static/images/tooltag-arrowright.webp",
"ietf/secr/static/images/tooltag-arrowright_over.webp",
"ietf/secr/static/js/dynamic_inlines.js",
- "ietf/secr/static/js/session_form.js",
- "ietf/secr/static/js/sessions.js",
"ietf/secr/static/js/utils.js"
]
}
diff --git a/patch/django-cookie-delete-with-all-settings.patch b/patch/django-cookie-delete-with-all-settings.patch
index fb8bbbe4fe..4ceaf8fceb 100644
--- a/patch/django-cookie-delete-with-all-settings.patch
+++ b/patch/django-cookie-delete-with-all-settings.patch
@@ -9,9 +9,9 @@
samesite=settings.SESSION_COOKIE_SAMESITE,
)
---- django/http/response.py.orig 2020-08-13 11:16:04.060627793 +0200
-+++ django/http/response.py 2020-08-13 11:54:03.482476973 +0200
-@@ -282,20 +282,28 @@
+--- django/http/response.py.orig 2025-12-02 22:12:05.197283001 +0000
++++ django/http/response.py 2025-12-02 22:26:01.396576013 +0000
+@@ -286,20 +286,28 @@
value = signing.get_cookie_signer(salt=key + salt).sign(value)
return self.set_cookie(key, value, **kwargs)
diff --git a/patch/tastypie-django22-fielderror-response.patch b/patch/tastypie-django22-fielderror-response.patch
index ffb152d319..3b4418fc66 100644
--- a/patch/tastypie-django22-fielderror-response.patch
+++ b/patch/tastypie-django22-fielderror-response.patch
@@ -1,5 +1,5 @@
---- tastypie/resources.py.orig 2020-08-24 13:14:25.463166100 +0200
-+++ tastypie/resources.py 2020-08-24 13:15:55.133759224 +0200
+--- tastypie/resources.py.orig 2025-07-29 19:00:01.526948002 +0000
++++ tastypie/resources.py 2025-07-29 19:07:15.324127008 +0000
@@ -12,7 +12,7 @@
ObjectDoesNotExist, MultipleObjectsReturned, ValidationError, FieldDoesNotExist
)
@@ -9,13 +9,13 @@
from django.db.models.fields.related import ForeignKey
from django.urls.conf import re_path
from tastypie.utils.timezone import make_naive_utc
-@@ -2198,6 +2198,8 @@
+@@ -2216,6 +2216,8 @@
return self.authorized_read_list(objects, bundle)
except ValueError:
raise BadRequest("Invalid resource lookup data provided (mismatched type).")
+ except FieldError as e:
+ raise BadRequest("Invalid resource lookup: %s." % e)
-
+
def obj_get(self, bundle, **kwargs):
"""
--- tastypie/paginator.py.orig 2020-08-25 15:24:46.391588425 +0200
diff --git a/playwright/.gitignore b/playwright/.gitignore
index 75e854d8dc..f38d036a79 100644
--- a/playwright/.gitignore
+++ b/playwright/.gitignore
@@ -2,3 +2,4 @@ node_modules/
/test-results/
/playwright-report/
/playwright/.cache/
+auth.json
\ No newline at end of file
diff --git a/playwright/helpers/common.js b/playwright/helpers/common.js
index 5ba39ba022..c4dd7e2640 100644
--- a/playwright/helpers/common.js
+++ b/playwright/helpers/common.js
@@ -13,5 +13,29 @@ module.exports = {
return rect.top < bottom && rect.top > 0 - rect.height
})
+ },
+ /**
+ * Override page DateTime with a new value
+ *
+ * @param {Object} page Page object
+ * @param {Object} dateTimeOverride New DateTime object
+ */
+ overridePageDateTime: async (page, dateTimeOverride) => {
+ await page.addInitScript(`{
+ // Extend Date constructor to default to fixed time
+ Date = class extends Date {
+ constructor(...args) {
+ if (args.length === 0) {
+ super(${dateTimeOverride.toMillis()});
+ } else {
+ super(...args);
+ }
+ }
+ }
+ // Override Date.now() to start from fixed time
+ const __DateNowOffset = ${dateTimeOverride.toMillis()} - Date.now();
+ const __DateNow = Date.now;
+ Date.now = () => __DateNow() + __DateNowOffset;
+ }`)
}
}
diff --git a/playwright/helpers/meeting.js b/playwright/helpers/meeting.js
index 52bc331fd6..634ca2e8c6 100644
--- a/playwright/helpers/meeting.js
+++ b/playwright/helpers/meeting.js
@@ -395,7 +395,7 @@ module.exports = {
name: 'Hackathon Kickoff',
startDateTime: day1.set({ hour: 10, minute: 30 }),
duration: '30m',
- ...findAreaGroup('hackathon-kickoff', categories[2]),
+ ...findAreaGroup('hackathon', categories[2]),
showAgenda: true,
hasAgenda: true,
hasRecordings: true,
@@ -609,6 +609,9 @@ module.exports = {
startDateTime: curDay.set({ hour: 17, minute: 30 }),
duration: '2h',
type: 'plenary',
+ showAgenda: true,
+ hasAgenda: true,
+ hasRecordings: true,
...findAreaGroup('ietf-plenary', categories[2])
}, floors))
}
diff --git a/playwright/tests-legacy/secr/announcement.spec.js b/playwright/tests-legacy/secr/announcement.spec.js
new file mode 100644
index 0000000000..4dbbc25a81
--- /dev/null
+++ b/playwright/tests-legacy/secr/announcement.spec.js
@@ -0,0 +1,77 @@
+const { test, expect } = require('@playwright/test')
+const viewports = require('../../helpers/viewports')
+const { setTimeout } = require('timers/promises')
+
+// ====================================================================
+// ANNOUNCEMENT | DESKTOP viewport
+// ====================================================================
+
+test.describe('desktop', () => {
+
+ test.beforeAll(async ({ browser }) => {
+ const context = await browser.newContext();
+ const page = await context.newPage();
+
+ await page.goto('/accounts/login/');
+
+ await page.fill('input#id_username', 'glen');
+ await page.fill('input#id_password', 'password');
+
+ await page.click('button[type="submit"]');
+ await page.waitForURL('/accounts/profile/');
+
+ await context.storageState({ path: 'auth.json' });
+
+ await context.close();
+ });
+
+ test.beforeEach(async ({ browser }) => {
+ // Reuse the authentication state in each test
+ const context = await browser.newContext({ storageState: 'auth.json' });
+ const page = await context.newPage();
+ await page.setViewportSize({
+ width: viewports.desktop[0],
+ height: viewports.desktop[1]
+ })
+ await page.goto(`/secr/announcement/`);
+ await page.locator('h1:text("Announcement")').waitFor({ state: 'visible' })
+ await setTimeout(500)
+ // Attach the page to the test context
+ test.info().page = page;
+ })
+
+ test('show to custom', async () => {
+ const page = test.info().page;
+
+ // to_custom should initially be hidden
+ const element = page.locator('#id_to_custom');
+ await expect(element).toBeHidden();
+ await page.selectOption('select#id_to', 'Other...');
+ await expect(element).toBeVisible();
+ })
+
+ test('back button', async () => {
+ const page = test.info().page;
+
+ const element = page.locator('#id_to_custom');
+ await page.selectOption('select#id_to', 'Other...');
+ await expect(element).toBeVisible();
+ await page.fill('input#id_to_custom', 'custom@example.com');
+ await page.selectOption('select#id_frm', 'IETF Chair ');
+ await page.fill('input#id_reply_to', 'greg@example.com');
+ await page.fill('input#id_subject', 'About Stuff');
+ await page.fill('textarea#id_body', 'This is the stuff');
+
+ await page.click('text="Continue"');
+ const h2Locator = page.locator('h2:text("Confirm Announcement")');
+ await h2Locator.waitFor({ state: 'visible' });
+
+ // click back button and check to_custom
+ await page.click('text="Back"');
+ const subjectLocator = page.locator('input#id_subject');
+ await subjectLocator.waitFor({ state: 'visible' });
+ await expect(element).toBeVisible();
+ await expect(element).toHaveValue('custom@example.com');
+ })
+
+})
\ No newline at end of file
diff --git a/playwright/tests/meeting/agenda.spec.js b/playwright/tests/meeting/agenda.spec.js
index d31dbd5c2c..2248027a38 100644
--- a/playwright/tests/meeting/agenda.spec.js
+++ b/playwright/tests/meeting/agenda.spec.js
@@ -238,7 +238,7 @@ test.describe('past - desktop', () => {
// Name column
// -----------
// Event icon
- if (['break', 'plenary'].includes(event.type) || (event.type === 'other' && ['office hours', 'hackathon'].some(s => event.name.toLowerCase().indexOf(s) >= 0))) {
+ if (['break', 'plenary'].includes(event.type) || (event.type === 'other' && event.name.toLowerCase().indexOf('office hours') >= 0)) {
await expect(row.locator('.agenda-table-cell-name > i.bi')).toBeVisible()
}
// Name link
@@ -275,7 +275,7 @@ test.describe('past - desktop', () => {
const eventButtons = row.locator('.agenda-table-cell-links > .agenda-table-cell-links-buttons')
if (event.flags.agenda) {
// Show meeting materials button
- await expect(eventButtons.locator('i.bi.bi-collection')).toBeVisible()
+ await expect(eventButtons.locator(`#btn-btn-${event.id}-mat`)).toBeVisible()
// ZIP materials button
await expect(eventButtons.locator(`#btn-lnk-${event.id}-tar`)).toHaveAttribute('href', `/meeting/${meetingData.meeting.number}/agenda/${event.acronym}-drafts.tgz`)
await expect(eventButtons.locator(`#btn-lnk-${event.id}-tar > i.bi`)).toBeVisible()
@@ -286,7 +286,7 @@ test.describe('past - desktop', () => {
// No meeting materials yet warning badge
await expect(eventButtons.locator('.no-meeting-materials')).toBeVisible()
}
- if (event.name.toLowerCase().includes('hackathon')) {
+ if (event.groupAcronym === 'hackathon') {
// Hackathon Wiki button
const hackathonWikiLink = `https://wiki.ietf.org/meeting/${meetingData.meeting.number}/hackathon`
await expect(eventButtons.locator(`#btn-lnk-${event.id}-wiki`)).toHaveAttribute('href', hackathonWikiLink)
@@ -425,7 +425,7 @@ test.describe('past - desktop', () => {
})
})
// Open dialog
- await page.locator(`#agenda-rowid-${event.id} #btn-lnk-${event.id}-mat`).click()
+ await page.locator(`#agenda-rowid-${event.id} #btn-btn-${event.id}-mat`).click()
await expect(page.locator('.agenda-eventdetails')).toBeVisible()
// Header
await expect(page.locator('.agenda-eventdetails .n-card-header__main > .detail-header > .bi')).toBeVisible()
@@ -507,7 +507,7 @@ test.describe('past - desktop', () => {
})
})
// Open dialog
- await page.locator(`#btn-lnk-${event.id}-mat`).click()
+ await page.locator(`#btn-btn-${event.id}-mat`).click()
await expect(page.locator('.agenda-eventdetails')).toBeVisible()
// Slides Tab
await page.locator('.agenda-eventdetails .detail-nav > a').nth(1).click()
@@ -1158,7 +1158,7 @@ test.describe('future - desktop', () => {
if (event.flags.showAgenda || (['regular', 'plenary', 'other'].includes(event.type) && !['admin', 'closed_meeting', 'officehours', 'social'].includes(event.purpose))) {
if (event.flags.agenda) {
// Show meeting materials button
- await expect(eventButtons.locator('i.bi.bi-collection')).toBeVisible()
+ await expect(eventButtons.locator(`#btn-btn-${event.id}-mat`)).toBeVisible()
// ZIP materials button
await expect(eventButtons.locator(`#btn-lnk-${event.id}-tar`)).toHaveAttribute('href', `/meeting/${meetingData.meeting.number}/agenda/${event.acronym}-drafts.tgz`)
await expect(eventButtons.locator(`#btn-lnk-${event.id}-tar > i.bi`)).toBeVisible()
@@ -1169,7 +1169,7 @@ test.describe('future - desktop', () => {
// No meeting materials yet warning badge
await expect(eventButtons.locator('.no-meeting-materials')).toBeVisible()
}
- if (event.name.toLowerCase().includes('hackathon')) {
+ if (event.groupAcronym === 'hackathon') {
// Hackathon Wiki button
const hackathonWikiLink = `https://wiki.ietf.org/meeting/${meetingData.meeting.number}/hackathon`
await expect(eventButtons.locator(`#btn-lnk-${event.id}-wiki`)).toHaveAttribute('href', hackathonWikiLink)
@@ -1213,13 +1213,18 @@ test.describe('future - desktop', () => {
await expect(eventButtons.locator(`#btn-lnk-${event.id}-remotecallin`)).toHaveAttribute('href', remoteCallInUrl)
await expect(eventButtons.locator(`#btn-lnk-${event.id}-remotecallin > i.bi`)).toBeVisible()
}
- // calendar
+ // Calendar
if (event.links.calendar) {
await expect(eventButtons.locator(`#btn-lnk-${event.id}-calendar`)).toHaveAttribute('href', event.links.calendar)
await expect(eventButtons.locator(`#btn-lnk-${event.id}-calendar > i.bi`)).toBeVisible()
}
} else {
- await expect(eventButtons).toHaveCount(0)
+ if (event.links.calendar) {
+ await expect(eventButtons.locator(`#btn-lnk-${event.id}-calendar`)).toHaveAttribute('href', event.links.calendar)
+ await expect(eventButtons.locator(`#btn-lnk-${event.id}-calendar > i.bi`)).toBeVisible()
+ } else {
+ await expect(eventButtons).toHaveCount(0)
+ }
}
}
}
@@ -1278,22 +1283,7 @@ test.describe('live - desktop', () => {
})
// Override Date in page to fixed time
- await page.addInitScript(`{
- // Extend Date constructor to default to fixed time
- Date = class extends Date {
- constructor(...args) {
- if (args.length === 0) {
- super(${currentTime.toMillis()});
- } else {
- super(...args);
- }
- }
- }
- // Override Date.now() to start from fixed time
- const __DateNowOffset = ${currentTime.toMillis()} - Date.now();
- const __DateNow = Date.now;
- Date.now = () => __DateNow() + __DateNowOffset;
- }`)
+ await commonHelper.overridePageDateTime(page, currentTime)
// Visit agenda page and await Meeting Data API call to complete
await Promise.all([
@@ -1348,6 +1338,89 @@ test.describe('live - desktop', () => {
})
})
+// ====================================================================
+// AGENDA (live meeting) | DESKTOP viewport | Plenary Extended Time Buttons
+// ====================================================================
+
+test.describe('live - desktop - plenary extended time buttons', () => {
+ let meetingData
+ let plenarySessionId
+
+ test.beforeAll(async () => {
+ // Generate meeting data
+ meetingData = meetingHelper.generateAgendaResponse({ dateMode: 'current' })
+ plenarySessionId = meetingData.schedule.find(s => s.type === 'plenary').id
+ })
+
+ test.beforeEach(async ({ page }) => {
+ // Intercept Meeting Data API
+ await page.route(`**/api/meeting/${meetingData.meeting.number}/agenda-data`, route => {
+ route.fulfill({
+ status: 200,
+ contentType: 'application/json',
+ body: JSON.stringify(meetingData)
+ })
+ })
+
+ await page.setViewportSize({
+ width: viewports.desktop[0],
+ height: viewports.desktop[1]
+ })
+ })
+
+ // -> BUTTONS PRESENT AFTER EVENT, SAME DAY
+
+ test('same day - after event', async ({ page }) => {
+ // Override Date in page to fixed time
+ const currentTime = DateTime.fromISO('2022-02-01T13:45:15', { zone: 'Asia/Tokyo' }).plus({ days: 1 }).set({ hour: 20, minute: 30 })
+ await commonHelper.overridePageDateTime(page, currentTime)
+
+ // Visit agenda page and await Meeting Data API call to complete
+ await Promise.all([
+ page.waitForResponse(`**/api/meeting/${meetingData.meeting.number}/agenda-data`),
+ page.goto(`/meeting/${meetingData.meeting.number}/agenda`)
+ ])
+
+ // Wait for page to be ready
+ await page.locator('.agenda h1').waitFor({ state: 'visible' })
+ await setTimeout(500)
+
+ // Check for plenary event
+ await expect(page.locator('.agenda .agenda-table-display-event.agenda-table-type-plenary')).toBeVisible()
+ await page.locator('.agenda .agenda-table-display-event.agenda-table-type-plenary').scrollIntoViewIfNeeded()
+
+ // Check for full video client + on-site tool
+ await expect(page.locator(`.agenda .agenda-table-display-event.agenda-table-type-plenary .agenda-table-cell-links-buttons a#btn-lnk-${plenarySessionId}-video`)).toBeVisible()
+ await expect(page.locator(`.agenda .agenda-table-display-event.agenda-table-type-plenary .agenda-table-cell-links-buttons a#btn-lnk-${plenarySessionId}-onsitetool`)).toBeVisible()
+ })
+
+ // -> BUTTONS NO LONGER PRESENT AFTER EVENT, NEXT DAY
+
+ test('next day - after event', async ({ page }) => {
+ // Override Date in page to fixed time
+ const currentTime = DateTime.fromISO('2022-02-01T13:45:15', { zone: 'Asia/Tokyo' }).plus({ days: 2 }).set({ hour: 2, minute: 30 })
+ await commonHelper.overridePageDateTime(page, currentTime)
+
+ // Visit agenda page and await Meeting Data API call to complete
+ await Promise.all([
+ page.waitForResponse(`**/api/meeting/${meetingData.meeting.number}/agenda-data`),
+ page.goto(`/meeting/${meetingData.meeting.number}/agenda`)
+ ])
+
+ // Wait for page to be ready
+ await page.locator('.agenda h1').waitFor({ state: 'visible' })
+ await setTimeout(500)
+
+ // Check for plenary event
+ await expect(page.locator('.agenda .agenda-table-display-event.agenda-table-type-plenary')).toBeVisible()
+ await page.locator('.agenda .agenda-table-display-event.agenda-table-type-plenary').scrollIntoViewIfNeeded()
+
+ // Check for full video client + on-site tool
+ await expect(page.locator(`.agenda .agenda-table-display-event.agenda-table-type-plenary .agenda-table-cell-links-buttons a#btn-lnk-${plenarySessionId}-video`)).not.toBeVisible()
+ await expect(page.locator(`.agenda .agenda-table-display-event.agenda-table-type-plenary .agenda-table-cell-links-buttons a#btn-lnk-${plenarySessionId}-onsitetool`)).not.toBeVisible()
+ })
+})
+
// ====================================================================
// AGENDA (past meeting) | SMALL DESKTOP/TABLET/MOBILE viewports
// ====================================================================
diff --git a/requirements.txt b/requirements.txt
index 073a6bfa0a..ca9a6740e1 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,81 +1,95 @@
# -*- conf-mode -*-
-setuptools>=51.1.0 # Require this first, to prevent later errors
+setuptools>=80.9.0 # Require this first, to prevent later errors
#
-argon2-cffi>=21.3.0 # For the Argon2 password hasher option
-beautifulsoup4>=4.11.1 # Only used in tests
-bibtexparser>=1.2.0 # Only used in tests
-bleach>=6
-types-bleach>=6
-celery>=5.2.6
-coverage>=4.5.4,<5.0 # Coverage 5.x moves from a json database to SQLite. Moving to 5.x will require substantial rewrites in ietf.utils.test_runner and ietf.release.views
+aiosmtpd>=1.4.6
+argon2-cffi>=25.1.0 # For the Argon2 password hasher option
+beautifulsoup4>=4.13.4 # Only used in tests
+bibtexparser>=1.4.4 # Only used in tests
+bleach>=6.2.0 # project is deprecated but supported
+types-bleach>=6.2.0
+boto3>=1.39.15
+boto3-stubs[s3]>=1.39.15
+botocore>=1.39.15
+celery>=5.5.3
+coverage>=7.9.2
defusedxml>=0.7.1 # for TastyPie when using xml; not a declared dependency
Django>4.2,<5
-django-analytical>=3.1.0
-django-bootstrap5>=21.3
-django-celery-beat>=2.3.0
-django-celery-results>=2.5.1
+django-admin-rangefilter>=0.13.3
+django-analytical>=3.2.0
+django-bootstrap5>=25.1
+django-celery-beat>=2.9.0
+django-celery-results>=2.6.0
django-csp>=3.7
-django-cors-headers>=3.11.0
-django-debug-toolbar>=3.2.4
-django-markup>=1.5 # Limited use - need to reconcile against direct use of markdown
+django-cors-headers>=4.7.0
+django-debug-toolbar>=6.0.0
+django-filter>=24.3
+django-markup>=1.10 # Limited use - need to reconcile against direct use of markdown
django-oidc-provider==0.8.2 # 0.8.3 changes logout flow and claim return
-django-referrer-policy>=1.0
-django-simple-history>=3.0.0
+django-simple-history>=3.10.1
+django-storages>=1.14.6
django-stubs>=4.2.7,<5 # The django-stubs version used determines the the mypy version indicated below
-django-tastypie>=0.14.7,<0.15.0 # Version must be locked in sync with version of Django
-django-vite>=2.0.2,<3
+django-tastypie>=0.15.1 # Version must be kept in sync with Django
+django-vite>=3.1.0
django-widget-tweaks>=1.4.12
-djangorestframework>=3.15,<4
-djlint>=1.0.0 # To auto-indent templates via "djlint --profile django --reformat"
-docutils>=0.18.1 # Used only by dbtemplates for RestructuredText
+djangorestframework>=3.16.0
+docutils>=0.22.0 # Used only by dbtemplates for RestructuredText
+types-docutils>=0.21.0 # should match docutils (types-docutils 0.22.0 not out yet)
drf-spectacular>=0.27
-drf-standardized-errors[openapi] >= 0.14
-types-docutils>=0.18.1
-factory-boy>=3.3
-github3.py>=3.2.0
-gunicorn>=20.1.0
+drf-standardized-errors[openapi] >= 0.15.0
+factory-boy>=3.3.3
+gunicorn>=23.0.0
hashids>=1.3.1
-html2text>=2020.1.16 # Used only to clean comment field of secr/sreq
+html2text>=2025.4.15 # Used only to clean comment field of secr/sreq
html5lib>=1.1 # Only used in tests
-importlib-metadata<8.5.0 # indirect req of Markdown/inflect; https://github.com/ietf-tools/datatracker/issues/7924
-inflect>= 6.0.2
-jsonfield>=3.1.0 # for SubmissionCheck. This is https://github.com/bradjasper/django-jsonfield/.
-jsonschema[format]>=4.2.1
-jwcrypto>=1.2 # for signed notifications - this is aspirational, and is not really used.
-logging_tree>=1.9 # Used only by the showloggers management command
-lxml>=5.3.0
-markdown>=3.3.6
-types-markdown>=3.3.6
-mock>=4.0.3 # Used only by tests, of course
-types-mock>=4.0.3
-mypy~=1.7.0 # Version requirements determined by django-stubs.
-oic>=1.3 # Used only by tests
-Pillow>=9.1.0
-psycopg2>=2.9.6
-pyang>=2.5.3
-pydyf>0.8.0,<0.10.0 # until weasyprint adjusts for 0.10.0 and later
-pyflakes>=2.4.0
-pyopenssl>=22.0.0 # Used by urllib3.contrib, which is used by PyQuery but not marked as a dependency
-pyquery>=1.4.3
-python-dateutil>=2.8.2
-types-python-dateutil>=2.8.2
-python-json-logger>=3.1.0
+httpx>=0.28.1 # Indirect req of typesense, but we import and refer to exceptions
+icalendar>=5.0.0
+inflect>= 7.5.0
+jsonfield>=3.2.0 # deprecated - need to replace with Django's JSONField
+jsonschema[format]>=4.25.0
+jwcrypto>=1.5.6 # for signed notifications - this is aspirational, and is not really used.
+logging_tree>=1.10 # Used only by the showloggers management command
+lxml>=6.0.0
+markdown>=3.8.0
+types-markdown>=3.8.0
+mock>=5.2.0 # should replace with unittest.mock and remove dependency
+types-mock>=5.2.0
+mypy~=1.11.2 # Version requirements loosely determined by django-stubs.
+oic>=1.7.0 # Used only by tests
+opentelemetry-sdk>=1.38.0
+opentelemetry-instrumentation-django>=0.59b0
+opentelemetry-instrumentation-psycopg2>=0.59b0
+opentelemetry-instrumentation-pymemcache>=0.59b0
+opentelemetry-instrumentation-requests>=0.59b0
+opentelemetry-exporter-otlp-proto-http>=1.38.0
+pillow>=11.3.0
+psycopg2>=2.9.10
+pyang>=2.6.1
+pydyf>=0.11.0
+pyflakes>=3.4.0
+pyopenssl>=25.1.0 # Used by urllib3.contrib, which is used by PyQuery but not marked as a dependency
+pyquery>=2.0.1
+python-dateutil>=2.9.0
+types-python-dateutil>=2.9.0
+python-json-logger>=3.3.0
python-magic==0.4.18 # Versions beyond the yanked .19 and .20 introduce form failures
pymemcache>=4.0.0 # for django.core.cache.backends.memcached.PyMemcacheCache
-python-mimeparse>=1.6 # from TastyPie
-pytz==2022.2.1 # Pinned as changes need to be vetted for their effect on Meeting fields
-types-pytz==2022.2.1 # match pytz version
-requests>=2.31.0
-types-requests>=2.27.1
-requests-mock>=1.9.3
+python-mimeparse>=2.0.0 # from TastyPie
+pytz==2025.2 # Pinned as changes need to be vetted for their effect on Meeting fields
+types-pytz==2025.2.0.20251108 # match pytz version
+typesense>=2.0.0
+requests>=2.32.4
+types-requests>=2.32.4
+requests-mock>=1.12.1
rfc2html>=2.0.3
-scout-apm>=2.24.2
-selenium>=4.0
-tblib>=1.7.0 # So that the django test runner provides tracebacks
-tlds>=2022042700 # Used to teach bleach about which TLDs currently exist
-tqdm>=4.64.0
-Unidecode>=1.3.4
-urllib3>=2
-weasyprint>=59
-xml2rfc[pdf]>=3.23.0
+scout-apm>=3.4.0
+selenium>=4.34.2
+tblib>=3.1.0 # So that the django test runner provides tracebacks
+tlds>=2022042700 # Used to teach bleach about which TLDs currently exist
+tqdm>=4.67.1
+unidecode>=1.4.0
+urllib3>=2.5.0
+weasyprint>=66.0
+xml2rfc>=3.30.0
xym>=0.6,<1.0
+zxcvbn>=4.5.0
+types-zxcvbn~=4.5.0.20250223 # match zxcvbn version
diff --git a/vite.config.js b/vite.config.js
index 41a2cb02e0..bde2b9ed57 100644
--- a/vite.config.js
+++ b/vite.config.js
@@ -16,7 +16,8 @@ export default defineConfig(({ command, mode }) => {
main: 'client/main.js',
embedded: 'client/embedded.js'
}
- }
+ },
+ sourcemap: true
},
cacheDir: '.vite',
plugins: [