From f76137eaae3dee819d62e9592f9360d4241e7098 Mon Sep 17 00:00:00 2001
From: Jennifer Richards
Date: Thu, 5 Dec 2024 10:31:09 -0400
Subject: [PATCH 001/460] fix: disable raw/include in RST (#8300)
* fix: disable raw/include in RST
* fix: suppress warnings
---
ietf/settings.py | 3 +++
1 file changed, 3 insertions(+)
diff --git a/ietf/settings.py b/ietf/settings.py
index 6990037585..cf8abe9f4d 100644
--- a/ietf/settings.py
+++ b/ietf/settings.py
@@ -1149,11 +1149,14 @@ def skip_unreadable_post(record):
MARKUP_SETTINGS = {
'restructuredtext': {
'settings_overrides': {
+ 'report_level': 3, # error (3) or severe (4) only
'initial_header_level': 3,
'doctitle_xform': False,
'footnote_references': 'superscript',
'trim_footnote_reference_space': True,
'default_reference_context': 'view',
+ 'raw_enabled': False, # critical for security
+ 'file_insertion_enabled': False, # critical for security
'link_base': ''
}
}
From b39b80fe1a3f218ed0cb25dc7bdb520a89454ebf Mon Sep 17 00:00:00 2001
From: Robert Sparks
Date: Thu, 5 Dec 2024 08:46:14 -0600
Subject: [PATCH 002/460] fix: test file existence using metadata (#8292)
* fix: test file existence using metadata
* fix: use Path more
* fix: don't read the file to see if it exists
* fix: more conservative error handling
* chore: remove unused import
---
ietf/doc/models.py | 18 ++++++++++++++----
ietf/doc/tests.py | 15 +++++++++++++++
ietf/doc/utils.py | 2 +-
3 files changed, 30 insertions(+), 5 deletions(-)
diff --git a/ietf/doc/models.py b/ietf/doc/models.py
index 077502db11..03698c80c3 100644
--- a/ietf/doc/models.py
+++ b/ietf/doc/models.py
@@ -4,7 +4,6 @@
import datetime
import logging
-import io
import os
import django.db
@@ -530,16 +529,27 @@ def replaces(self):
def replaced_by(self):
return set([ r.document for r in self.related_that("replaces") ])
- def text(self, size = -1):
+ def _text_path(self):
path = self.get_file_name()
root, ext = os.path.splitext(path)
txtpath = root+'.txt'
if ext != '.txt' and os.path.exists(txtpath):
path = txtpath
+ return path
+
+ def text_exists(self):
+ path = Path(self._text_path())
+ return path.exists()
+
+ def text(self, size = -1):
+ path = Path(self._text_path())
+ if not path.exists():
+ return None
try:
- with io.open(path, 'rb') as file:
+ with path.open('rb') as file:
raw = file.read(size)
- except IOError:
+ except IOError as e:
+ log.log(f"Error reading text for {path}: {e}")
return None
text = None
try:
diff --git a/ietf/doc/tests.py b/ietf/doc/tests.py
index f0c8e30626..0630fcd8d4 100644
--- a/ietf/doc/tests.py
+++ b/ietf/doc/tests.py
@@ -3318,3 +3318,18 @@ def test_investigate(self):
self.assertEqual(r.status_code, 200)
q = PyQuery(r.content)
self.assertEqual(len(q("#id_name_fragment.is-invalid")), 1)
+
+class LogIOErrorTests(TestCase):
+
+ def test_doc_text_io_error(self):
+
+ d = IndividualDraftFactory()
+
+ with mock.patch("ietf.doc.models.Path") as path_cls_mock:
+ with mock.patch("ietf.doc.models.log.log") as log_mock:
+ path_cls_mock.return_value.exists.return_value = True
+ path_cls_mock.return_value.open.return_value.__enter__.return_value.read.side_effect = IOError("Bad things happened")
+ text = d.text()
+ self.assertIsNone(text)
+ self.assertTrue(log_mock.called)
+ self.assertIn("Bad things happened", log_mock.call_args[0][0])
diff --git a/ietf/doc/utils.py b/ietf/doc/utils.py
index a30430829a..b2bc0997b1 100644
--- a/ietf/doc/utils.py
+++ b/ietf/doc/utils.py
@@ -1081,7 +1081,7 @@ def build_file_urls(doc: Union[Document, DocHistory]):
label = "plain text" if t == "txt" else t
file_urls.append((label, base + doc.name + "-" + doc.rev + "." + t))
- if doc.text():
+ if doc.text_exists():
file_urls.append(("htmlized", urlreverse('ietf.doc.views_doc.document_html', kwargs=dict(name=doc.name, rev=doc.rev))))
file_urls.append(("pdfized", urlreverse('ietf.doc.views_doc.document_pdfized', kwargs=dict(name=doc.name, rev=doc.rev))))
file_urls.append(("bibtex", urlreverse('ietf.doc.views_doc.document_bibtex',kwargs=dict(name=doc.name,rev=doc.rev))))
From 64c5ef16bd96cd8f2cc960a0224cfa502663c7ac Mon Sep 17 00:00:00 2001
From: Greg Wood
Date: Thu, 5 Dec 2024 09:50:07 -0500
Subject: [PATCH 003/460] fix: clarify replace wording (#8244)
* Clarify "Replace" in I-D submission form
Update help text in the "Replacement Information" section of
the "Status" tab
* refactor: update node, eslint, neostandard + fix esm (#8083)
* chore: update dependencies
* fix: eslint + neostandard
* fix: add corepack prompt env var to init script
* docs: Update README.md
---------
Co-authored-by: Robert Sparks
* ci: update base image target version to 20241114T1703
* ci: fix tests.yml workflow
* fix: clarify "Replace" in I-D submission form #8205
update quote syntax issue on previous update help text in the "Replacement Information" section of the "Status" tab to fix #8059
* Revert "refactor: update node, eslint, neostandard + fix esm (#8083)"
This reverts commit 649879efd745470f6e0cc6768d889f45640e1505.
* Revert "ci: update base image target version to 20241114T1703"
This reverts commit f11144017ed788bc7a38a1d028127434f0d50eb4.
* Revert "ci: fix tests.yml workflow"
This reverts commit 39231321c49291565a39608b98740e098c74dda7.
* Update forms.py
---------
Co-authored-by: Nicolas Giard
Co-authored-by: Robert Sparks
Co-authored-by: NGPixel
---
ietf/submit/forms.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/ietf/submit/forms.py b/ietf/submit/forms.py
index bed87b77c2..183e081242 100644
--- a/ietf/submit/forms.py
+++ b/ietf/submit/forms.py
@@ -502,7 +502,7 @@ def clean_name(self):
return name
class ReplacesForm(forms.Form):
- replaces = SearchableDocumentsField(required=False, help_text="Any Internet-Drafts that this document replaces (approval required for replacing an Internet-Draft you are not the author of)")
+ replaces = SearchableDocumentsField(required=False, help_text='Do not enter anything here if you are only submitting the next version of your Internet-Draft. Only enter items here if this submission is intended to replace an I-D with a different name. A typical use of this field is to note what individual I-Ds are replaced by a new -00 Working group I-D. Note that additional approval will be required to replace an I-D for which you are not an author.')
def __init__(self, *args, **kwargs):
self.name = kwargs.pop("name")
From 167752ba765e3ca740ee26b5beca268fc9a5c4aa Mon Sep 17 00:00:00 2001
From: Jennifer Richards
Date: Fri, 6 Dec 2024 13:17:55 -0400
Subject: [PATCH 004/460] feat: log ASN (#8309)
* feat: log ip_src_asnum in nginx
* feat: log asn from gunicorn
---
dev/build/gunicorn.conf.py | 5 ++++-
ietf/utils/jsonlogger.py | 1 +
k8s/nginx-logging.conf | 7 +++++--
3 files changed, 10 insertions(+), 3 deletions(-)
diff --git a/dev/build/gunicorn.conf.py b/dev/build/gunicorn.conf.py
index cabbee0b1e..6666a0d37d 100644
--- a/dev/build/gunicorn.conf.py
+++ b/dev/build/gunicorn.conf.py
@@ -64,18 +64,21 @@ def _describe_request(req):
start and end of handling a request. E.g., do not include a timestamp.
"""
client_ip = "-"
+ asn = "-"
cf_ray = "-"
for header, value in req.headers:
header = header.lower()
if header == "cf-connecting-ip":
client_ip = value
+ elif header == "x-ip-src-asnum":
+ asn = value
elif header == "cf-ray":
cf_ray = value
if req.query:
path = f"{req.path}?{req.query}"
else:
path = req.path
- return f"{req.method} {path} (client_ip={client_ip}, cf_ray={cf_ray})"
+ return f"{req.method} {path} (client_ip={client_ip}, asn={asn}, cf_ray={cf_ray})"
def pre_request(worker, req):
diff --git a/ietf/utils/jsonlogger.py b/ietf/utils/jsonlogger.py
index b02cd7af2b..6502cab0cb 100644
--- a/ietf/utils/jsonlogger.py
+++ b/ietf/utils/jsonlogger.py
@@ -31,4 +31,5 @@ def add_fields(self, log_record, record, message_dict):
log_record.setdefault("cf_connecting_ip", record.args["{cf-connecting-ip}i"])
log_record.setdefault("cf_connecting_ipv6", record.args["{cf-connecting-ipv6}i"])
log_record.setdefault("cf_ray", record.args["{cf-ray}i"])
+ log_record.setdefault("asn", record.args["{x-ip-src-asnum}i"])
log_record.setdefault("is_authenticated", record.args["{x-datatracker-is-authenticated}o"])
diff --git a/k8s/nginx-logging.conf b/k8s/nginx-logging.conf
index 3c4ade4614..0bc7deca81 100644
--- a/k8s/nginx-logging.conf
+++ b/k8s/nginx-logging.conf
@@ -1,4 +1,6 @@
-# Define JSON log format - must be loaded before config that references it
+# Define JSON log format - must be loaded before config that references it.
+# Note that each line is fully enclosed in single quotes. Commas in arrays are
+# intentionally inside the single quotes.
log_format ietfjson escape=json
'{'
'"time":"$${keepempty}time_iso8601",'
@@ -16,5 +18,6 @@ log_format ietfjson escape=json
'"x_forwarded_proto":"$${keepempty}http_x_forwarded_proto",'
'"cf_connecting_ip":"$${keepempty}http_cf_connecting_ip",'
'"cf_connecting_ipv6":"$${keepempty}http_cf_connecting_ipv6",'
- '"cf_ray":"$${keepempty}http_cf_ray"'
+ '"cf_ray":"$${keepempty}http_cf_ray",'
+ '"asn":"$${keepempty}http_x_ip_src_asnum"'
'}';
From 3055d17eb1c8f91a2152142bfce975b6dd2e82c1 Mon Sep 17 00:00:00 2001
From: Robert Sparks
Date: Mon, 9 Dec 2024 10:33:03 -0600
Subject: [PATCH 005/460] fix: remove unreliable statistics (#8307)
---
ietf/settings.py | 4 +-
ietf/stats/tests.py | 127 +--
ietf/stats/views.py | 894 +-----------------
ietf/templates/stats/document_stats.html | 86 --
.../document_stats_author_affiliation.html | 113 ---
.../document_stats_author_citations.html | 72 --
.../document_stats_author_continent.html | 69 --
.../stats/document_stats_author_country.html | 136 ---
.../document_stats_author_documents.html | 69 --
.../stats/document_stats_author_hindex.html | 83 --
.../stats/document_stats_authors.html | 68 --
.../stats/document_stats_format.html | 63 --
.../stats/document_stats_formlang.html | 63 --
.../templates/stats/document_stats_pages.html | 62 --
.../templates/stats/document_stats_words.html | 62 --
.../stats/document_stats_yearly.html | 52 -
.../includes/number_with_details_cell.html | 15 -
ietf/templates/stats/index.html | 9 +-
ietf/templates/stats/meeting_stats.html | 35 -
.../stats/meeting_stats_continent.html | 61 --
.../stats/meeting_stats_country.html | 97 --
.../stats/meeting_stats_overview.html | 160 ----
22 files changed, 18 insertions(+), 2382 deletions(-)
delete mode 100644 ietf/templates/stats/document_stats.html
delete mode 100644 ietf/templates/stats/document_stats_author_affiliation.html
delete mode 100644 ietf/templates/stats/document_stats_author_citations.html
delete mode 100644 ietf/templates/stats/document_stats_author_continent.html
delete mode 100644 ietf/templates/stats/document_stats_author_country.html
delete mode 100644 ietf/templates/stats/document_stats_author_documents.html
delete mode 100644 ietf/templates/stats/document_stats_author_hindex.html
delete mode 100644 ietf/templates/stats/document_stats_authors.html
delete mode 100644 ietf/templates/stats/document_stats_format.html
delete mode 100644 ietf/templates/stats/document_stats_formlang.html
delete mode 100644 ietf/templates/stats/document_stats_pages.html
delete mode 100644 ietf/templates/stats/document_stats_words.html
delete mode 100644 ietf/templates/stats/document_stats_yearly.html
delete mode 100644 ietf/templates/stats/includes/number_with_details_cell.html
delete mode 100644 ietf/templates/stats/meeting_stats.html
delete mode 100644 ietf/templates/stats/meeting_stats_continent.html
delete mode 100644 ietf/templates/stats/meeting_stats_country.html
delete mode 100644 ietf/templates/stats/meeting_stats_overview.html
diff --git a/ietf/settings.py b/ietf/settings.py
index cf8abe9f4d..7c3dc7fa16 100644
--- a/ietf/settings.py
+++ b/ietf/settings.py
@@ -809,8 +809,8 @@ def skip_unreadable_post(record):
SESSION_REQUEST_FROM_EMAIL = 'IETF Meeting Session Request Tool '
SECRETARIAT_SUPPORT_EMAIL = "support@ietf.org"
-SECRETARIAT_ACTION_EMAIL = "ietf-action@ietf.org"
-SECRETARIAT_INFO_EMAIL = "ietf-info@ietf.org"
+SECRETARIAT_ACTION_EMAIL = SECRETARIAT_SUPPORT_EMAIL
+SECRETARIAT_INFO_EMAIL = SECRETARIAT_SUPPORT_EMAIL
# Put real password in settings_local.py
IANA_SYNC_PASSWORD = "secret"
diff --git a/ietf/stats/tests.py b/ietf/stats/tests.py
index f0e8a19c4a..47027277be 100644
--- a/ietf/stats/tests.py
+++ b/ietf/stats/tests.py
@@ -13,22 +13,16 @@
import debug # pyflakes:ignore
from django.urls import reverse as urlreverse
-from django.utils import timezone
from ietf.utils.test_utils import login_testing_unauthorized, TestCase
import ietf.stats.views
-from ietf.submit.models import Submission
-from ietf.doc.factories import WgDraftFactory, WgRfcFactory
-from ietf.doc.models import Document, State, RelatedDocument, NewRevisionDocEvent, DocumentAuthor
+
from ietf.group.factories import RoleFactory
-from ietf.meeting.factories import MeetingFactory, AttendedFactory
+from ietf.meeting.factories import MeetingFactory
from ietf.person.factories import PersonFactory
-from ietf.person.models import Person, Email
-from ietf.name.models import FormalLanguageName, DocRelationshipName, CountryName
from ietf.review.factories import ReviewRequestFactory, ReviewerSettingsFactory, ReviewAssignmentFactory
-from ietf.stats.models import MeetingRegistration, CountryAlias
-from ietf.stats.factories import MeetingRegistrationFactory
+from ietf.stats.models import MeetingRegistration
from ietf.stats.tasks import fetch_meeting_attendance_task
from ietf.stats.utils import get_meeting_registration_data, FetchStats, fetch_attendance_from_meetings
from ietf.utils.timezone import date_today
@@ -41,121 +35,14 @@ def test_stats_index(self):
self.assertEqual(r.status_code, 200)
def test_document_stats(self):
- WgRfcFactory()
- draft = WgDraftFactory()
- DocumentAuthor.objects.create(
- document=draft,
- person=Person.objects.get(email__address="aread@example.org"),
- email=Email.objects.get(address="aread@example.org"),
- country="Germany",
- affiliation="IETF",
- order=1
- )
-
- # create some data for the statistics
- Submission.objects.create(
- authors=[ { "name": "Some Body", "email": "somebody@example.com", "affiliation": "Some Inc.", "country": "US" }],
- pages=30,
- rev=draft.rev,
- words=4000,
- draft=draft,
- file_types=".txt",
- state_id="posted",
- )
-
- draft.formal_languages.add(FormalLanguageName.objects.get(slug="xml"))
- Document.objects.filter(pk=draft.pk).update(words=4000)
- # move it back so it shows up in the yearly summaries
- NewRevisionDocEvent.objects.filter(doc=draft, rev=draft.rev).update(
- time=timezone.now() - datetime.timedelta(days=500))
-
- referencing_draft = Document.objects.create(
- name="draft-ietf-mars-referencing",
- type_id="draft",
- title="Referencing",
- stream_id="ietf",
- abstract="Test",
- rev="00",
- pages=2,
- words=100
- )
- referencing_draft.set_state(State.objects.get(used=True, type="draft", slug="active"))
- RelatedDocument.objects.create(
- source=referencing_draft,
- target=draft,
- relationship=DocRelationshipName.objects.get(slug="refinfo")
- )
- NewRevisionDocEvent.objects.create(
- type="new_revision",
- by=Person.objects.get(name="(System)"),
- doc=referencing_draft,
- desc="New revision available",
- rev=referencing_draft.rev,
- time=timezone.now() - datetime.timedelta(days=1000)
- )
+ r = self.client.get(urlreverse("ietf.stats.views.document_stats"))
+ self.assertRedirects(r, urlreverse("ietf.stats.views.stats_index"))
- # check redirect
- url = urlreverse(ietf.stats.views.document_stats)
-
- authors_url = urlreverse(ietf.stats.views.document_stats, kwargs={ "stats_type": "authors" })
-
- r = self.client.get(url)
- self.assertEqual(r.status_code, 302)
- self.assertTrue(authors_url in r["Location"])
-
- # check various stats types
- for stats_type in ["authors", "pages", "words", "format", "formlang",
- "author/documents", "author/affiliation", "author/country",
- "author/continent", "author/citations", "author/hindex",
- "yearly/affiliation", "yearly/country", "yearly/continent"]:
- for document_type in ["", "rfc", "draft"]:
- for time_choice in ["", "5y"]:
- url = urlreverse(ietf.stats.views.document_stats, kwargs={ "stats_type": stats_type })
- r = self.client.get(url, {
- "type": document_type,
- "time": time_choice,
- })
- self.assertEqual(r.status_code, 200)
- q = PyQuery(r.content)
- self.assertTrue(q('#chart'))
- if not stats_type.startswith("yearly"):
- self.assertTrue(q('table.stats-data'))
-
def test_meeting_stats(self):
- # create some data for the statistics
- meeting = MeetingFactory(type_id='ietf', date=date_today(), number="96")
- MeetingRegistrationFactory(first_name='John', last_name='Smith', country_code='US', email="john.smith@example.us", meeting=meeting, attended=True)
- CountryAlias.objects.get_or_create(alias="US", country=CountryName.objects.get(slug="US"))
- p = MeetingRegistrationFactory(first_name='Jaume', last_name='Guillaume', country_code='FR', email="jaume.guillaume@example.fr", meeting=meeting, attended=False).person
- CountryAlias.objects.get_or_create(alias="FR", country=CountryName.objects.get(slug="FR"))
- AttendedFactory(session__meeting=meeting,person=p)
- # check redirect
- url = urlreverse(ietf.stats.views.meeting_stats)
-
- authors_url = urlreverse(ietf.stats.views.meeting_stats, kwargs={ "stats_type": "overview" })
-
- r = self.client.get(url)
- self.assertEqual(r.status_code, 302)
- self.assertTrue(authors_url in r["Location"])
-
- # check various stats types
- for stats_type in ["overview", "country", "continent"]:
- url = urlreverse(ietf.stats.views.meeting_stats, kwargs={ "stats_type": stats_type })
- r = self.client.get(url)
- self.assertEqual(r.status_code, 200)
- q = PyQuery(r.content)
- self.assertTrue(q('#chart'))
- if stats_type == "overview":
- self.assertTrue(q('table.stats-data'))
+ r = self.client.get(urlreverse("ietf.stats.views.meeting_stats"))
+ self.assertRedirects(r, urlreverse("ietf.stats.views.stats_index"))
- for stats_type in ["country", "continent"]:
- url = urlreverse(ietf.stats.views.meeting_stats, kwargs={ "stats_type": stats_type, "num": meeting.number })
- r = self.client.get(url)
- self.assertEqual(r.status_code, 200)
- q = PyQuery(r.content)
- self.assertTrue(q('#chart'))
- self.assertTrue(q('table.stats-data'))
def test_known_country_list(self):
# check redirect
diff --git a/ietf/stats/views.py b/ietf/stats/views.py
index ea73d9f4fc..504d84e86d 100644
--- a/ietf/stats/views.py
+++ b/ietf/stats/views.py
@@ -2,25 +2,18 @@
# -*- coding: utf-8 -*-
-import os
import calendar
import datetime
-import email.utils
import itertools
import json
import dateutil.relativedelta
from collections import defaultdict
-from django.conf import settings
from django.contrib.auth.decorators import login_required
-from django.core.cache import cache
-from django.db.models import Count, Q, Subquery, OuterRef
from django.http import HttpResponseRedirect
-from django.shortcuts import get_object_or_404, render
+from django.shortcuts import render
from django.urls import reverse as urlreverse
-from django.utils import timezone
-from django.utils.safestring import mark_safe
-from django.utils.text import slugify
+
import debug # pyflakes:ignore
@@ -29,18 +22,12 @@
ReviewAssignmentData,
sum_period_review_assignment_stats,
sum_raw_review_assignment_aggregations)
-from ietf.submit.models import Submission
from ietf.group.models import Role, Group
from ietf.person.models import Person
-from ietf.name.models import ReviewResultName, CountryName, DocRelationshipName, ReviewAssignmentStateName
-from ietf.person.name import plain_name
-from ietf.doc.models import Document, RelatedDocument, State, DocEvent
-from ietf.meeting.models import Meeting
-from ietf.stats.models import MeetingRegistration, CountryAlias
-from ietf.stats.utils import get_aliased_affiliations, get_aliased_countries, compute_hirsch_index
+from ietf.name.models import ReviewResultName, CountryName, ReviewAssignmentStateName
from ietf.ietfauth.utils import has_role
from ietf.utils.response import permission_denied
-from ietf.utils.timezone import date_today, DEADLINE_TZINFO, RPC_TZINFO
+from ietf.utils.timezone import date_today, DEADLINE_TZINFO
def stats_index(request):
@@ -135,632 +122,8 @@ def add_labeled_top_series_from_bins(chart_data, bins, limit):
})
def document_stats(request, stats_type=None):
- def build_document_stats_url(stats_type_override=Ellipsis, get_overrides=None):
- if get_overrides is None:
- get_overrides={}
- kwargs = {
- "stats_type": stats_type if stats_type_override is Ellipsis else stats_type_override,
- }
-
- return urlreverse(document_stats, kwargs={ k: v for k, v in kwargs.items() if v is not None }) + generate_query_string(request.GET, get_overrides)
-
- # the length limitation is to keep the key shorter than memcached's limit
- # of 250 after django has added the key_prefix and key_version parameters
- cache_key = ("stats:document_stats:%s:%s" % (stats_type, slugify(request.META.get('QUERY_STRING',''))))[:228]
- data = cache.get(cache_key)
- if not data:
- names_limit = settings.STATS_NAMES_LIMIT
- # statistics types
- possible_document_stats_types = add_url_to_choices([
- ("authors", "Number of authors"),
- ("pages", "Pages"),
- ("words", "Words"),
- ("format", "Format"),
- ("formlang", "Formal languages"),
- ], lambda slug: build_document_stats_url(stats_type_override=slug))
-
- possible_author_stats_types = add_url_to_choices([
- ("author/documents", "Number of documents"),
- ("author/affiliation", "Affiliation"),
- ("author/country", "Country"),
- ("author/continent", "Continent"),
- ("author/citations", "Citations"),
- ("author/hindex", "h-index"),
- ], lambda slug: build_document_stats_url(stats_type_override=slug))
-
- possible_yearly_stats_types = add_url_to_choices([
- ("yearly/affiliation", "Affiliation"),
- ("yearly/country", "Country"),
- ("yearly/continent", "Continent"),
- ], lambda slug: build_document_stats_url(stats_type_override=slug))
-
-
- if not stats_type:
- return HttpResponseRedirect(build_document_stats_url(stats_type_override=possible_document_stats_types[0][0]))
-
-
- possible_document_types = add_url_to_choices([
- ("", "All"),
- ("rfc", "RFCs"),
- ("draft", "Internet-Drafts"),
- ], lambda slug: build_document_stats_url(get_overrides={ "type": slug }))
-
- document_type = get_choice(request, "type", possible_document_types) or ""
-
-
- possible_time_choices = add_url_to_choices([
- ("", "All time"),
- ("5y", "Past 5 years"),
- ], lambda slug: build_document_stats_url(get_overrides={ "time": slug }))
-
- time_choice = request.GET.get("time") or ""
-
- from_time = None
- if "y" in time_choice:
- try:
- y = int(time_choice.rstrip("y"))
- from_time = timezone.now() - dateutil.relativedelta.relativedelta(years=y)
- except ValueError:
- pass
-
- chart_data = []
- table_data = []
- stats_title = ""
- template_name = stats_type.replace("/", "_")
- bin_size = 1
- alias_data = []
- eu_countries = None
-
-
- if any(stats_type == t[0] for t in possible_document_stats_types):
- # filter documents
- document_filters = Q(type__in=["draft","rfc"]) # TODO - review lots of "rfc is a draft" assumptions below
-
- rfc_state = State.objects.get(type="rfc", slug="published")
- if document_type == "rfc":
- document_filters &= Q(states=rfc_state)
- elif document_type == "draft":
- document_filters &= ~Q(states=rfc_state)
-
- if from_time:
- # this is actually faster than joining in the database,
- # despite the round-trip back and forth
- docs_within_time_constraint = list(Document.objects.filter(
- type="draft",
- docevent__time__gte=from_time,
- docevent__type__in=["published_rfc", "new_revision"],
- ).values_list("pk",flat=True))
-
- document_filters &= Q(pk__in=docs_within_time_constraint)
-
- document_qs = Document.objects.filter(document_filters)
-
- if document_type == "rfc":
- doc_label = "RFC"
- elif document_type == "draft":
- doc_label = "draft"
- else:
- doc_label = "document"
-
- total_docs = document_qs.values_list("name").distinct().count()
-
- if stats_type == "authors":
- stats_title = "Number of authors for each {}".format(doc_label)
-
- bins = defaultdict(set)
-
- for name, author_count in document_qs.values_list("name").annotate(Count("documentauthor")).values_list("name","documentauthor__count"):
- bins[author_count or 0].add(name)
-
- series_data = []
- for author_count, names in sorted(bins.items(), key=lambda t: t[0]):
- percentage = len(names) * 100.0 / (total_docs or 1)
- series_data.append((author_count, percentage))
- table_data.append((author_count, percentage, len(names), list(names)[:names_limit]))
-
- chart_data.append({ "data": series_data })
-
- elif stats_type == "pages":
- stats_title = "Number of pages for each {}".format(doc_label)
-
- bins = defaultdict(set)
-
- for name, pages in document_qs.values_list("name", "pages"):
- bins[pages or 0].add(name)
-
- series_data = []
- for pages, names in sorted(bins.items(), key=lambda t: t[0]):
- percentage = len(names) * 100.0 / (total_docs or 1)
- if pages is not None:
- series_data.append((pages, len(names)))
- table_data.append((pages, percentage, len(names), list(names)[:names_limit]))
-
- chart_data.append({ "data": series_data })
-
- elif stats_type == "words":
- stats_title = "Number of words for each {}".format(doc_label)
-
- bin_size = 500
-
- bins = defaultdict(set)
-
- for name, words in document_qs.values_list("name", "words"):
- bins[put_into_bin(words, bin_size)].add(name)
-
- series_data = []
- for (value, words), names in sorted(bins.items(), key=lambda t: t[0][0]):
- percentage = len(names) * 100.0 / (total_docs or 1)
- if words is not None:
- series_data.append((value, len(names)))
-
- table_data.append((words, percentage, len(names), list(names)[:names_limit]))
-
- chart_data.append({ "data": series_data })
-
- elif stats_type == "format":
- stats_title = "Submission formats for each {}".format(doc_label)
-
- bins = defaultdict(set)
-
- # on new documents, we should have a Submission row with the file types
- submission_types = {}
-
- for doc_name, file_types in Submission.objects.values_list("draft", "file_types").order_by("submission_date", "id"):
- submission_types[doc_name] = file_types
-
- doc_names_with_missing_types = {}
- for doc_name, doc_type, rev in document_qs.values_list("name", "type_id", "rev"):
- types = submission_types.get(doc_name)
- if types:
- for dot_ext in types.split(","):
- bins[dot_ext.lstrip(".").upper()].add(doc_name)
-
- else:
-
- if doc_type == "rfc":
- filename = doc_name
- else:
- filename = doc_name + "-" + rev
-
- doc_names_with_missing_types[filename] = doc_name
-
- # look up the remaining documents on disk
- for filename in itertools.chain(os.listdir(settings.INTERNET_ALL_DRAFTS_ARCHIVE_DIR), os.listdir(settings.RFC_PATH)):
- t = filename.split(".", 1)
- if len(t) != 2:
- continue
-
- basename, ext = t
- ext = ext.lower()
- if not any(ext==allowlisted_ext for allowlisted_ext in settings.DOCUMENT_FORMAT_ALLOWLIST):
- continue
-
- name = doc_names_with_missing_types.get(basename)
-
- if name:
- bins[ext.upper()].add(name)
-
- series_data = []
- for fmt, names in sorted(bins.items(), key=lambda t: t[0]):
- percentage = len(names) * 100.0 / (total_docs or 1)
- series_data.append((fmt, len(names)))
-
- table_data.append((fmt, percentage, len(names), list(names)[:names_limit]))
-
- chart_data.append({ "data": series_data })
-
- elif stats_type == "formlang":
- stats_title = "Formal languages used for each {}".format(doc_label)
-
- bins = defaultdict(set)
-
- for name, formal_language_name in document_qs.values_list("name", "formal_languages__name"):
- bins[formal_language_name or ""].add(name)
-
- series_data = []
- for formal_language, names in sorted(bins.items(), key=lambda t: t[0]):
- percentage = len(names) * 100.0 / (total_docs or 1)
- if formal_language is not None:
- series_data.append((formal_language, len(names)))
- table_data.append((formal_language, percentage, len(names), list(names)[:names_limit]))
-
- chart_data.append({ "data": series_data })
-
- elif any(stats_type == t[0] for t in possible_author_stats_types):
- person_filters = Q(documentauthor__document__type="draft")
-
- # filter persons
- rfc_state = State.objects.get(type="rfc", slug="published")
- if document_type == "rfc":
- person_filters &= Q(documentauthor__document__states=rfc_state)
- elif document_type == "draft":
- person_filters &= ~Q(documentauthor__document__states=rfc_state)
-
- if from_time:
- # this is actually faster than joining in the database,
- # despite the round-trip back and forth
- docs_within_time_constraint = set(Document.objects.filter(
- type="draft",
- docevent__time__gte=from_time,
- docevent__type__in=["published_rfc", "new_revision"],
- ).values_list("pk"))
-
- person_filters &= Q(documentauthor__document__in=docs_within_time_constraint)
-
- person_qs = Person.objects.filter(person_filters)
-
- if document_type == "rfc":
- doc_label = "RFC"
- elif document_type == "draft":
- doc_label = "draft"
- else:
- doc_label = "document"
-
- if stats_type == "author/documents":
- stats_title = "Number of {}s per author".format(doc_label)
-
- bins = defaultdict(set)
-
- person_qs = Person.objects.filter(person_filters)
-
- for name, document_count in person_qs.values_list("name").annotate(Count("documentauthor")):
- bins[document_count or 0].add(name)
-
- total_persons = count_bins(bins)
-
- series_data = []
- for document_count, names in sorted(bins.items(), key=lambda t: t[0]):
- percentage = len(names) * 100.0 / (total_persons or 1)
- series_data.append((document_count, percentage))
- plain_names = sorted([ plain_name(n) for n in names ])
- table_data.append((document_count, percentage, len(plain_names), list(plain_names)[:names_limit]))
-
- chart_data.append({ "data": series_data })
-
- elif stats_type == "author/affiliation":
- stats_title = "Number of {} authors per affiliation".format(doc_label)
-
- bins = defaultdict(set)
-
- person_qs = Person.objects.filter(person_filters)
-
- # Since people don't write the affiliation names in the
- # same way, and we don't want to go back and edit them
- # either, we transform them here.
-
- name_affiliation_set = {
- (name, affiliation)
- for name, affiliation in person_qs.values_list("name", "documentauthor__affiliation")
- }
-
- aliases = get_aliased_affiliations(affiliation for _, affiliation in name_affiliation_set)
-
- for name, affiliation in name_affiliation_set:
- bins[aliases.get(affiliation, affiliation)].add(name)
-
- prune_unknown_bin_with_known(bins)
- total_persons = count_bins(bins)
-
- series_data = []
- for affiliation, names in sorted(bins.items(), key=lambda t: t[0].lower()):
- percentage = len(names) * 100.0 / (total_persons or 1)
- if affiliation:
- series_data.append((affiliation, len(names)))
- plain_names = sorted([ plain_name(n) for n in names ])
- table_data.append((affiliation, percentage, len(plain_names), list(plain_names)[:names_limit]))
-
- series_data.sort(key=lambda t: t[1], reverse=True)
- series_data = series_data[:30]
-
- chart_data.append({ "data": series_data })
-
- for alias, name in sorted(aliases.items(), key=lambda t: t[1]):
- alias_data.append((name, alias))
-
- elif stats_type == "author/country":
- stats_title = "Number of {} authors per country".format(doc_label)
-
- bins = defaultdict(set)
-
- person_qs = Person.objects.filter(person_filters)
-
- # Since people don't write the country names in the
- # same way, and we don't want to go back and edit them
- # either, we transform them here.
-
- name_country_set = {
- (name, country)
- for name, country in person_qs.values_list("name", "documentauthor__country")
- }
-
- aliases = get_aliased_countries(country for _, country in name_country_set)
-
- countries = { c.name: c for c in CountryName.objects.all() }
- eu_name = "EU"
- eu_countries = { c for c in countries.values() if c.in_eu }
-
- for name, country in name_country_set:
- country_name = aliases.get(country, country)
- bins[country_name].add(name)
-
- c = countries.get(country_name)
- if c and c.in_eu:
- bins[eu_name].add(name)
-
- prune_unknown_bin_with_known(bins)
- total_persons = count_bins(bins)
+ return HttpResponseRedirect(urlreverse("ietf.stats.views.stats_index"))
- series_data = []
- for country, names in sorted(bins.items(), key=lambda t: t[0].lower()):
- percentage = len(names) * 100.0 / (total_persons or 1)
- if country:
- series_data.append((country, len(names)))
- plain_names = sorted([ plain_name(n) for n in names ])
- table_data.append((country, percentage, len(plain_names), list(plain_names)[:names_limit]))
-
- series_data.sort(key=lambda t: t[1], reverse=True)
- series_data = series_data[:30]
-
- chart_data.append({ "data": series_data })
-
- for alias, country_name in aliases.items():
- alias_data.append((country_name, alias, countries.get(country_name)))
-
- alias_data.sort()
-
- elif stats_type == "author/continent":
- stats_title = "Number of {} authors per continent".format(doc_label)
-
- bins = defaultdict(set)
-
- person_qs = Person.objects.filter(person_filters)
-
- name_country_set = {
- (name, country)
- for name, country in person_qs.values_list("name", "documentauthor__country")
- }
-
- aliases = get_aliased_countries(country for _, country in name_country_set)
-
- country_to_continent = dict(CountryName.objects.values_list("name", "continent__name"))
-
- for name, country in name_country_set:
- country_name = aliases.get(country, country)
- continent_name = country_to_continent.get(country_name, "")
- bins[continent_name].add(name)
-
- prune_unknown_bin_with_known(bins)
- total_persons = count_bins(bins)
-
- series_data = []
- for continent, names in sorted(bins.items(), key=lambda t: t[0].lower()):
- percentage = len(names) * 100.0 / (total_persons or 1)
- if continent:
- series_data.append((continent, len(names)))
- plain_names = sorted([ plain_name(n) for n in names ])
- table_data.append((continent, percentage, len(plain_names), list(plain_names)[:names_limit]))
-
- series_data.sort(key=lambda t: t[1], reverse=True)
-
- chart_data.append({ "data": series_data })
-
- elif stats_type == "author/citations":
- stats_title = "Number of citations of {}s written by author".format(doc_label)
-
- bins = defaultdict(set)
-
- cite_relationships = list(DocRelationshipName.objects.filter(slug__in=['refnorm', 'refinfo', 'refunk', 'refold']))
- person_filters &= Q(documentauthor__document__relateddocument__relationship__in=cite_relationships)
-
- person_qs = Person.objects.filter(person_filters)
-
- for name, citations in person_qs.values_list("name").annotate(Count("documentauthor__document__relateddocument")):
- bins[citations or 0].add(name)
-
- total_persons = count_bins(bins)
-
- series_data = []
- for citations, names in sorted(bins.items(), key=lambda t: t[0], reverse=True):
- percentage = len(names) * 100.0 / (total_persons or 1)
- series_data.append((citations, percentage))
- plain_names = sorted([ plain_name(n) for n in names ])
- table_data.append((citations, percentage, len(plain_names), list(plain_names)[:names_limit]))
-
- chart_data.append({ "data": sorted(series_data, key=lambda t: t[0]) })
-
- elif stats_type == "author/hindex":
- stats_title = "h-index for {}s written by author".format(doc_label)
-
- bins = defaultdict(set)
-
- cite_relationships = list(DocRelationshipName.objects.filter(slug__in=['refnorm', 'refinfo', 'refunk', 'refold']))
- person_filters &= Q(documentauthor__document__relateddocument__relationship__in=cite_relationships)
-
- person_qs = Person.objects.filter(person_filters)
-
- values = person_qs.values_list("name", "documentauthor__document").annotate(Count("documentauthor__document__relateddocument"))
- for name, ts in itertools.groupby(values.order_by("name"), key=lambda t: t[0]):
- h_index = compute_hirsch_index([citations for _, document, citations in ts])
- bins[h_index or 0].add(name)
-
- total_persons = count_bins(bins)
-
- series_data = []
- for citations, names in sorted(bins.items(), key=lambda t: t[0], reverse=True):
- percentage = len(names) * 100.0 / (total_persons or 1)
- series_data.append((citations, percentage))
- plain_names = sorted([ plain_name(n) for n in names ])
- table_data.append((citations, percentage, len(plain_names), list(plain_names)[:names_limit]))
-
- chart_data.append({ "data": sorted(series_data, key=lambda t: t[0]) })
-
- elif any(stats_type == t[0] for t in possible_yearly_stats_types):
-
- # filter persons
- rfc_state = State.objects.get(type="rfc", slug="published")
- if document_type == "rfc":
- person_filters = Q(documentauthor__document__type="rfc")
- person_filters &= Q(documentauthor__document__states=rfc_state)
- elif document_type == "draft":
- person_filters = Q(documentauthor__document__type="draft")
- person_filters &= ~Q(documentauthor__document__states=rfc_state)
- else:
- person_filters = Q(documentauthor__document__type="rfc")
- person_filters |= Q(documentauthor__document__type="draft")
-
- doc_years = defaultdict(set)
-
- draftevent_qs = DocEvent.objects.filter(
- doc__type="draft",
- type = "new_revision",
- ).values_list("doc","time").order_by("doc")
-
- for doc_id, time in draftevent_qs.iterator():
- # RPC_TZINFO is used to match the timezone handling in Document.pub_date()
- doc_years[doc_id].add(time.astimezone(RPC_TZINFO).year)
-
- rfcevent_qs = (
- DocEvent.objects.filter(doc__type="rfc", type="published_rfc")
- .annotate(
- draft=Subquery(
- RelatedDocument.objects.filter(
- target=OuterRef("doc__pk"), relationship_id="became_rfc"
- ).values_list("source", flat=True)[:1]
- )
- )
- .values_list("doc", "time")
- .order_by("doc")
- )
-
- for doc_id, time in rfcevent_qs.iterator():
- doc_years[doc_id].add(time.astimezone(RPC_TZINFO).year)
-
- person_qs = Person.objects.filter(person_filters)
-
- if document_type == "rfc":
- doc_label = "RFC"
- elif document_type == "draft":
- doc_label = "draft"
- else:
- doc_label = "document"
-
- template_name = "yearly"
-
- years_from = from_time.year if from_time else 1
- years_to = timezone.now().year - 1
-
-
- if stats_type == "yearly/affiliation":
- stats_title = "Number of {} authors per affiliation over the years".format(doc_label)
-
- person_qs = Person.objects.filter(person_filters)
-
- name_affiliation_doc_set = {
- (name, affiliation, doc)
- for name, affiliation, doc in person_qs.values_list("name", "documentauthor__affiliation", "documentauthor__document")
- }
-
- aliases = get_aliased_affiliations(affiliation for _, affiliation, _ in name_affiliation_doc_set)
-
- bins = defaultdict(set)
- for name, affiliation, doc in name_affiliation_doc_set:
- a = aliases.get(affiliation, affiliation)
- if a:
- years = doc_years.get(doc)
- if years:
- for year in years:
- if years_from <= year <= years_to:
- bins[(year, a)].add(name)
-
- add_labeled_top_series_from_bins(chart_data, bins, limit=8)
-
- elif stats_type == "yearly/country":
- stats_title = "Number of {} authors per country over the years".format(doc_label)
-
- person_qs = Person.objects.filter(person_filters)
-
- name_country_doc_set = {
- (name, country, doc)
- for name, country, doc in person_qs.values_list("name", "documentauthor__country", "documentauthor__document")
- }
-
- aliases = get_aliased_countries(country for _, country, _ in name_country_doc_set)
-
- countries = { c.name: c for c in CountryName.objects.all() }
- eu_name = "EU"
- eu_countries = { c for c in countries.values() if c.in_eu }
-
- bins = defaultdict(set)
-
- for name, country, doc in name_country_doc_set:
- country_name = aliases.get(country, country)
- c = countries.get(country_name)
-
- years = doc_years.get(doc)
- if country_name and years:
- for year in years:
- if years_from <= year <= years_to:
- bins[(year, country_name)].add(name)
-
- if c and c.in_eu:
- bins[(year, eu_name)].add(name)
-
- add_labeled_top_series_from_bins(chart_data, bins, limit=8)
-
-
- elif stats_type == "yearly/continent":
- stats_title = "Number of {} authors per continent".format(doc_label)
-
- person_qs = Person.objects.filter(person_filters)
-
- name_country_doc_set = {
- (name, country, doc)
- for name, country, doc in person_qs.values_list("name", "documentauthor__country", "documentauthor__document")
- }
-
- aliases = get_aliased_countries(country for _, country, _ in name_country_doc_set)
-
- country_to_continent = dict(CountryName.objects.values_list("name", "continent__name"))
-
- bins = defaultdict(set)
-
- for name, country, doc in name_country_doc_set:
- country_name = aliases.get(country, country)
- continent_name = country_to_continent.get(country_name, "")
-
- if continent_name:
- years = doc_years.get(doc)
- if years:
- for year in years:
- if years_from <= year <= years_to:
- bins[(year, continent_name)].add(name)
-
- add_labeled_top_series_from_bins(chart_data, bins, limit=8)
-
- data = {
- "chart_data": mark_safe(json.dumps(chart_data)),
- "table_data": table_data,
- "stats_title": stats_title,
- "possible_document_stats_types": possible_document_stats_types,
- "possible_author_stats_types": possible_author_stats_types,
- "possible_yearly_stats_types": possible_yearly_stats_types,
- "stats_type": stats_type,
- "possible_document_types": possible_document_types,
- "document_type": document_type,
- "possible_time_choices": possible_time_choices,
- "time_choice": time_choice,
- "doc_label": doc_label,
- "bin_size": bin_size,
- "show_aliases_url": build_document_stats_url(get_overrides={ "showaliases": "1" }),
- "hide_aliases_url": build_document_stats_url(get_overrides={ "showaliases": None }),
- "alias_data": alias_data,
- "eu_countries": sorted(eu_countries or [], key=lambda c: c.name),
- "content_template": "stats/document_stats_{}.html".format(template_name),
- }
- # Logs are full of these, but nobody is using them
- # log("Cache miss for '%s'. Data size: %sk" % (cache_key, len(str(data))/1000))
- cache.set(cache_key, data, 24*60*60)
- return render(request, "stats/document_stats.html", data)
def known_countries_list(request, stats_type=None, acronym=None):
countries = CountryName.objects.prefetch_related("countryalias_set")
@@ -774,252 +137,7 @@ def known_countries_list(request, stats_type=None, acronym=None):
})
def meeting_stats(request, num=None, stats_type=None):
- meeting = None
- if num is not None:
- meeting = get_object_or_404(Meeting, number=num, type="ietf")
-
- def build_meeting_stats_url(number=None, stats_type_override=Ellipsis, get_overrides=None):
- if get_overrides is None:
- get_overrides = {}
- kwargs = {
- "stats_type": stats_type if stats_type_override is Ellipsis else stats_type_override,
- }
-
- if number is not None:
- kwargs["num"] = number
-
- return urlreverse(meeting_stats, kwargs={ k: v for k, v in kwargs.items() if v is not None }) + generate_query_string(request.GET, get_overrides)
-
- cache_key = ("stats:meeting_stats:%s:%s:%s" % (num, stats_type, slugify(request.META.get('QUERY_STRING',''))))[:228]
- data = cache.get(cache_key)
- if not data:
- names_limit = settings.STATS_NAMES_LIMIT
- # statistics types
- if meeting:
- possible_stats_types = add_url_to_choices([
- ("country", "Country"),
- ("continent", "Continent"),
- ], lambda slug: build_meeting_stats_url(number=meeting.number, stats_type_override=slug))
- else:
- possible_stats_types = add_url_to_choices([
- ("overview", "Overview"),
- ("country", "Country"),
- ("continent", "Continent"),
- ], lambda slug: build_meeting_stats_url(number=None, stats_type_override=slug))
-
- if not stats_type:
- return HttpResponseRedirect(build_meeting_stats_url(number=num, stats_type_override=possible_stats_types[0][0]))
-
- chart_data = []
- piechart_data = []
- table_data = []
- stats_title = ""
- template_name = stats_type
- bin_size = 1
- eu_countries = None
-
- def get_country_mapping(attendees):
- return {
- alias.alias: alias.country
- for alias in CountryAlias.objects.filter(alias__in=set(r.country_code for r in attendees)).select_related("country", "country__continent")
- if alias.alias.isupper()
- }
-
- def reg_name(r):
- return email.utils.formataddr(((r.first_name + " " + r.last_name).strip(), r.email))
-
- if meeting and any(stats_type == t[0] for t in possible_stats_types):
- attendees = MeetingRegistration.objects.filter(
- meeting=meeting,
- reg_type__in=['onsite', 'remote']
- ).filter(
- Q( attended=True) | Q( checkedin=True )
- )
-
- if stats_type == "country":
- stats_title = "Number of attendees for {} {} per country".format(meeting.type.name, meeting.number)
-
- bins = defaultdict(set)
-
- country_mapping = get_country_mapping(attendees)
-
- eu_name = "EU"
- eu_countries = set(CountryName.objects.filter(in_eu=True))
-
- for r in attendees:
- name = reg_name(r)
- c = country_mapping.get(r.country_code)
- bins[c.name if c else ""].add(name)
-
- if c and c.in_eu:
- bins[eu_name].add(name)
-
- prune_unknown_bin_with_known(bins)
- total_attendees = count_bins(bins)
-
- series_data = []
- for country, names in sorted(bins.items(), key=lambda t: t[0].lower()):
- percentage = len(names) * 100.0 / (total_attendees or 1)
- if country:
- series_data.append((country, len(names)))
- table_data.append((country, percentage, len(names), list(names)[:names_limit]))
-
- if country and country != eu_name:
- piechart_data.append({ "name": country, "y": percentage })
-
- series_data.sort(key=lambda t: t[1], reverse=True)
- series_data = series_data[:20]
-
- piechart_data.sort(key=lambda d: d["y"], reverse=True)
- pie_cut_off = 8
- piechart_data = piechart_data[:pie_cut_off] + [{ "name": "Other", "y": sum(d["y"] for d in piechart_data[pie_cut_off:])}]
-
- chart_data.append({ "data": series_data })
-
- elif stats_type == "continent":
- stats_title = "Number of attendees for {} {} per continent".format(meeting.type.name, meeting.number)
-
- bins = defaultdict(set)
-
- country_mapping = get_country_mapping(attendees)
-
- for r in attendees:
- name = reg_name(r)
- c = country_mapping.get(r.country_code)
- bins[c.continent.name if c else ""].add(name)
-
- prune_unknown_bin_with_known(bins)
- total_attendees = count_bins(bins)
-
- series_data = []
- for continent, names in sorted(bins.items(), key=lambda t: t[0].lower()):
- percentage = len(names) * 100.0 / (total_attendees or 1)
- if continent:
- series_data.append((continent, len(names)))
- table_data.append((continent, percentage, len(names), list(names)[:names_limit]))
-
- series_data.sort(key=lambda t: t[1], reverse=True)
-
- chart_data.append({ "data": series_data })
-
-
- elif not meeting and any(stats_type == t[0] for t in possible_stats_types):
- template_name = "overview"
-
- attendees = MeetingRegistration.objects.filter(
- meeting__type="ietf",
- attended=True,
- reg_type__in=['onsite', 'remote']
- ).filter(
- Q( attended=True) | Q( checkedin=True )
- ).select_related('meeting')
-
- if stats_type == "overview":
- stats_title = "Number of attendees per meeting"
-
- continents = {}
-
- meetings = Meeting.objects.filter(type='ietf', date__lte=date_today()).order_by('number')
- for m in meetings:
- country = CountryName.objects.get(slug=m.country)
- continents[country.continent.name] = country.continent.name
-
- bins = defaultdict(set)
-
- for r in attendees:
- meeting_number = int(r.meeting.number)
- name = reg_name(r)
- bins[meeting_number].add(name)
-
- series_data = {}
- for continent in list(continents.keys()):
- series_data[continent] = []
-
- for m in meetings:
- country = CountryName.objects.get(slug=m.country)
- url = build_meeting_stats_url(number=m.number,
- stats_type_override="country")
- for continent in list(continents.keys()):
- if continent == country.continent.name:
- d = {
- "name": "IETF {} - {}, {}".format(int(m.number), m.city, country),
- "x": int(m.number),
- "y": m.attendees,
- "date": m.date.strftime("%d %b %Y"),
- "url": url,
- }
- else:
- d = {
- "x": int(m.number),
- "y": 0,
- }
- series_data[continent].append(d)
- table_data.append((m, url,
- m.attendees, country))
-
- for continent in list(continents.keys()):
-# series_data[continent].sort(key=lambda t: t[0]["x"])
- chart_data.append( { "name": continent,
- "data": series_data[continent] })
-
- table_data.sort(key=lambda t: int(t[0].number), reverse=True)
-
- elif stats_type == "country":
- stats_title = "Number of attendees per country across meetings"
-
- country_mapping = get_country_mapping(attendees)
-
- eu_name = "EU"
- eu_countries = set(CountryName.objects.filter(in_eu=True))
-
- bins = defaultdict(set)
-
- for r in attendees:
- meeting_number = int(r.meeting.number)
- name = reg_name(r)
- c = country_mapping.get(r.country_code)
-
- if c:
- bins[(meeting_number, c.name)].add(name)
- if c.in_eu:
- bins[(meeting_number, eu_name)].add(name)
-
- add_labeled_top_series_from_bins(chart_data, bins, limit=8)
-
-
- elif stats_type == "continent":
- stats_title = "Number of attendees per continent across meetings"
-
- country_mapping = get_country_mapping(attendees)
-
- bins = defaultdict(set)
-
- for r in attendees:
- meeting_number = int(r.meeting.number)
- name = reg_name(r)
- c = country_mapping.get(r.country_code)
-
- if c:
- bins[(meeting_number, c.continent.name)].add(name)
-
- add_labeled_top_series_from_bins(chart_data, bins, limit=8)
- data = {
- "chart_data": mark_safe(json.dumps(chart_data)),
- "piechart_data": mark_safe(json.dumps(piechart_data)),
- "table_data": table_data,
- "stats_title": stats_title,
- "possible_stats_types": possible_stats_types,
- "stats_type": stats_type,
- "bin_size": bin_size,
- "meeting": meeting,
- "eu_countries": sorted(eu_countries or [], key=lambda c: c.name),
- "content_template": "stats/meeting_stats_{}.html".format(template_name),
- }
- # Logs are full of these, but nobody is using them...
- # log("Cache miss for '%s'. Data size: %sk" % (cache_key, len(str(data))/1000))
- cache.set(cache_key, data, 24*60*60)
- #
- return render(request, "stats/meeting_stats.html", data)
+ return HttpResponseRedirect(urlreverse("ietf.stats.views.stats_index"))
@login_required
diff --git a/ietf/templates/stats/document_stats.html b/ietf/templates/stats/document_stats.html
deleted file mode 100644
index 4e66bed37e..0000000000
--- a/ietf/templates/stats/document_stats.html
+++ /dev/null
@@ -1,86 +0,0 @@
-{% extends "base.html" %}
-{% load origin %}
-{% load ietf_filters static %}
-{% block title %}{{ stats_title }}{% endblock %}
-{% block pagehead %}
-
-
-{% endblock %}
-{% block content %}
- {% origin %}
-
Internet-Draft and RFC statistics
-
-
-
- {% for slug, label, url in possible_document_stats_types %}
- {{ label }}
- {% endfor %}
-
-
-
-
-
- {% for slug, label, url in possible_author_stats_types %}
- {{ label }}
- {% endfor %}
-
-
-
-
-
- {% for slug, label, url in possible_yearly_stats_types %}
- {{ label }}
- {% endfor %}
-
-
-
Options
-
-
-
- {% for slug, label, url in possible_document_types %}
- {{ label }}
- {% endfor %}
-
-
-
-
-
- {% for slug, label, url in possible_time_choices %}
- {{ label }}
- {% endfor %}
-
-
-
- Please Note: The author information in the datatracker about RFCs
- with numbers lower than about 1300 and Internet-Drafts from before 2001 is
- unreliable and in many cases absent. For this reason, statistics on these
- pages does not show correct author stats for corpus selections that involve such
- documents.
-
- {% include content_template %}
-{% endblock %}
-{% block js %}
-
-
-
-{% endblock %}
\ No newline at end of file
diff --git a/ietf/templates/stats/document_stats_author_affiliation.html b/ietf/templates/stats/document_stats_author_affiliation.html
deleted file mode 100644
index 9c798cb924..0000000000
--- a/ietf/templates/stats/document_stats_author_affiliation.html
+++ /dev/null
@@ -1,113 +0,0 @@
-{% load origin %}
-{% origin %}
-
-
-
Data
-
-
-
-
Affiliation
-
Percentage of authors
-
Authors
-
-
- {% if table_data %}
-
- {% for affiliation, percentage, count, names in table_data %}
-
-
{{ affiliation|default:"(unknown)" }}
-
{{ percentage|floatformat:2 }}%
-
{% include "stats/includes/number_with_details_cell.html" %}
-
- {% endfor %}
-
- {% endif %}
-
-
- The statistics are based entirely on the author affiliation
- provided with each Internet-Draft. Since this may vary across documents, an
- author may be counted with more than one affiliation, making the
- total sum more than 100%.
-
-
Affiliation Aliases
-
- In generating the above statistics, some heuristics have been
- applied to determine the affiliations of each author.
-
-
- {% if table_data %}
-
- {% for continent, percentage, count, names in table_data %}
-
-
{{ continent|default:"(unknown)" }}
-
{{ percentage|floatformat:2 }}%
-
{% include "stats/includes/number_with_details_cell.html" %}
-
- {% endfor %}
-
- {% endif %}
-
-
- The statistics are based entirely on the author addresses provided
- with each Internet-Draft. Since this varies across documents, a traveling
- author may be counted in more than country, making the total sum
- more than 100%.
-
\ No newline at end of file
diff --git a/ietf/templates/stats/document_stats_author_country.html b/ietf/templates/stats/document_stats_author_country.html
deleted file mode 100644
index 72299cc397..0000000000
--- a/ietf/templates/stats/document_stats_author_country.html
+++ /dev/null
@@ -1,136 +0,0 @@
-{% load origin %}
-{% origin %}
-
-
-
Data
-
-
-
-
Country
-
Percentage of authors
-
Authors
-
-
- {% if table_data %}
-
- {% for country, percentage, count, names in table_data %}
-
-
{{ country|default:"(unknown)" }}
-
{{ percentage|floatformat:2 }}%
-
{% include "stats/includes/number_with_details_cell.html" %}
-
- {% endfor %}
-
- {% endif %}
-
-
- The statistics are based entirely on the author addresses provided
- with each Internet-Draft. Since this varies across documents, a traveling
- author may be counted in more than country, making the total sum
- more than 100%.
-
-
- In case no country information is found for an author in the time
- period, the author is counted as (unknown).
-
-
- EU (European Union) is not a country, but has been added for reference, as the sum of
- all current EU member countries:
- {% for c in eu_countries %}
- {{ c.name }}{% if not forloop.last %},{% endif %}
- {% endfor %}
- .
-
-
Country Aliases
-
- In generating the above statistics, some heuristics have been
- applied to figure out which country each author is from.
-
- Note: since you're an admin, some extra links are visible. You
- can either correct a document author entry directly in case the
- information is obviously missing or add an alias if an unknown
- country name
- is being used.
-
- {% endif %}
- {% if alias_data %}
-
-
-
Country
-
Alias
-
- {% if alias_data %}
-
- {% for name, alias, country in alias_data %}
-
-
- {% if country and request.user.is_staff %}
- {{ name|default:"(unknown)" }}
- {% else %}
- {{ name|default:"(unknown)" }}
- {% endif %}
-
-
- {{ alias }}
- {% if request.user.is_staff and name != "EU" %}
-
- Matching authors
-
- {% endif %}
-
-
- {% if table_data %}
-
- {% for h_index, percentage, count, names in table_data %}
-
-
{{ h_index }}
-
{{ percentage|floatformat:2 }}%
-
{% include "stats/includes/number_with_details_cell.html" with content_limit=25 %}
-
- {% endfor %}
-
- {% endif %}
-
-
- Hirsch index or h-index is a
-
- measure of the
- productivity and impact of the publications of an author
- .
- An
- author with an h-index of 5 has had 5 publications each cited at
- least 5 times - to increase the index to 6, the 5 publications plus
- 1 more would have to have been cited at least 6 times, each. Thus a
- high h-index requires many highly-cited publications.
-
-
- Note that the h-index calculations do not exclude self-references.
-
-
-
- {% for country, percentage, count, names in table_data %}
-
-
{{ country|default:"(unknown)" }}
-
{{ percentage|floatformat:2 }}%
-
{% include "stats/includes/number_with_details_cell.html" %}
-
- {% endfor %}
-
-
-
- EU (European Union) is not a country, but has been added for reference, as the sum of
- all current EU member countries:
- {% for c in eu_countries %}
- {{ c.name }}{% if not forloop.last %},{% endif %}
- {% endfor %}
- .
-
\ No newline at end of file
diff --git a/ietf/templates/stats/meeting_stats_overview.html b/ietf/templates/stats/meeting_stats_overview.html
deleted file mode 100644
index 1136e458b8..0000000000
--- a/ietf/templates/stats/meeting_stats_overview.html
+++ /dev/null
@@ -1,160 +0,0 @@
-{% load origin %}
-{% origin %}
-
-
-{% if table_data %}
-
Data
-
-
-
-
Meeting
-
Date
-
City
-
Country
-
Continent
-
Attendees
-
-
-
- {% for meeting, url, count, country in table_data %}
-
{% include "stats/includes/number_with_details_cell.html" %}
- {% else %}
-
{{ meeting.number }}
-
{{ meeting.date }}
-
{{ meeting.city }}
-
{{ country.name }}
-
{{ country.continent }}
-
{% include "stats/includes/number_with_details_cell.html" %}
- {% endif %}
-
- {% endfor %}
-
-
-{% endif %}
From 2ec7a71edfad24176034a1aa6a46ce66ddfb396b Mon Sep 17 00:00:00 2001
From: Robert Sparks
Date: Mon, 9 Dec 2024 10:34:53 -0600
Subject: [PATCH 006/460] chore: remove unused setting from various
settings_local templates (#8311)
---
dev/deploy-to-container/settings_local.py | 1 -
dev/diff/settings_local.py | 1 -
dev/tests/settings_local.py | 1 -
docker/configs/settings_local.py | 1 -
4 files changed, 4 deletions(-)
diff --git a/dev/deploy-to-container/settings_local.py b/dev/deploy-to-container/settings_local.py
index ae698e20b6..07bf0a7511 100644
--- a/dev/deploy-to-container/settings_local.py
+++ b/dev/deploy-to-container/settings_local.py
@@ -40,7 +40,6 @@
SUBMIT_YANG_CATALOG_MODEL_DIR = '/assets/ietf-ftp/yang/catalogmod/'
SUBMIT_YANG_DRAFT_MODEL_DIR = '/assets/ietf-ftp/yang/draftmod/'
-SUBMIT_YANG_INVAL_MODEL_DIR = '/assets/ietf-ftp/yang/invalmod/'
SUBMIT_YANG_IANA_MODEL_DIR = '/assets/ietf-ftp/yang/ianamod/'
SUBMIT_YANG_RFC_MODEL_DIR = '/assets/ietf-ftp/yang/rfcmod/'
diff --git a/dev/diff/settings_local.py b/dev/diff/settings_local.py
index 774c7797cf..6bcee46b61 100644
--- a/dev/diff/settings_local.py
+++ b/dev/diff/settings_local.py
@@ -37,7 +37,6 @@
SUBMIT_YANG_CATALOG_MODEL_DIR = '/assets/ietf-ftp/yang/catalogmod/'
SUBMIT_YANG_DRAFT_MODEL_DIR = '/assets/ietf-ftp/yang/draftmod/'
-SUBMIT_YANG_INVAL_MODEL_DIR = '/assets/ietf-ftp/yang/invalmod/'
SUBMIT_YANG_IANA_MODEL_DIR = '/assets/ietf-ftp/yang/ianamod/'
SUBMIT_YANG_RFC_MODEL_DIR = '/assets/ietf-ftp/yang/rfcmod/'
diff --git a/dev/tests/settings_local.py b/dev/tests/settings_local.py
index 20941359d4..afadb3760b 100644
--- a/dev/tests/settings_local.py
+++ b/dev/tests/settings_local.py
@@ -36,7 +36,6 @@
SUBMIT_YANG_CATALOG_MODEL_DIR = '/assets/ietf-ftp/yang/catalogmod/'
SUBMIT_YANG_DRAFT_MODEL_DIR = '/assets/ietf-ftp/yang/draftmod/'
-SUBMIT_YANG_INVAL_MODEL_DIR = '/assets/ietf-ftp/yang/invalmod/'
SUBMIT_YANG_IANA_MODEL_DIR = '/assets/ietf-ftp/yang/ianamod/'
SUBMIT_YANG_RFC_MODEL_DIR = '/assets/ietf-ftp/yang/rfcmod/'
diff --git a/docker/configs/settings_local.py b/docker/configs/settings_local.py
index 5d9859c19b..a1c19c80cf 100644
--- a/docker/configs/settings_local.py
+++ b/docker/configs/settings_local.py
@@ -26,7 +26,6 @@
SUBMIT_YANG_CATALOG_MODEL_DIR = '/assets/ietf-ftp/yang/catalogmod/'
SUBMIT_YANG_DRAFT_MODEL_DIR = '/assets/ietf-ftp/yang/draftmod/'
-SUBMIT_YANG_INVAL_MODEL_DIR = '/assets/ietf-ftp/yang/invalmod/'
SUBMIT_YANG_IANA_MODEL_DIR = '/assets/ietf-ftp/yang/ianamod/'
SUBMIT_YANG_RFC_MODEL_DIR = '/assets/ietf-ftp/yang/rfcmod/'
From 6f1c308ab3142a8f52df6d417767cb583d38a957 Mon Sep 17 00:00:00 2001
From: Jennifer Richards
Date: Mon, 9 Dec 2024 14:56:09 -0400
Subject: [PATCH 007/460] chore: drop unused cf-connecting-ipv6 header (#8319)
Only used in certain configurations of Pseudo IPv4.
---
ietf/utils/jsonlogger.py | 1 -
k8s/nginx-logging.conf | 1 -
2 files changed, 2 deletions(-)
diff --git a/ietf/utils/jsonlogger.py b/ietf/utils/jsonlogger.py
index 6502cab0cb..1fc453ad9e 100644
--- a/ietf/utils/jsonlogger.py
+++ b/ietf/utils/jsonlogger.py
@@ -29,7 +29,6 @@ def add_fields(self, log_record, record, message_dict):
log_record.setdefault("x_forwarded_for", record.args["{x-forwarded-for}i"])
log_record.setdefault("x_forwarded_proto", record.args["{x-forwarded-proto}i"])
log_record.setdefault("cf_connecting_ip", record.args["{cf-connecting-ip}i"])
- log_record.setdefault("cf_connecting_ipv6", record.args["{cf-connecting-ipv6}i"])
log_record.setdefault("cf_ray", record.args["{cf-ray}i"])
log_record.setdefault("asn", record.args["{x-ip-src-asnum}i"])
log_record.setdefault("is_authenticated", record.args["{x-datatracker-is-authenticated}o"])
diff --git a/k8s/nginx-logging.conf b/k8s/nginx-logging.conf
index 0bc7deca81..673d7a29ab 100644
--- a/k8s/nginx-logging.conf
+++ b/k8s/nginx-logging.conf
@@ -17,7 +17,6 @@ log_format ietfjson escape=json
'"x_forwarded_for":"$${keepempty}http_x_forwarded_for",'
'"x_forwarded_proto":"$${keepempty}http_x_forwarded_proto",'
'"cf_connecting_ip":"$${keepempty}http_cf_connecting_ip",'
- '"cf_connecting_ipv6":"$${keepempty}http_cf_connecting_ipv6",'
'"cf_ray":"$${keepempty}http_cf_ray",'
'"asn":"$${keepempty}http_x_ip_src_asnum"'
'}';
From 8e325829a36515344800ef64b850f1a5af2f5160 Mon Sep 17 00:00:00 2001
From: Robert Sparks
Date: Tue, 10 Dec 2024 09:57:08 -0600
Subject: [PATCH 008/460] chore: pin django-oidc-provider until we can adapt to
changes in 0.8.3 (#8320)
---
requirements.txt | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/requirements.txt b/requirements.txt
index f974113d8f..66397091ad 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -17,7 +17,7 @@ django-csp>=3.7
django-cors-headers>=3.11.0
django-debug-toolbar>=3.2.4
django-markup>=1.5 # Limited use - need to reconcile against direct use of markdown
-django-oidc-provider>=0.8.1 # 0.8 dropped Django 2 support
+django-oidc-provider==0.8.2 # 0.8.3 changes logout flow and claim return
django-referrer-policy>=1.0
django-simple-history>=3.0.0
django-stubs>=4.2.7,<5 # The django-stubs version used determines the the mypy version indicated below
From 6b77807c05145230c337809322ffa96606c140fd Mon Sep 17 00:00:00 2001
From: rjsparks
Date: Tue, 10 Dec 2024 16:08:48 +0000
Subject: [PATCH 009/460] ci: update base image target version to 20241210T1557
---
dev/build/Dockerfile | 2 +-
dev/build/TARGET_BASE | 2 +-
2 files changed, 2 insertions(+), 2 deletions(-)
diff --git a/dev/build/Dockerfile b/dev/build/Dockerfile
index 7af27e7d13..bab7c1fab3 100644
--- a/dev/build/Dockerfile
+++ b/dev/build/Dockerfile
@@ -1,4 +1,4 @@
-FROM ghcr.io/ietf-tools/datatracker-app-base:20241127T2054
+FROM ghcr.io/ietf-tools/datatracker-app-base:20241210T1557
LABEL maintainer="IETF Tools Team "
ENV DEBIAN_FRONTEND=noninteractive
diff --git a/dev/build/TARGET_BASE b/dev/build/TARGET_BASE
index e4b05ed700..bca669bb51 100644
--- a/dev/build/TARGET_BASE
+++ b/dev/build/TARGET_BASE
@@ -1 +1 @@
-20241127T2054
+20241210T1557
From 9b372a31b4c70a031bb20a1cb2ee8189551d01ce Mon Sep 17 00:00:00 2001
From: Jennifer Richards
Date: Thu, 12 Dec 2024 13:40:49 -0400
Subject: [PATCH 010/460] chore: update import for python-json-logger (#8330)
The "jsonlogger" module became "json" in 3.1.0
---
ietf/utils/jsonlogger.py | 4 ++--
requirements.txt | 2 +-
2 files changed, 3 insertions(+), 3 deletions(-)
diff --git a/ietf/utils/jsonlogger.py b/ietf/utils/jsonlogger.py
index 1fc453ad9e..589132977d 100644
--- a/ietf/utils/jsonlogger.py
+++ b/ietf/utils/jsonlogger.py
@@ -1,9 +1,9 @@
# Copyright The IETF Trust 2024, All Rights Reserved
-from pythonjsonlogger import jsonlogger
+from pythonjsonlogger.json import JsonFormatter
import time
-class DatatrackerJsonFormatter(jsonlogger.JsonFormatter):
+class DatatrackerJsonFormatter(JsonFormatter):
converter = time.gmtime # use UTC
default_msec_format = "%s.%03d" # '.' instead of ','
diff --git a/requirements.txt b/requirements.txt
index 66397091ad..ec5fc60b5f 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -59,7 +59,7 @@ pyopenssl>=22.0.0 # Used by urllib3.contrib, which is used by PyQuery but not
pyquery>=1.4.3
python-dateutil>=2.8.2
types-python-dateutil>=2.8.2
-python-json-logger>=2.0.7
+python-json-logger>=3.1.0
python-magic==0.4.18 # Versions beyond the yanked .19 and .20 introduce form failures
pymemcache>=4.0.0 # for django.core.cache.backends.memcached.PyMemcacheCache
python-mimeparse>=1.6 # from TastyPie
From a2f27d3d516e6914d947f013198bba2f19ece402 Mon Sep 17 00:00:00 2001
From: rjsparks
Date: Thu, 12 Dec 2024 17:53:09 +0000
Subject: [PATCH 011/460] ci: update base image target version to 20241212T1741
---
dev/build/Dockerfile | 2 +-
dev/build/TARGET_BASE | 2 +-
2 files changed, 2 insertions(+), 2 deletions(-)
diff --git a/dev/build/Dockerfile b/dev/build/Dockerfile
index bab7c1fab3..a923bf693f 100644
--- a/dev/build/Dockerfile
+++ b/dev/build/Dockerfile
@@ -1,4 +1,4 @@
-FROM ghcr.io/ietf-tools/datatracker-app-base:20241210T1557
+FROM ghcr.io/ietf-tools/datatracker-app-base:20241212T1741
LABEL maintainer="IETF Tools Team "
ENV DEBIAN_FRONTEND=noninteractive
diff --git a/dev/build/TARGET_BASE b/dev/build/TARGET_BASE
index bca669bb51..b5d33714f2 100644
--- a/dev/build/TARGET_BASE
+++ b/dev/build/TARGET_BASE
@@ -1 +1 @@
-20241210T1557
+20241212T1741
From 70ab7112165ffa1e686649311bb14b51721d6f77 Mon Sep 17 00:00:00 2001
From: Jennifer Richards
Date: Thu, 12 Dec 2024 18:48:44 -0400
Subject: [PATCH 012/460] refactor: separate concerns / rename notify_events
(#8328)
* refactor: separate signal receiver from work
* test: split test to match code structure
* test: fix test
* refactor: reorg signals in community app
---
ietf/community/apps.py | 12 ++++
ietf/community/models.py | 35 +---------
ietf/community/signals.py | 44 ++++++++++++
ietf/community/tests.py | 67 ++++++++++---------
ietf/utils/management/commands/loadrelated.py | 4 +-
5 files changed, 96 insertions(+), 66 deletions(-)
create mode 100644 ietf/community/apps.py
create mode 100644 ietf/community/signals.py
diff --git a/ietf/community/apps.py b/ietf/community/apps.py
new file mode 100644
index 0000000000..ab0a6d6054
--- /dev/null
+++ b/ietf/community/apps.py
@@ -0,0 +1,12 @@
+# Copyright The IETF Trust 2024, All Rights Reserved
+
+from django.apps import AppConfig
+
+
+class CommunityConfig(AppConfig):
+ name = "ietf.community"
+
+ def ready(self):
+ """Initialize the app after the registry is populated"""
+ # implicitly connects @receiver-decorated signals
+ from . import signals # pyflakes: ignore
diff --git a/ietf/community/models.py b/ietf/community/models.py
index 0407730107..6945918f9a 100644
--- a/ietf/community/models.py
+++ b/ietf/community/models.py
@@ -1,19 +1,14 @@
# Copyright The IETF Trust 2012-2020, All Rights Reserved
# -*- coding: utf-8 -*-
-
-from django.conf import settings
-from django.db import models, transaction
-from django.db.models import signals
+from django.db import models
from django.urls import reverse as urlreverse
-from ietf.doc.models import Document, DocEvent, State
+from ietf.doc.models import Document, State
from ietf.group.models import Group
from ietf.person.models import Person, Email
from ietf.utils.models import ForeignKey
-from .tasks import notify_event_to_subscribers_task
-
class CommunityList(models.Model):
person = ForeignKey(Person, blank=True, null=True)
@@ -98,29 +93,3 @@ class EmailSubscription(models.Model):
def __str__(self):
return "%s to %s (%s changes)" % (self.email, self.community_list, self.notify_on)
-
-
-def notify_events(sender, instance, **kwargs):
- if not isinstance(instance, DocEvent):
- return
-
- if not kwargs.get("created", False):
- return # only notify on creation
-
- if instance.doc.type_id != 'draft':
- return
-
- if getattr(instance, "skip_community_list_notification", False):
- return
-
- # kludge alert: queuing a celery task in response to a signal can cause unexpected attempts to
- # start a Celery task during tests. To prevent this, don't queue a celery task if we're running
- # tests.
- if settings.SERVER_MODE != "test":
- # Wrap in on_commit in case a transaction is open
- transaction.on_commit(
- lambda: notify_event_to_subscribers_task.delay(event_id=instance.pk)
- )
-
-
-signals.post_save.connect(notify_events)
diff --git a/ietf/community/signals.py b/ietf/community/signals.py
new file mode 100644
index 0000000000..20ee761129
--- /dev/null
+++ b/ietf/community/signals.py
@@ -0,0 +1,44 @@
+# Copyright The IETF Trust 2024, All Rights Reserved
+
+from django.conf import settings
+from django.db import transaction
+from django.db.models.signals import post_save
+from django.dispatch import receiver
+
+from ietf.doc.models import DocEvent
+from .tasks import notify_event_to_subscribers_task
+
+
+def notify_of_event(event: DocEvent):
+ """Send subscriber notification emails for a 'draft'-related DocEvent
+
+ If the event is attached to a draft of type 'doc', queues a task to send notification emails to
+ community list subscribers. No emails will be sent when SERVER_MODE is 'test'.
+ """
+ if event.doc.type_id != "draft":
+ return
+
+ if getattr(event, "skip_community_list_notification", False):
+ return
+
+ # kludge alert: queuing a celery task in response to a signal can cause unexpected attempts to
+ # start a Celery task during tests. To prevent this, don't queue a celery task if we're running
+ # tests.
+ if settings.SERVER_MODE != "test":
+ # Wrap in on_commit in case a transaction is open
+ transaction.on_commit(
+ lambda: notify_event_to_subscribers_task.delay(event_id=event.pk)
+ )
+
+
+# dispatch_uid ensures only a single signal receiver binding is made
+@receiver(post_save, dispatch_uid="notify_of_events_receiver_uid")
+def notify_of_events_receiver(sender, instance, **kwargs):
+ """Call notify_of_event after saving a new DocEvent"""
+ if not isinstance(instance, DocEvent):
+ return
+
+ if not kwargs.get("created", False):
+ return # only notify on creation
+
+ notify_of_event(instance)
diff --git a/ietf/community/tests.py b/ietf/community/tests.py
index 743242f11b..9bd7789958 100644
--- a/ietf/community/tests.py
+++ b/ietf/community/tests.py
@@ -1,7 +1,6 @@
# Copyright The IETF Trust 2016-2023, All Rights Reserved
# -*- coding: utf-8 -*-
-
import mock
from pyquery import PyQuery
@@ -11,6 +10,7 @@
import debug # pyflakes:ignore
from ietf.community.models import CommunityList, SearchRule, EmailSubscription
+from ietf.community.signals import notify_of_event
from ietf.community.utils import docs_matching_community_list_rule, community_list_rules_matching_doc
from ietf.community.utils import reset_name_contains_index_for_rule, notify_event_to_subscribers
from ietf.community.tasks import notify_event_to_subscribers_task
@@ -431,53 +431,58 @@ def test_subscription_for_group(self):
r = self.client.get(url)
self.assertEqual(r.status_code, 200)
- # Mock out the on_commit call so we can tell whether the task was actually queued
- @mock.patch("ietf.submit.views.transaction.on_commit", side_effect=lambda x: x())
- @mock.patch("ietf.community.models.notify_event_to_subscribers_task")
- def test_notification_signal_receiver(self, mock_notify_task, mock_on_commit):
- """Saving a DocEvent should notify subscribers
+ @mock.patch("ietf.community.signals.notify_of_event")
+ def test_notification_signal_receiver(self, mock_notify_of_event):
+ """Saving a newly created DocEvent should notify subscribers
- This implicitly tests that notify_events is hooked up to the post_save signal.
+ This implicitly tests that notify_of_event_receiver is hooked up to the post_save signal.
"""
# Arbitrary model that's not a DocEvent
- person = PersonFactory()
- mock_notify_task.reset_mock() # clear any calls that resulted from the factories
- # be careful overriding SERVER_MODE - we do it here because the method
- # under test does not make this call when in "test" mode
- with override_settings(SERVER_MODE="not-test"):
- person.save()
- self.assertFalse(mock_notify_task.delay.called)
-
+ person = PersonFactory.build() # builds but does not save...
+ mock_notify_of_event.reset_mock() # clear any calls that resulted from the factories
+ person.save()
+ self.assertFalse(mock_notify_of_event.called)
+
# build a DocEvent that is not yet persisted
doc = DocumentFactory()
- d = DocEventFactory.build(by=person, doc=doc)
- # mock_notify_task.reset_mock() # clear any calls that resulted from the factories
+ event = DocEventFactory.build(by=person, doc=doc) # builds but does not save...
+ mock_notify_of_event.reset_mock() # clear any calls that resulted from the factories
+ event.save()
+ self.assertEqual(mock_notify_of_event.call_count, 1, "notify_task should be run on creation of DocEvent")
+ self.assertEqual(mock_notify_of_event.call_args, mock.call(event))
+
+ # save the existing DocEvent and see that no notification is sent
+ mock_notify_of_event.reset_mock()
+ event.save()
+ self.assertFalse(mock_notify_of_event.called, "notify_task should not be run save of on existing DocEvent")
+
+ # Mock out the on_commit call so we can tell whether the task was actually queued
+ @mock.patch("ietf.submit.views.transaction.on_commit", side_effect=lambda x: x())
+ @mock.patch("ietf.community.signals.notify_event_to_subscribers_task")
+ def test_notify_of_event(self, mock_notify_task, mock_on_commit):
+ """The community notification task should be called as intended"""
+ person = PersonFactory() # builds but does not save...
+ doc = DocumentFactory()
+ event = DocEventFactory(by=person, doc=doc)
# be careful overriding SERVER_MODE - we do it here because the method
# under test does not make this call when in "test" mode
with override_settings(SERVER_MODE="not-test"):
- d.save()
- self.assertEqual(mock_notify_task.delay.call_count, 1, "notify_task should be run on creation of DocEvent")
- self.assertEqual(mock_notify_task.delay.call_args, mock.call(event_id = d.pk))
-
- mock_notify_task.reset_mock()
- with override_settings(SERVER_MODE="not-test"):
- d.save()
- self.assertFalse(mock_notify_task.delay.called, "notify_task should not be run save of on existing DocEvent")
-
+ notify_of_event(event)
+ self.assertTrue(mock_notify_task.delay.called, "notify_task should run for a DocEvent on a draft")
mock_notify_task.reset_mock()
- d = DocEventFactory.build(by=person, doc=doc)
- d.skip_community_list_notification = True
+
+ event.skip_community_list_notification = True
# be careful overriding SERVER_MODE - we do it here because the method
# under test does not make this call when in "test" mode
with override_settings(SERVER_MODE="not-test"):
- d.save()
+ notify_of_event(event)
self.assertFalse(mock_notify_task.delay.called, "notify_task should not run when skip_community_list_notification is set")
- d = DocEventFactory.build(by=person, doc=DocumentFactory(type_id="rfc"))
+ event = DocEventFactory.build(by=person, doc=DocumentFactory(type_id="rfc"))
# be careful overriding SERVER_MODE - we do it here because the method
# under test does not make this call when in "test" mode
with override_settings(SERVER_MODE="not-test"):
- d.save()
+ notify_of_event(event)
self.assertFalse(mock_notify_task.delay.called, "notify_task should not run on a document with type 'rfc'")
@mock.patch("ietf.utils.mail.send_mail_text")
diff --git a/ietf/utils/management/commands/loadrelated.py b/ietf/utils/management/commands/loadrelated.py
index da9d00d5dc..d8ae19dc77 100644
--- a/ietf/utils/management/commands/loadrelated.py
+++ b/ietf/utils/management/commands/loadrelated.py
@@ -23,7 +23,7 @@
import debug # pyflakes:ignore
-from ietf.community.models import notify_events
+from ietf.community.signals import notify_of_events_receiver
class Command(loaddata.Command):
help = ("""
@@ -62,7 +62,7 @@ def handle(self, *args, **options):
#
self.serialization_formats = serializers.get_public_serializer_formats()
#
- post_save.disconnect(notify_events)
+ post_save.disconnect(notify_of_events_receiver())
#
connection = connections[self.using]
self.fixture_count = 0
From c747e972017142c81af5162207155ca31df7aeba Mon Sep 17 00:00:00 2001
From: Robert Sparks
Date: Fri, 13 Dec 2024 11:48:19 -0600
Subject: [PATCH 013/460] fix: keep draft-iesg state on expiration. Update
action holders. (#8321)
* fix: keep draft-iesg state on expiration. Update action holders
* feat: task to repair docs in dead because expiry
* fix: restore all to-date flows through update_action_holders
* fix: Fetch the System user following more regular conventions
* fix: better signal test
---
ietf/doc/expire.py | 62 +++++++++++++++++++++--------
ietf/doc/tasks.py | 6 +++
ietf/doc/tests_draft.py | 86 ++++++++++++++++++++++++++++++++++++++---
ietf/doc/tests_tasks.py | 7 ++++
ietf/doc/utils.py | 82 ++++++++++++++++++++++-----------------
ietf/doc/views_draft.py | 3 +-
6 files changed, 186 insertions(+), 60 deletions(-)
diff --git a/ietf/doc/expire.py b/ietf/doc/expire.py
index 681ca963a3..0581c9a03f 100644
--- a/ietf/doc/expire.py
+++ b/ietf/doc/expire.py
@@ -3,6 +3,8 @@
# expiry of Internet-Drafts
+import debug # pyflakes:ignore
+
from django.conf import settings
from django.utils import timezone
@@ -11,12 +13,12 @@
from typing import List, Optional # pyflakes:ignore
+from ietf.doc.utils import new_state_change_event, update_action_holders
from ietf.utils import log
from ietf.utils.mail import send_mail
-from ietf.doc.models import Document, DocEvent, State, IESG_SUBSTATE_TAGS
+from ietf.doc.models import Document, DocEvent, State, StateDocEvent
from ietf.person.models import Person
from ietf.meeting.models import Meeting
-from ietf.doc.utils import add_state_change_event, update_action_holders
from ietf.mailtrigger.utils import gather_address_lists
from ietf.utils.timezone import date_today, datetime_today, DEADLINE_TZINFO
@@ -161,24 +163,11 @@ def expire_draft(doc):
events = []
- # change the state
- if doc.latest_event(type='started_iesg_process'):
- new_state = State.objects.get(used=True, type="draft-iesg", slug="dead")
- prev_state = doc.get_state(new_state.type_id)
- prev_tags = doc.tags.filter(slug__in=IESG_SUBSTATE_TAGS)
- if new_state != prev_state:
- doc.set_state(new_state)
- doc.tags.remove(*prev_tags)
- e = add_state_change_event(doc, system, prev_state, new_state, prev_tags=prev_tags, new_tags=[])
- if e:
- events.append(e)
- e = update_action_holders(doc, prev_state, new_state, prev_tags=prev_tags, new_tags=[])
- if e:
- events.append(e)
-
events.append(DocEvent.objects.create(doc=doc, rev=doc.rev, by=system, type="expired_document", desc="Document has expired"))
+ prev_draft_state=doc.get_state("draft")
doc.set_state(State.objects.get(used=True, type="draft", slug="expired"))
+ events.append(update_action_holders(doc, prev_draft_state, doc.get_state("draft"),[],[]))
doc.save_with_history(events)
def clean_up_draft_files():
@@ -238,3 +227,42 @@ def move_file_to(subdir):
except Document.DoesNotExist:
# All uses of this past 2014 seem related to major system failures.
move_file_to("unknown_ids")
+
+
+def repair_dead_on_expire():
+ by = Person.objects.get(name="(System)")
+ id_exists = State.objects.get(type="draft-iesg", slug="idexists")
+ dead = State.objects.get(type="draft-iesg", slug="dead")
+ dead_drafts = Document.objects.filter(
+ states__type="draft-iesg", states__slug="dead", type_id="draft"
+ )
+ for d in dead_drafts:
+ dead_event = d.latest_event(
+ StateDocEvent, state_type="draft-iesg", state__slug="dead"
+ )
+ if dead_event is not None:
+ if d.docevent_set.filter(type="expired_document").exists():
+ closest_expiry = min(
+ [
+ abs(e.time - dead_event.time)
+ for e in d.docevent_set.filter(type="expired_document")
+ ]
+ )
+ if closest_expiry.total_seconds() < 60:
+ d.set_state(id_exists)
+ events = []
+ e = DocEvent(
+ doc=d,
+ rev=d.rev,
+ type="added_comment",
+ by=by,
+ desc="IESG Dead state was set due only to document expiry - changing IESG state to ID-Exists",
+ )
+ e.skip_community_list_notification = True
+ e.save()
+ events.append(e)
+ e = new_state_change_event(d, by, dead, id_exists)
+ e.skip_community_list_notification = True
+ e.save()
+ events.append(e)
+ d.save_with_history(events)
diff --git a/ietf/doc/tasks.py b/ietf/doc/tasks.py
index f1de459dd8..b7f89e1f9b 100644
--- a/ietf/doc/tasks.py
+++ b/ietf/doc/tasks.py
@@ -18,6 +18,7 @@
in_draft_expire_freeze,
get_expired_drafts,
expirable_drafts,
+ repair_dead_on_expire,
send_expire_notice_for_draft,
expire_draft,
clean_up_draft_files,
@@ -61,6 +62,11 @@ def expire_ids_task():
raise
+@shared_task
+def repair_dead_on_expire_task():
+ repair_dead_on_expire()
+
+
@shared_task
def notify_expirations_task(notify_days=14):
for doc in get_soon_to_expire_drafts(notify_days):
diff --git a/ietf/doc/tests_draft.py b/ietf/doc/tests_draft.py
index 09a7b38999..84959625c9 100644
--- a/ietf/doc/tests_draft.py
+++ b/ietf/doc/tests_draft.py
@@ -19,10 +19,10 @@
import debug # pyflakes:ignore
-from ietf.doc.expire import get_expired_drafts, send_expire_notice_for_draft, expire_draft
-from ietf.doc.factories import EditorialDraftFactory, IndividualDraftFactory, WgDraftFactory, RgDraftFactory, DocEventFactory
+from ietf.doc.expire import expirable_drafts, get_expired_drafts, repair_dead_on_expire, send_expire_notice_for_draft, expire_draft
+from ietf.doc.factories import EditorialDraftFactory, IndividualDraftFactory, StateDocEventFactory, WgDraftFactory, RgDraftFactory, DocEventFactory
from ietf.doc.models import ( Document, DocReminder, DocEvent,
- ConsensusDocEvent, LastCallDocEvent, RelatedDocument, State, TelechatDocEvent,
+ ConsensusDocEvent, LastCallDocEvent, RelatedDocument, State, StateDocEvent, TelechatDocEvent,
WriteupDocEvent, DocRelationshipName, IanaExpertDocEvent )
from ietf.doc.utils import get_tags_for_stream_id, create_ballot_if_not_open
from ietf.doc.views_draft import AdoptDraftForm
@@ -36,7 +36,7 @@
from ietf.utils.test_utils import login_testing_unauthorized
from ietf.utils.mail import outbox, empty_outbox, get_payload_text
from ietf.utils.test_utils import TestCase
-from ietf.utils.timezone import date_today, datetime_from_date, DEADLINE_TZINFO
+from ietf.utils.timezone import date_today, datetime_today, datetime_from_date, DEADLINE_TZINFO
class ChangeStateTests(TestCase):
@@ -763,13 +763,16 @@ def test_expire_drafts(self):
txt = "%s-%s.txt" % (draft.name, draft.rev)
self.write_draft_file(txt, 5000)
+ self.assertFalse(expirable_drafts(Document.objects.filter(pk=draft.pk)).exists())
+ draft.set_state(State.objects.get(used=True, type="draft-iesg", slug="idexists"))
+ self.assertTrue(expirable_drafts(Document.objects.filter(pk=draft.pk)).exists())
expire_draft(draft)
draft = Document.objects.get(name=draft.name)
self.assertEqual(draft.get_state_slug(), "expired")
- self.assertEqual(draft.get_state_slug("draft-iesg"), "dead")
+ self.assertEqual(draft.get_state_slug("draft-iesg"), "idexists")
self.assertTrue(draft.latest_event(type="expired_document"))
- self.assertCountEqual(draft.action_holders.all(), [])
+ self.assertEqual(draft.action_holders.count(), 0)
self.assertIn('Removed all action holders', draft.latest_event(type='changed_action_holders').desc)
self.assertTrue(not os.path.exists(os.path.join(settings.INTERNET_DRAFT_PATH, txt)))
self.assertTrue(os.path.exists(os.path.join(settings.INTERNET_DRAFT_ARCHIVE_DIR, txt)))
@@ -842,6 +845,77 @@ def test_clean_up_draft_files(self):
self.assertTrue(not os.path.exists(os.path.join(settings.INTERNET_DRAFT_PATH, txt)))
self.assertTrue(os.path.exists(os.path.join(settings.INTERNET_DRAFT_ARCHIVE_DIR, txt)))
+ @mock.patch("ietf.community.signals.notify_of_event")
+ def test_repair_dead_on_expire(self, mock_notify):
+
+ # Create a draft in iesg idexists - ensure it doesn't get new docevents.
+ # Create a draft in iesg dead with no expires within the window - ensure it doesn't get new docevents and its state doesn't change.
+ # Create a draft in iesg dead with an expiry in the window - ensure it gets the right doc events, iesg state changes, draft state doesn't change.
+ last_year = datetime_today() - datetime.timedelta(days=365)
+
+ not_dead = WgDraftFactory(name="draft-not-dead")
+ not_dead_event_count = not_dead.docevent_set.count()
+
+ dead_not_from_expires = WgDraftFactory(name="draft-dead-not-from-expiring")
+ dead_not_from_expires.set_state(
+ State.objects.get(type="draft-iesg", slug="dead")
+ )
+ StateDocEventFactory(
+ doc=dead_not_from_expires, state=("draft-iesg", "dead"), time=last_year
+ )
+ DocEventFactory(
+ doc=dead_not_from_expires,
+ type="expired_document",
+ time=last_year + datetime.timedelta(days=1),
+ )
+ dead_not_from_expires_event_count = dead_not_from_expires.docevent_set.count()
+
+ dead_from_expires = []
+ dead_from_expires_event_count = dict()
+ for delta in [-5, 5]:
+ d = WgDraftFactory(
+ name=f"draft-dead-from-expiring-just-{'before' if delta<0 else 'after'}"
+ )
+ d.set_state(State.objects.get(type="draft-iesg", slug="dead"))
+ StateDocEventFactory(doc=d, state=("draft-iesg", "dead"), time=last_year)
+ DocEventFactory(
+ doc=d,
+ type="expired_document",
+ time=last_year + datetime.timedelta(seconds=delta),
+ )
+ dead_from_expires.append(d)
+ dead_from_expires_event_count[d] = d.docevent_set.count()
+
+ notified_during_factory_work = mock_notify.call_count
+ for call_args in mock_notify.call_args_list:
+ e = call_args.args[0]
+ self.assertTrue(isinstance(e,DocEvent))
+ self.assertFalse(hasattr(e,"skip_community_list_notification"))
+
+ repair_dead_on_expire()
+
+ self.assertEqual(not_dead.docevent_set.count(), not_dead_event_count)
+ self.assertEqual(
+ dead_not_from_expires.docevent_set.count(),
+ dead_not_from_expires_event_count,
+ )
+ for d in dead_from_expires:
+ self.assertEqual(
+ d.docevent_set.count(), dead_from_expires_event_count[d] + 2
+ )
+ self.assertIn(
+ "due only to document expiry", d.latest_event(type="added_comment").desc
+ )
+ self.assertEqual(
+ d.latest_event(StateDocEvent).desc,
+ "IESG state changed to I-D Exists from Dead",
+ )
+ self.assertEqual(mock_notify.call_count, 4+notified_during_factory_work)
+ for call_args in mock_notify.call_args_list[-4:]:
+ e = call_args.args[0]
+ self.assertTrue(isinstance(e,DocEvent))
+ self.assertTrue(hasattr(e,"skip_community_list_notification"))
+ self.assertTrue(e.skip_community_list_notification)
class ExpireLastCallTests(TestCase):
def test_expire_last_call(self):
diff --git a/ietf/doc/tests_tasks.py b/ietf/doc/tests_tasks.py
index b75f58656b..135b52f604 100644
--- a/ietf/doc/tests_tasks.py
+++ b/ietf/doc/tests_tasks.py
@@ -1,4 +1,6 @@
# Copyright The IETF Trust 2024, All Rights Reserved
+
+import debug # pyflakes:ignore
import datetime
import mock
@@ -19,6 +21,7 @@
generate_idnits2_rfcs_obsoleted_task,
generate_idnits2_rfc_status_task,
notify_expirations_task,
+ repair_dead_on_expire_task,
)
class TaskTests(TestCase):
@@ -96,6 +99,10 @@ def test_expire_last_calls_task(self, mock_get_expired, mock_expire):
self.assertEqual(mock_expire.call_args_list[1], mock.call(docs[1]))
self.assertEqual(mock_expire.call_args_list[2], mock.call(docs[2]))
+ @mock.patch("ietf.doc.tasks.repair_dead_on_expire")
+ def test_repair_dead_on_expire_task(self, mock_repair):
+ repair_dead_on_expire_task()
+ self.assertEqual(mock_repair.call_count, 1)
class Idnits2SupportTests(TestCase):
settings_temp_path_overrides = TestCase.settings_temp_path_overrides + ['DERIVED_DIR']
diff --git a/ietf/doc/utils.py b/ietf/doc/utils.py
index b2bc0997b1..ff19dfbde6 100644
--- a/ietf/doc/utils.py
+++ b/ietf/doc/utils.py
@@ -491,8 +491,9 @@ def update_action_holders(doc, prev_state=None, new_state=None, prev_tags=None,
Returns an event describing the change which should be passed to doc.save_with_history()
- Only cares about draft-iesg state changes. Places where other state types are updated
- may not call this method. If you add rules for updating action holders on other state
+ Only cares about draft-iesg state changes and draft expiration.
+ Places where other state types are updated may not call this method.
+ If you add rules for updating action holders on other state
types, be sure this is called in the places that change that state.
"""
# Should not call this with different state types
@@ -511,41 +512,50 @@ def update_action_holders(doc, prev_state=None, new_state=None, prev_tags=None,
# Remember original list of action holders to later check if it changed
prev_set = list(doc.action_holders.all())
-
- # Update the action holders. To get this right for people with more
- # than one relationship to the document, do removals first, then adds.
- # Remove outdated action holders
- iesg_state_changed = (prev_state != new_state) and (getattr(new_state, "type_id", None) == "draft-iesg")
- if iesg_state_changed:
- # Clear the action_holders list on a state change. This will reset the age of any that get added back.
+
+ if new_state and new_state.type_id=="draft" and new_state.slug=="expired":
doc.action_holders.clear()
- if tags.removed("need-rev"):
- # Removed the 'need-rev' tag - drop authors from the action holders list
- DocumentActionHolder.objects.filter(document=doc, person__in=doc.authors()).delete()
- elif tags.added("need-rev"):
- # Remove the AD if we're asking for a new revision
- DocumentActionHolder.objects.filter(document=doc, person=doc.ad).delete()
-
- # Add new action holders
- if doc.ad:
- # AD is an action holder unless specified otherwise for the new state
- if iesg_state_changed and new_state.slug not in DocumentActionHolder.CLEAR_ACTION_HOLDERS_STATES:
- doc.action_holders.add(doc.ad)
- # If AD follow-up is needed, make sure they are an action holder
- if tags.added("ad-f-up"):
- doc.action_holders.add(doc.ad)
- # Authors get the action if a revision is needed
- if tags.added("need-rev"):
- for auth in doc.authors():
- doc.action_holders.add(auth)
-
- # Now create an event if we changed the set
- return add_action_holder_change_event(
- doc,
- Person.objects.get(name='(System)'),
- prev_set,
- reason='IESG state changed',
- )
+ return add_action_holder_change_event(
+ doc,
+ Person.objects.get(name='(System)'),
+ prev_set,
+ reason='draft expired',
+ )
+ else:
+ # Update the action holders. To get this right for people with more
+ # than one relationship to the document, do removals first, then adds.
+ # Remove outdated action holders
+ iesg_state_changed = (prev_state != new_state) and (getattr(new_state, "type_id", None) == "draft-iesg")
+ if iesg_state_changed:
+ # Clear the action_holders list on a state change. This will reset the age of any that get added back.
+ doc.action_holders.clear()
+ if tags.removed("need-rev"):
+ # Removed the 'need-rev' tag - drop authors from the action holders list
+ DocumentActionHolder.objects.filter(document=doc, person__in=doc.authors()).delete()
+ elif tags.added("need-rev"):
+ # Remove the AD if we're asking for a new revision
+ DocumentActionHolder.objects.filter(document=doc, person=doc.ad).delete()
+
+ # Add new action holders
+ if doc.ad:
+ # AD is an action holder unless specified otherwise for the new state
+ if iesg_state_changed and new_state.slug not in DocumentActionHolder.CLEAR_ACTION_HOLDERS_STATES:
+ doc.action_holders.add(doc.ad)
+ # If AD follow-up is needed, make sure they are an action holder
+ if tags.added("ad-f-up"):
+ doc.action_holders.add(doc.ad)
+ # Authors get the action if a revision is needed
+ if tags.added("need-rev"):
+ for auth in doc.authors():
+ doc.action_holders.add(auth)
+
+ # Now create an event if we changed the set
+ return add_action_holder_change_event(
+ doc,
+ Person.objects.get(name='(System)'),
+ prev_set,
+ reason='IESG state changed',
+ )
def update_documentauthors(doc, new_docauthors, by=None, basis=None):
diff --git a/ietf/doc/views_draft.py b/ietf/doc/views_draft.py
index d709aedd42..34104b2005 100644
--- a/ietf/doc/views_draft.py
+++ b/ietf/doc/views_draft.py
@@ -95,7 +95,8 @@ def change_state(request, name):
and logging the change as a comment."""
doc = get_object_or_404(Document, name=name)
- if (not doc.latest_event(type="started_iesg_process")) or doc.get_state_slug() == "expired":
+ # Steer ADs towards "Begin IESG Processing"
+ if doc.get_state_slug("draft-iesg")=="idexists" and not has_role(request.user,"Secretariat"):
raise Http404
login = request.user.person
From 981f7aea3e3641356de92207f2529622764e8828 Mon Sep 17 00:00:00 2001
From: Robert Sparks
Date: Fri, 13 Dec 2024 15:14:13 -0600
Subject: [PATCH 014/460] chore: remove unused setting (#8336)
---
ietf/settings.py | 1 -
test/settings_local_test.py | 1 -
2 files changed, 2 deletions(-)
diff --git a/ietf/settings.py b/ietf/settings.py
index 7c3dc7fa16..4e678b3808 100644
--- a/ietf/settings.py
+++ b/ietf/settings.py
@@ -750,7 +750,6 @@ def skip_unreadable_post(record):
STATUS_CHANGE_PATH = '/a/ietfdata/doc/status-change'
AGENDA_PATH = '/a/www/www6s/proceedings/'
MEETINGHOST_LOGO_PATH = AGENDA_PATH # put these in the same place as other proceedings files
-IPR_DOCUMENT_PATH = '/a/www/ietf-ftp/ietf/IPR/'
# Move drafts to this directory when they expire
INTERNET_DRAFT_ARCHIVE_DIR = '/a/ietfdata/doc/draft/collection/draft-archive/'
# The following directory contains copies of all drafts - it used to be
diff --git a/test/settings_local_test.py b/test/settings_local_test.py
index 06d810e4af..7097f76459 100644
--- a/test/settings_local_test.py
+++ b/test/settings_local_test.py
@@ -5,6 +5,5 @@
SERVER_MODE = 'test'
-IPR_DOCUMENT_PATH = '/home/ietf/adm/IPR/'
SITE_ID = 1
From d59e102c9f57893a18d56246dc8a8e8fb5414f32 Mon Sep 17 00:00:00 2001
From: Robert Sparks
Date: Fri, 13 Dec 2024 15:17:01 -0600
Subject: [PATCH 015/460] fix: cleanup ftp copy of repository on move to
archive (#8335)
---
ietf/doc/expire.py | 7 +++++++
1 file changed, 7 insertions(+)
diff --git a/ietf/doc/expire.py b/ietf/doc/expire.py
index 0581c9a03f..63955d091a 100644
--- a/ietf/doc/expire.py
+++ b/ietf/doc/expire.py
@@ -150,10 +150,17 @@ def move_file(f):
pass
else:
raise
+
+ def remove_ftp_copy(f):
+ mark = Path(settings.FTP_DIR) / "internet-drafts" / f
+ if mark.exists():
+ mark.unlink()
+
src_dir = Path(settings.INTERNET_DRAFT_PATH)
for file in src_dir.glob("%s-%s.*" % (doc.name, rev)):
move_file(str(file.name))
+ remove_ftp_copy(str(file.name))
def expire_draft(doc):
# clean up files
From 98b251487dfa685f0b318e6a02b47a84f6f65876 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Fri, 13 Dec 2024 19:15:44 -0500
Subject: [PATCH 016/460] chore(deps): bump nanoid in /dev/deploy-to-container
in the npm group (#8293)
Bumps the npm group in /dev/deploy-to-container with 1 update: [nanoid](https://github.com/ai/nanoid).
Updates `nanoid` from 5.0.8 to 5.0.9
- [Release notes](https://github.com/ai/nanoid/releases)
- [Changelog](https://github.com/ai/nanoid/blob/main/CHANGELOG.md)
- [Commits](https://github.com/ai/nanoid/compare/5.0.8...5.0.9)
---
updated-dependencies:
- dependency-name: nanoid
dependency-type: direct:production
update-type: version-update:semver-patch
dependency-group: npm
...
Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
---
dev/deploy-to-container/package-lock.json | 14 +++++++-------
dev/deploy-to-container/package.json | 2 +-
2 files changed, 8 insertions(+), 8 deletions(-)
diff --git a/dev/deploy-to-container/package-lock.json b/dev/deploy-to-container/package-lock.json
index d787f6aa9e..08b6eb6c62 100644
--- a/dev/deploy-to-container/package-lock.json
+++ b/dev/deploy-to-container/package-lock.json
@@ -8,7 +8,7 @@
"dependencies": {
"dockerode": "^4.0.2",
"fs-extra": "^11.2.0",
- "nanoid": "5.0.8",
+ "nanoid": "5.0.9",
"nanoid-dictionary": "5.0.0-beta.1",
"slugify": "1.6.6",
"tar": "^7.4.3",
@@ -546,9 +546,9 @@
"optional": true
},
"node_modules/nanoid": {
- "version": "5.0.8",
- "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-5.0.8.tgz",
- "integrity": "sha512-TcJPw+9RV9dibz1hHUzlLVy8N4X9TnwirAjrU08Juo6BNKggzVfP2ZJ/3ZUSq15Xl5i85i+Z89XBO90pB2PghQ==",
+ "version": "5.0.9",
+ "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-5.0.9.tgz",
+ "integrity": "sha512-Aooyr6MXU6HpvvWXKoVoXwKMs/KyVakWwg7xQfv5/S/RIgJMy0Ifa45H9qqYy7pTCszrHzP21Uk4PZq2HpEM8Q==",
"funding": [
{
"type": "github",
@@ -1346,9 +1346,9 @@
"optional": true
},
"nanoid": {
- "version": "5.0.8",
- "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-5.0.8.tgz",
- "integrity": "sha512-TcJPw+9RV9dibz1hHUzlLVy8N4X9TnwirAjrU08Juo6BNKggzVfP2ZJ/3ZUSq15Xl5i85i+Z89XBO90pB2PghQ=="
+ "version": "5.0.9",
+ "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-5.0.9.tgz",
+ "integrity": "sha512-Aooyr6MXU6HpvvWXKoVoXwKMs/KyVakWwg7xQfv5/S/RIgJMy0Ifa45H9qqYy7pTCszrHzP21Uk4PZq2HpEM8Q=="
},
"nanoid-dictionary": {
"version": "5.0.0-beta.1",
diff --git a/dev/deploy-to-container/package.json b/dev/deploy-to-container/package.json
index 530d1f3b8b..8f16279cea 100644
--- a/dev/deploy-to-container/package.json
+++ b/dev/deploy-to-container/package.json
@@ -4,7 +4,7 @@
"dependencies": {
"dockerode": "^4.0.2",
"fs-extra": "^11.2.0",
- "nanoid": "5.0.8",
+ "nanoid": "5.0.9",
"nanoid-dictionary": "5.0.0-beta.1",
"slugify": "1.6.6",
"tar": "^7.4.3",
From 4a1d29e86ca7bf1d5fca42f1061769c5383ff859 Mon Sep 17 00:00:00 2001
From: Nicolas Giard
Date: Mon, 16 Dec 2024 09:51:33 -0500
Subject: [PATCH 017/460] feat: add wiki button to agenda list for hackathon
sessions (#8133)
* feat: add wiki button to agenda list for hackathon sessions
* fix: update client/agenda/AgendaScheduleList.vue
Co-authored-by: Matthew Holloway
* fix: broken tests
---------
Co-authored-by: Matthew Holloway
Co-authored-by: Robert Sparks
---
client/agenda/AgendaScheduleList.vue | 14 +++++++++++-
client/shared/urls.json | 1 +
ietf/templates/meeting/agenda.html | 2 +-
playwright/tests/meeting/agenda.spec.js | 30 ++++++++++++++++++-------
4 files changed, 37 insertions(+), 10 deletions(-)
diff --git a/client/agenda/AgendaScheduleList.vue b/client/agenda/AgendaScheduleList.vue
index 0cac7e39ee..e7c14797ee 100644
--- a/client/agenda/AgendaScheduleList.vue
+++ b/client/agenda/AgendaScheduleList.vue
@@ -284,6 +284,7 @@ const meetingEvents = computed(() => {
const purposesWithoutLinks = ['admin', 'closed_meeting', 'officehours', 'social']
if (item.flags.showAgenda || (typesWithLinks.includes(item.type) && !purposesWithoutLinks.includes(item.purpose))) {
if (item.flags.agenda) {
+ // -> Meeting Materials
links.push({
id: `lnk-${item.id}-tar`,
label: 'Download meeting materials as .tar archive',
@@ -305,7 +306,18 @@ const meetingEvents = computed(() => {
color: 'red'
})
}
- if (agendaStore.usesNotes) {
+ // -> Point to Wiki for Hackathon sessions, HedgeDocs otherwise
+ if (item.name.toLowerCase().includes('hackathon')) {
+ links.push({
+ id: `lnk-${item.id}-wiki`,
+ label: 'Wiki',
+ icon: 'book',
+ href: getUrl('hackathonWiki', {
+ meetingNumber: agendaStore.meeting.number
+ }),
+ color: 'blue'
+ })
+ } else if (agendaStore.usesNotes) {
links.push({
id: `lnk-${item.id}-note`,
label: 'Notepad for note-takers',
diff --git a/client/shared/urls.json b/client/shared/urls.json
index 285caa07d2..15410d68df 100644
--- a/client/shared/urls.json
+++ b/client/shared/urls.json
@@ -1,5 +1,6 @@
{
"bofDefinition": "https://www.ietf.org/how/bofs/",
+ "hackathonWiki": "https://wiki.ietf.org/meeting/{meetingNumber}/hackathon",
"meetingCalIcs": "/meeting/{meetingNumber}/agenda.ics",
"meetingDetails": "/meeting/{meetingNumber}/session/{eventAcronym}/",
"meetingMaterialsPdf": "/meeting/{meetingNumber}/agenda/{eventAcronym}-drafts.pdf",
diff --git a/ietf/templates/meeting/agenda.html b/ietf/templates/meeting/agenda.html
index f3228aa5ad..089141bde8 100644
--- a/ietf/templates/meeting/agenda.html
+++ b/ietf/templates/meeting/agenda.html
@@ -101,7 +101,7 @@
{% endblock %}
diff --git a/playwright/tests/meeting/agenda.spec.js b/playwright/tests/meeting/agenda.spec.js
index 109e1b3b81..d31dbd5c2c 100644
--- a/playwright/tests/meeting/agenda.spec.js
+++ b/playwright/tests/meeting/agenda.spec.js
@@ -286,10 +286,17 @@ test.describe('past - desktop', () => {
// No meeting materials yet warning badge
await expect(eventButtons.locator('.no-meeting-materials')).toBeVisible()
}
- // Notepad button
- const hedgeDocLink = `https://notes.ietf.org/notes-ietf-${meetingData.meeting.number}-${event.type === 'plenary' ? 'plenary' : event.acronym}`
- await expect(eventButtons.locator(`#btn-lnk-${event.id}-note`)).toHaveAttribute('href', hedgeDocLink)
- await expect(eventButtons.locator(`#btn-lnk-${event.id}-note > i.bi`)).toBeVisible()
+ if (event.name.toLowerCase().includes('hackathon')) {
+ // Hackathon Wiki button
+ const hackathonWikiLink = `https://wiki.ietf.org/meeting/${meetingData.meeting.number}/hackathon`
+ await expect(eventButtons.locator(`#btn-lnk-${event.id}-wiki`)).toHaveAttribute('href', hackathonWikiLink)
+ await expect(eventButtons.locator(`#btn-lnk-${event.id}-wiki > i.bi`)).toBeVisible()
+ } else {
+ // Notepad button
+ const hedgeDocLink = `https://notes.ietf.org/notes-ietf-${meetingData.meeting.number}-${event.type === 'plenary' ? 'plenary' : event.acronym}`
+ await expect(eventButtons.locator(`#btn-lnk-${event.id}-note`)).toHaveAttribute('href', hedgeDocLink)
+ await expect(eventButtons.locator(`#btn-lnk-${event.id}-note > i.bi`)).toBeVisible()
+ }
// Chat logs
await expect(eventButtons.locator(`#btn-lnk-${event.id}-logs`)).toHaveAttribute('href', event.links.chatArchive)
await expect(eventButtons.locator(`#btn-lnk-${event.id}-logs > i.bi`)).toBeVisible()
@@ -1162,10 +1169,17 @@ test.describe('future - desktop', () => {
// No meeting materials yet warning badge
await expect(eventButtons.locator('.no-meeting-materials')).toBeVisible()
}
- // Notepad button
- const hedgeDocLink = `https://notes.ietf.org/notes-ietf-${meetingData.meeting.number}-${event.type === 'plenary' ? 'plenary' : event.acronym}`
- await expect(eventButtons.locator(`#btn-lnk-${event.id}-note`)).toHaveAttribute('href', hedgeDocLink)
- await expect(eventButtons.locator(`#btn-lnk-${event.id}-note > i.bi`)).toBeVisible()
+ if (event.name.toLowerCase().includes('hackathon')) {
+ // Hackathon Wiki button
+ const hackathonWikiLink = `https://wiki.ietf.org/meeting/${meetingData.meeting.number}/hackathon`
+ await expect(eventButtons.locator(`#btn-lnk-${event.id}-wiki`)).toHaveAttribute('href', hackathonWikiLink)
+ await expect(eventButtons.locator(`#btn-lnk-${event.id}-wiki > i.bi`)).toBeVisible()
+ } else {
+ // Notepad button
+ const hedgeDocLink = `https://notes.ietf.org/notes-ietf-${meetingData.meeting.number}-${event.type === 'plenary' ? 'plenary' : event.acronym}`
+ await expect(eventButtons.locator(`#btn-lnk-${event.id}-note`)).toHaveAttribute('href', hedgeDocLink)
+ await expect(eventButtons.locator(`#btn-lnk-${event.id}-note > i.bi`)).toBeVisible()
+ }
// Chat room
await expect(eventButtons.locator(`#btn-lnk-${event.id}-room`)).toHaveAttribute('href', event.links.chat)
await expect(eventButtons.locator(`#btn-lnk-${event.id}-room > i.bi`)).toBeVisible()
From 171e50b1cd2f57bd9c4b099e1d33e3444d127834 Mon Sep 17 00:00:00 2001
From: Nicolas Giard
Date: Tue, 17 Dec 2024 11:08:35 -0500
Subject: [PATCH 018/460] fix: homepage logo width (#8338)
* fix: homepage logo width
* fix: make Robert happy
---
ietf/static/css/ietf.scss | 6 +++++-
1 file changed, 5 insertions(+), 1 deletion(-)
diff --git a/ietf/static/css/ietf.scss b/ietf/static/css/ietf.scss
index e2d5cb3959..f1f2b94a19 100644
--- a/ietf/static/css/ietf.scss
+++ b/ietf/static/css/ietf.scss
@@ -236,9 +236,13 @@ th,
// Helper to constrain the size of the main logo
.ietflogo {
- width: 75%;
+ width: 100%;
max-width: 300px;
}
+.ietflogo > img {
+ min-width: 100px;
+ width: 100%;
+}
// Make revision numbers pagination items fixed-width
.revision-list {
From 427045d45687f242228ed830b51a5c21dc005eb4 Mon Sep 17 00:00:00 2001
From: Jennifer Richards
Date: Tue, 17 Dec 2024 15:09:44 -0400
Subject: [PATCH 019/460] chore: remove unused method (#8341)
* chore: remove unused method
* chore: adjust another comment
---
ietf/submit/utils.py | 69 ++------------------------------------------
1 file changed, 2 insertions(+), 67 deletions(-)
diff --git a/ietf/submit/utils.py b/ietf/submit/utils.py
index e6cbcb12f7..5906785048 100644
--- a/ietf/submit/utils.py
+++ b/ietf/submit/utils.py
@@ -770,70 +770,6 @@ def save_files(form):
log.log("saved file %s" % name)
return file_name
-def get_draft_meta(form, saved_files):
- authors = []
- file_name = saved_files
-
- if form.cleaned_data['xml']:
- # Some meta-information, such as the page-count, can only
- # be retrieved from the generated text file. Provide a
- # parsed draft object to get at that kind of information.
- file_name['txt'] = os.path.join(settings.IDSUBMIT_STAGING_PATH, '%s-%s.txt' % (form.filename, form.revision))
- file_size = os.stat(file_name['txt']).st_size
- with io.open(file_name['txt']) as txt_file:
- form.parsed_draft = PlaintextDraft(txt_file.read(), txt_file.name)
- else:
- file_size = form.cleaned_data['txt'].size
-
- if form.authors:
- authors = form.authors
- else:
- # If we don't have an xml file, try to extract the
- # relevant information from the text file
- for author in form.parsed_draft.get_author_list():
- full_name, first_name, middle_initial, last_name, name_suffix, email, country, company = author
-
- name = full_name.replace("\n", "").replace("\r", "").replace("<", "").replace(">", "").strip()
-
- if email:
- try:
- validate_email(email)
- except ValidationError:
- email = ""
-
- def turn_into_unicode(s):
- if s is None:
- return ""
-
- if isinstance(s, str):
- return s
- else:
- try:
- return s.decode("utf-8")
- except UnicodeDecodeError:
- try:
- return s.decode("latin-1")
- except UnicodeDecodeError:
- return ""
-
- name = turn_into_unicode(name)
- email = turn_into_unicode(email)
- company = turn_into_unicode(company)
-
- authors.append({
- "name": name,
- "email": email,
- "affiliation": company,
- "country": country
- })
-
- if form.abstract:
- abstract = form.abstract
- else:
- abstract = form.parsed_draft.get_abstract()
-
- return authors, abstract, file_name, file_size
-
def get_submission(form):
# See if there is a Submission in state waiting-for-draft
@@ -1272,8 +1208,7 @@ def process_submission_xml(filename, revision):
def _turn_into_unicode(s: Optional[Union[str, bytes]]):
"""Decode a possibly null string-like item as a string
- Copied from ietf.submit.utils.get_draft_meta(), would be nice to
- ditch this.
+ Would be nice to ditch this.
"""
if s is None:
return ""
@@ -1317,7 +1252,7 @@ def process_submission_text(filename, revision):
if title:
title = _normalize_title(title)
- # Drops \r, \n, <, >. Based on get_draft_meta() behavior
+ # Translation table drops \r, \n, <, >.
trans_table = str.maketrans("", "", "\r\n<>")
authors = [
{
From fdd378390f60ee666d47815940a7cede38d23dc1 Mon Sep 17 00:00:00 2001
From: Robert Sparks
Date: Thu, 19 Dec 2024 12:06:38 -0600
Subject: [PATCH 020/460] feat: copy the 1wg files to so... many... places...
(#8347)
---
ietf/group/tasks.py | 35 ++++++++++++++++++++---------------
ietf/group/tests_info.py | 2 ++
ietf/settings.py | 2 ++
3 files changed, 24 insertions(+), 15 deletions(-)
diff --git a/ietf/group/tasks.py b/ietf/group/tasks.py
index f19246fb55..8b4c994ba1 100644
--- a/ietf/group/tasks.py
+++ b/ietf/group/tasks.py
@@ -43,23 +43,28 @@ def generate_wg_charters_files_task():
encoding="utf8",
)
- charter_copy_dest = getattr(settings, "CHARTER_COPY_PATH", None)
- if charter_copy_dest is not None:
- if not Path(charter_copy_dest).is_dir():
- log.log(
- f"Error copying 1wg-charter files to {charter_copy_dest}: it does not exist or is not a directory"
- )
- else:
- try:
- shutil.copy2(charters_file, charter_copy_dest)
- except IOError as err:
- log.log(f"Error copying {charters_file} to {charter_copy_dest}: {err}")
- try:
- shutil.copy2(charters_by_acronym_file, charter_copy_dest)
- except IOError as err:
+ charter_copy_dests = [
+ getattr(settings, "CHARTER_COPY_PATH", None),
+ getattr(settings, "CHARTER_COPY_OTHER_PATH", None),
+ getattr(settings, "CHARTER_COPY_THIRD_PATH", None),
+ ]
+ for charter_copy_dest in charter_copy_dests:
+ if charter_copy_dest is not None:
+ if not Path(charter_copy_dest).is_dir():
log.log(
- f"Error copying {charters_by_acronym_file} to {charter_copy_dest}: {err}"
+ f"Error copying 1wg-charter files to {charter_copy_dest}: it does not exist or is not a directory"
)
+ else:
+ try:
+ shutil.copy2(charters_file, charter_copy_dest)
+ except IOError as err:
+ log.log(f"Error copying {charters_file} to {charter_copy_dest}: {err}")
+ try:
+ shutil.copy2(charters_by_acronym_file, charter_copy_dest)
+ except IOError as err:
+ log.log(
+ f"Error copying {charters_by_acronym_file} to {charter_copy_dest}: {err}"
+ )
@shared_task
diff --git a/ietf/group/tests_info.py b/ietf/group/tests_info.py
index 35c8b2b0b7..32d919c779 100644
--- a/ietf/group/tests_info.py
+++ b/ietf/group/tests_info.py
@@ -62,6 +62,8 @@ class GroupPagesTests(TestCase):
settings_temp_path_overrides = TestCase.settings_temp_path_overrides + [
"CHARTER_PATH",
"CHARTER_COPY_PATH",
+ "CHARTER_COPY_OTHER_PATH", # Note: not explicitly testing use of
+ "CHARTER_COPY_THIRD_PATH", # either of these settings
"GROUP_SUMMARY_PATH",
]
diff --git a/ietf/settings.py b/ietf/settings.py
index 4e678b3808..efd04c6068 100644
--- a/ietf/settings.py
+++ b/ietf/settings.py
@@ -744,6 +744,8 @@ def skip_unreadable_post(record):
RFC_PATH = '/a/www/ietf-ftp/rfc/'
CHARTER_PATH = '/a/ietfdata/doc/charter/'
CHARTER_COPY_PATH = '/a/www/ietf-ftp/ietf' # copy 1wg-charters files here if set
+CHARTER_COPY_OTHER_PATH = '/a/www/ftp/ietf'
+CHARTER_COPY_THIRD_PATH = '/a/www/ftp/charter'
GROUP_SUMMARY_PATH = '/a/www/ietf-ftp/ietf'
BOFREQ_PATH = '/a/ietfdata/doc/bofreq/'
CONFLICT_REVIEW_PATH = '/a/ietfdata/doc/conflict-review'
From d25af5e71bcb75b4b4ba61d41c3e7655b9975a57 Mon Sep 17 00:00:00 2001
From: Robert Sparks
Date: Thu, 19 Dec 2024 18:01:33 -0600
Subject: [PATCH 021/460] fix: clean ftp copy of repository when cleaning
repository (#8354)
---
ietf/doc/expire.py | 6 +++---
1 file changed, 3 insertions(+), 3 deletions(-)
diff --git a/ietf/doc/expire.py b/ietf/doc/expire.py
index 63955d091a..57af2ad91f 100644
--- a/ietf/doc/expire.py
+++ b/ietf/doc/expire.py
@@ -213,11 +213,11 @@ def splitext(fn):
def move_file_to(subdir):
# Similar to move_draft_files_to_archive
- # ghostlinkd would keep this in the combined all archive since it would
- # be sourced from a different place. But when ghostlinkd is removed, nothing
- # new is needed here - the file will already exist in the combined archive
shutil.move(path,
os.path.join(settings.INTERNET_DRAFT_ARCHIVE_DIR, subdir, basename))
+ mark = Path(settings.FTP_DIR) / "internet-drafts" / basename
+ if mark.exists():
+ mark.unlink()
try:
doc = Document.objects.get(name=filename, rev=revision)
From 8cadc6988f1bb815c2d35578b146ddc2c1daea6a Mon Sep 17 00:00:00 2001
From: Robert Sparks
Date: Fri, 20 Dec 2024 08:50:53 -0600
Subject: [PATCH 022/460] chore: remove obviated manual INSTALL instructions
(#8355)
---
dev/INSTALL | 157 ----------------------------------------------------
1 file changed, 157 deletions(-)
delete mode 100644 dev/INSTALL
diff --git a/dev/INSTALL b/dev/INSTALL
deleted file mode 100644
index 9a8004010d..0000000000
--- a/dev/INSTALL
+++ /dev/null
@@ -1,157 +0,0 @@
-==============================================================================
- IETF Datatracker
-==============================================================================
-
-------------------------------------------------------------------------------
- Installation Instructions
-------------------------------------------------------------------------------
-
-General Instructions for Deployment of a New Release
-====================================================
-
- 0. Prepare to hold different roles at different stages of the instructions below.
- You will need to be root, wwwrun, and some user in group docker.
- Consider using separate shells for the wwwrun and other roles. These instructions
- are written assuming you will only use one shell.
-
- 1. Make a directory to hold the new release as wwwrun::
- sudo su - -s /bin/bash wwwrun
- mkdir /a/www/ietf-datatracker/${releasenumber}
- cd /a/www/ietf-datatracker/${releasenumber}
-
- 2. Fetch the release tarball from github
- (see https://github.com/ietf-tools/datatracker/releases)::
-
- wget https://github.com/ietf-tools/datatracker/releases/download/${releasenumber}/release.tar.gz
- tar xzvf release.tar.gz
-
- 3. Copy ietf/settings_local.py from previous release::
-
- cp ../web/ietf/settings_local.py ietf/
-
- 4. Setup a new virtual environment and install requirements::
-
- python3.9 -mvenv env
- source env/bin/activate
- pip install -r requirements.txt
- pip freeze > frozen-requirements.txt
-
- (The pip freeze command records the exact versions of the Python libraries that pip installed.
- This is used by the celery docker container to ensure it uses the same library versions as
- the datatracker service.)
-
- 5. Move static files into place for CDN (/a/www/www6s/lib/dt):
-
- ietf/manage.py collectstatic
-
- 6. Run system checks (which patches the just installed modules)::
-
- ietf/manage.py check
-
- 7. Switch to the docker directory and update images as a user in group docker:
-
- exit
- cd /a/docker/datatracker
- docker image tag ghcr.io/ietf-tools/datatracker-celery:latest datatracker-celery-fallback
- docker image tag ghcr.io/ietf-tools/datatracker-mq:latest datatracker-mq-fallback
- docker-compose pull
-
- 8. Stop and remove the async task containers:
- Wait for this to finish cleanly. Usually this will only be a few seconds, but it may take up
- to about 10 minutes for the 'down' command to complete if a long-running task is in progress.
-
- docker-compose down
-
- 9. Stop the datatracker and remove the web link so cron or other applications
- don't run code in the older deployment.
-
- sudo systemctl stop datatracker.socket datatracker.service
- rm /a/www/ietf-datatracker/web
-
- 10. Return to the release directory and run migrations as wwwrun:
-
- sudo su - -s /bin/bash wwwrun
- cd /a/www/ietf-datatracker/${releasenumber}
- ietf/manage.py migrate
-
- Take note if any migrations were executed.
-
- 11. Back out one directory level, then re-point the 'web' symlink::
-
- cd ..
- ln -s ${releasenumber} web
-
- 12. Start the datatracker service (it is no longer necessary to restart apache) ::
-
- exit
- sudo systemctl start datatracker.service datatracker.socket
-
- 13. Start async task worker and message broker:
-
- cd /a/docker/datatracker
- bash startcommand
-
- 14. Verify operation:
-
- http://datatracker.ietf.org/
-
- 15. If install failed and there were no migrations at step 9, revert web symlink and docker update and repeat the
- restart in steps 11 and 12. To revert the docker update:
-
- cd /a/docker/datatracker
- docker-compose down
- docker image rm ghcr.io/ietf-tools/datatracker-celery:latest ghcr.io/ietf-tools/datatracker-mq:latest
- docker image tag datatracker-celery-fallback ghcr.io/ietf-tools/datatracker-celery:latest
- docker image tag datatracker-mq-fallback ghcr.io/ietf-tools/datatracker-mq:latest
- cd -
-
- If there were migrations at step 10, they will need to be reversed before the restart at step 12.
- If it's not obvious what to do to reverse the migrations, contact the dev team.
-
-
-Patching a Production Release
-=============================
-
-Sometimes it can prove necessary to patch an existing release.
-The following process should be used:
-
- 1. Code and test the patch on an copy of the release with any
- previously applied patches put in place.
-
- 2. Produce a patch file, named with date and subject::
-
- $ git diff > 2013-03-25-ballot-calculation.patch
-
- 3. Move the patch file to the production server, and place it in
- '/a/www/ietf-datatracker/patches/'
-
- 4. Make a recursive copy of the production code to a new directory, named with a patch number.
-
- /a/www/ietf-datatracker $ rsync -a web/ ${releasenumber}.p1/
-
- 5. Apply the patch::
-
- /a/www/ietf-datatracker $ cd ${releasenumber}.p1/
- /a/www/ietf-datatracker/${releasnumber}.p1 $ patch -p1 \
- < ../patches/2013-03-25-ballot-calculation.patch
-
- This must not produce any messages about failing to apply any chunks;
- if it does, go back to 1. and figure out why.
-
- 6. Edit ``.../ietf/__init__.py`` in the new patched release to indicate the patch
- version in the ``__patch__`` string.
-
- 7. Stop the async task container (this may take a few minutes if tasks are in progress):
-
- cd /a/docker/datatracker
- docker-compose down
-
- 8. Change the 'web' symlink, reload etc. as described in
- `General Instructions for Deployment of a New Release`_.
-
- 9. Start async task worker:
-
- cd /a/docker/datatracker
- bash startcommand
-
-
From 8b7fa1199a72133fd3dd87d148a48cf08ad033e8 Mon Sep 17 00:00:00 2001
From: Robert Sparks
Date: Fri, 20 Dec 2024 08:53:12 -0600
Subject: [PATCH 023/460] chore: remove single-use repair_dead_on_expire task
(#8348)
* chore: remove single-use repair_dead_on_expire task
* fix: remove repair_dead_on_expire
* chore: remove abandoned imports
---
ietf/doc/expire.py | 42 ++--------------------
ietf/doc/tasks.py | 6 ----
ietf/doc/tests_draft.py | 79 +++--------------------------------------
ietf/doc/tests_tasks.py | 5 ---
4 files changed, 6 insertions(+), 126 deletions(-)
diff --git a/ietf/doc/expire.py b/ietf/doc/expire.py
index 57af2ad91f..98554bae0e 100644
--- a/ietf/doc/expire.py
+++ b/ietf/doc/expire.py
@@ -13,10 +13,10 @@
from typing import List, Optional # pyflakes:ignore
-from ietf.doc.utils import new_state_change_event, update_action_holders
+from ietf.doc.utils import update_action_holders
from ietf.utils import log
from ietf.utils.mail import send_mail
-from ietf.doc.models import Document, DocEvent, State, StateDocEvent
+from ietf.doc.models import Document, DocEvent, State
from ietf.person.models import Person
from ietf.meeting.models import Meeting
from ietf.mailtrigger.utils import gather_address_lists
@@ -235,41 +235,3 @@ def move_file_to(subdir):
# All uses of this past 2014 seem related to major system failures.
move_file_to("unknown_ids")
-
-def repair_dead_on_expire():
- by = Person.objects.get(name="(System)")
- id_exists = State.objects.get(type="draft-iesg", slug="idexists")
- dead = State.objects.get(type="draft-iesg", slug="dead")
- dead_drafts = Document.objects.filter(
- states__type="draft-iesg", states__slug="dead", type_id="draft"
- )
- for d in dead_drafts:
- dead_event = d.latest_event(
- StateDocEvent, state_type="draft-iesg", state__slug="dead"
- )
- if dead_event is not None:
- if d.docevent_set.filter(type="expired_document").exists():
- closest_expiry = min(
- [
- abs(e.time - dead_event.time)
- for e in d.docevent_set.filter(type="expired_document")
- ]
- )
- if closest_expiry.total_seconds() < 60:
- d.set_state(id_exists)
- events = []
- e = DocEvent(
- doc=d,
- rev=d.rev,
- type="added_comment",
- by=by,
- desc="IESG Dead state was set due only to document expiry - changing IESG state to ID-Exists",
- )
- e.skip_community_list_notification = True
- e.save()
- events.append(e)
- e = new_state_change_event(d, by, dead, id_exists)
- e.skip_community_list_notification = True
- e.save()
- events.append(e)
- d.save_with_history(events)
diff --git a/ietf/doc/tasks.py b/ietf/doc/tasks.py
index b7f89e1f9b..f1de459dd8 100644
--- a/ietf/doc/tasks.py
+++ b/ietf/doc/tasks.py
@@ -18,7 +18,6 @@
in_draft_expire_freeze,
get_expired_drafts,
expirable_drafts,
- repair_dead_on_expire,
send_expire_notice_for_draft,
expire_draft,
clean_up_draft_files,
@@ -62,11 +61,6 @@ def expire_ids_task():
raise
-@shared_task
-def repair_dead_on_expire_task():
- repair_dead_on_expire()
-
-
@shared_task
def notify_expirations_task(notify_days=14):
for doc in get_soon_to_expire_drafts(notify_days):
diff --git a/ietf/doc/tests_draft.py b/ietf/doc/tests_draft.py
index 84959625c9..2405806682 100644
--- a/ietf/doc/tests_draft.py
+++ b/ietf/doc/tests_draft.py
@@ -19,10 +19,10 @@
import debug # pyflakes:ignore
-from ietf.doc.expire import expirable_drafts, get_expired_drafts, repair_dead_on_expire, send_expire_notice_for_draft, expire_draft
-from ietf.doc.factories import EditorialDraftFactory, IndividualDraftFactory, StateDocEventFactory, WgDraftFactory, RgDraftFactory, DocEventFactory
+from ietf.doc.expire import expirable_drafts, get_expired_drafts, send_expire_notice_for_draft, expire_draft
+from ietf.doc.factories import EditorialDraftFactory, IndividualDraftFactory, WgDraftFactory, RgDraftFactory, DocEventFactory
from ietf.doc.models import ( Document, DocReminder, DocEvent,
- ConsensusDocEvent, LastCallDocEvent, RelatedDocument, State, StateDocEvent, TelechatDocEvent,
+ ConsensusDocEvent, LastCallDocEvent, RelatedDocument, State, TelechatDocEvent,
WriteupDocEvent, DocRelationshipName, IanaExpertDocEvent )
from ietf.doc.utils import get_tags_for_stream_id, create_ballot_if_not_open
from ietf.doc.views_draft import AdoptDraftForm
@@ -36,7 +36,7 @@
from ietf.utils.test_utils import login_testing_unauthorized
from ietf.utils.mail import outbox, empty_outbox, get_payload_text
from ietf.utils.test_utils import TestCase
-from ietf.utils.timezone import date_today, datetime_today, datetime_from_date, DEADLINE_TZINFO
+from ietf.utils.timezone import date_today, datetime_from_date, DEADLINE_TZINFO
class ChangeStateTests(TestCase):
@@ -845,77 +845,6 @@ def test_clean_up_draft_files(self):
self.assertTrue(not os.path.exists(os.path.join(settings.INTERNET_DRAFT_PATH, txt)))
self.assertTrue(os.path.exists(os.path.join(settings.INTERNET_DRAFT_ARCHIVE_DIR, txt)))
- @mock.patch("ietf.community.signals.notify_of_event")
- def test_repair_dead_on_expire(self, mock_notify):
-
- # Create a draft in iesg idexists - ensure it doesn't get new docevents.
- # Create a draft in iesg dead with no expires within the window - ensure it doesn't get new docevents and its state doesn't change.
- # Create a draft in iesg dead with an expiry in the window - ensure it gets the right doc events, iesg state changes, draft state doesn't change.
- last_year = datetime_today() - datetime.timedelta(days=365)
-
- not_dead = WgDraftFactory(name="draft-not-dead")
- not_dead_event_count = not_dead.docevent_set.count()
-
- dead_not_from_expires = WgDraftFactory(name="draft-dead-not-from-expiring")
- dead_not_from_expires.set_state(
- State.objects.get(type="draft-iesg", slug="dead")
- )
- StateDocEventFactory(
- doc=dead_not_from_expires, state=("draft-iesg", "dead"), time=last_year
- )
- DocEventFactory(
- doc=dead_not_from_expires,
- type="expired_document",
- time=last_year + datetime.timedelta(days=1),
- )
- dead_not_from_expires_event_count = dead_not_from_expires.docevent_set.count()
-
- dead_from_expires = []
- dead_from_expires_event_count = dict()
- for delta in [-5, 5]:
- d = WgDraftFactory(
- name=f"draft-dead-from-expiring-just-{'before' if delta<0 else 'after'}"
- )
- d.set_state(State.objects.get(type="draft-iesg", slug="dead"))
- StateDocEventFactory(doc=d, state=("draft-iesg", "dead"), time=last_year)
- DocEventFactory(
- doc=d,
- type="expired_document",
- time=last_year + datetime.timedelta(seconds=delta),
- )
- dead_from_expires.append(d)
- dead_from_expires_event_count[d] = d.docevent_set.count()
-
- notified_during_factory_work = mock_notify.call_count
- for call_args in mock_notify.call_args_list:
- e = call_args.args[0]
- self.assertTrue(isinstance(e,DocEvent))
- self.assertFalse(hasattr(e,"skip_community_list_notification"))
-
- repair_dead_on_expire()
-
- self.assertEqual(not_dead.docevent_set.count(), not_dead_event_count)
- self.assertEqual(
- dead_not_from_expires.docevent_set.count(),
- dead_not_from_expires_event_count,
- )
- for d in dead_from_expires:
- self.assertEqual(
- d.docevent_set.count(), dead_from_expires_event_count[d] + 2
- )
- self.assertIn(
- "due only to document expiry", d.latest_event(type="added_comment").desc
- )
- self.assertEqual(
- d.latest_event(StateDocEvent).desc,
- "IESG state changed to I-D Exists from Dead",
- )
- self.assertEqual(mock_notify.call_count, 4+notified_during_factory_work)
- for call_args in mock_notify.call_args_list[-4:]:
- e = call_args.args[0]
- self.assertTrue(isinstance(e,DocEvent))
- self.assertTrue(hasattr(e,"skip_community_list_notification"))
- self.assertTrue(e.skip_community_list_notification)
class ExpireLastCallTests(TestCase):
def test_expire_last_call(self):
diff --git a/ietf/doc/tests_tasks.py b/ietf/doc/tests_tasks.py
index 135b52f604..67997acd85 100644
--- a/ietf/doc/tests_tasks.py
+++ b/ietf/doc/tests_tasks.py
@@ -21,7 +21,6 @@
generate_idnits2_rfcs_obsoleted_task,
generate_idnits2_rfc_status_task,
notify_expirations_task,
- repair_dead_on_expire_task,
)
class TaskTests(TestCase):
@@ -99,10 +98,6 @@ def test_expire_last_calls_task(self, mock_get_expired, mock_expire):
self.assertEqual(mock_expire.call_args_list[1], mock.call(docs[1]))
self.assertEqual(mock_expire.call_args_list[2], mock.call(docs[2]))
- @mock.patch("ietf.doc.tasks.repair_dead_on_expire")
- def test_repair_dead_on_expire_task(self, mock_repair):
- repair_dead_on_expire_task()
- self.assertEqual(mock_repair.call_count, 1)
class Idnits2SupportTests(TestCase):
settings_temp_path_overrides = TestCase.settings_temp_path_overrides + ['DERIVED_DIR']
From 7d464fcc558a0e03a535e23bb0dd01f6e7a8dabd Mon Sep 17 00:00:00 2001
From: Robert Sparks
Date: Thu, 9 Jan 2025 11:23:20 -0600
Subject: [PATCH 024/460] fix: keep the ftp dir in sync when the datatracker
creates artifacts (#8401)
* chore: fix typo in log message
* fix: write new conflict reviews to the ftp directory
* fix: log exception description
* chore: avoid reusing a name for mypy
* fix: write non-meeting-related slides to ftp/slides
* fix: write status changes to ftp dir
* fix: copy draft yang modules to the ftp dir
the last commit above (see #8401) has no tests :(. There aren't tests of this yang scaffolding to add to. Adding a set of tests should be an independent larger effort.
---
ietf/doc/tests_conflict_review.py | 14 +++++++++++---
ietf/doc/tests_material.py | 13 +++++++++++--
ietf/doc/tests_status_change.py | 17 +++++++++++++----
ietf/doc/utils_charter.py | 6 +++---
ietf/doc/views_conflict_review.py | 14 ++++++++++++--
ietf/doc/views_material.py | 16 ++++++++++++++--
ietf/doc/views_status_change.py | 23 +++++++++++++++++------
ietf/submit/checkers.py | 10 ++++++++++
ietf/submit/utils.py | 4 ++++
9 files changed, 95 insertions(+), 22 deletions(-)
diff --git a/ietf/doc/tests_conflict_review.py b/ietf/doc/tests_conflict_review.py
index 485f5655eb..d2f94922b2 100644
--- a/ietf/doc/tests_conflict_review.py
+++ b/ietf/doc/tests_conflict_review.py
@@ -4,6 +4,7 @@
import io
import os
+from pathlib import Path
from pyquery import PyQuery
from textwrap import wrap
@@ -387,7 +388,7 @@ def setUp(self):
class ConflictReviewSubmitTests(TestCase):
- settings_temp_path_overrides = TestCase.settings_temp_path_overrides + ['CONFLICT_REVIEW_PATH']
+ settings_temp_path_overrides = TestCase.settings_temp_path_overrides + ['CONFLICT_REVIEW_PATH','FTP_PATH']
def test_initial_submission(self):
doc = Document.objects.get(name='conflict-review-imaginary-irtf-submission')
url = urlreverse('ietf.doc.views_conflict_review.submit',kwargs=dict(name=doc.name))
@@ -403,9 +404,15 @@ def test_initial_submission(self):
# Right now, nothing to test - we let people put whatever the web browser will let them put into that textbox
# sane post using textbox
- path = os.path.join(settings.CONFLICT_REVIEW_PATH, '%s-%s.txt' % (doc.name, doc.rev))
+ basename = f"{doc.name}-{doc.rev}.txt"
+ path = Path(settings.CONFLICT_REVIEW_PATH) / basename
+ ftp_dir = Path(settings.FTP_DIR) / "conflict-reviews"
+ if not ftp_dir.exists():
+ ftp_dir.mkdir()
+ ftp_path = ftp_dir / basename
self.assertEqual(doc.rev,'00')
- self.assertFalse(os.path.exists(path))
+ self.assertFalse(path.exists())
+ self.assertFalse(ftp_path.exists())
r = self.client.post(url,dict(content="Some initial review text\n",submit_response="1"))
self.assertEqual(r.status_code,302)
doc = Document.objects.get(name='conflict-review-imaginary-irtf-submission')
@@ -413,6 +420,7 @@ def test_initial_submission(self):
with io.open(path) as f:
self.assertEqual(f.read(),"Some initial review text\n")
f.close()
+ self.assertTrue(ftp_path.exists())
self.assertTrue( "submission-00" in doc.latest_event(NewRevisionDocEvent).desc)
def test_subsequent_submission(self):
diff --git a/ietf/doc/tests_material.py b/ietf/doc/tests_material.py
index 065ff09a98..aaea8fec3d 100644
--- a/ietf/doc/tests_material.py
+++ b/ietf/doc/tests_material.py
@@ -28,7 +28,7 @@
class GroupMaterialTests(TestCase):
- settings_temp_path_overrides = TestCase.settings_temp_path_overrides + ['AGENDA_PATH']
+ settings_temp_path_overrides = TestCase.settings_temp_path_overrides + ['AGENDA_PATH', 'FTP_DIR']
def setUp(self):
super().setUp()
self.materials_dir = self.tempdir("materials")
@@ -37,6 +37,10 @@ def setUp(self):
self.slides_dir.mkdir()
self.saved_document_path_pattern = settings.DOCUMENT_PATH_PATTERN
settings.DOCUMENT_PATH_PATTERN = self.materials_dir + "/{doc.type_id}/"
+ self.assertTrue(Path(settings.FTP_DIR).exists())
+ ftp_slides_dir = Path(settings.FTP_DIR) / "slides"
+ if not ftp_slides_dir.exists():
+ ftp_slides_dir.mkdir()
self.meeting_slides_dir = Path(settings.AGENDA_PATH) / "42" / "slides"
if not self.meeting_slides_dir.exists():
@@ -112,7 +116,12 @@ def test_upload_slides(self):
self.assertEqual(doc.title, "Test File - with fancy title")
self.assertEqual(doc.get_state_slug(), "active")
- with io.open(os.path.join(self.materials_dir, "slides", doc.name + "-" + doc.rev + ".pdf")) as f:
+ basename=f"{doc.name}-{doc.rev}.pdf"
+ filepath=Path(self.materials_dir) / "slides" / basename
+ with filepath.open() as f:
+ self.assertEqual(f.read(), content)
+ ftp_filepath=Path(settings.FTP_DIR) / "slides" / basename
+ with ftp_filepath.open() as f:
self.assertEqual(f.read(), content)
# check that posting same name is prevented
diff --git a/ietf/doc/tests_status_change.py b/ietf/doc/tests_status_change.py
index bec48ed4ef..bd4da4c092 100644
--- a/ietf/doc/tests_status_change.py
+++ b/ietf/doc/tests_status_change.py
@@ -4,6 +4,7 @@
import io
import os
+from pathlib import Path
import debug # pyflakes:ignore
@@ -540,7 +541,7 @@ def setUp(self):
DocumentFactory(type_id='statchg',name='status-change-imaginary-mid-review',notify='notify@example.org')
class StatusChangeSubmitTests(TestCase):
- settings_temp_path_overrides = TestCase.settings_temp_path_overrides + ['STATUS_CHANGE_PATH']
+ settings_temp_path_overrides = TestCase.settings_temp_path_overrides + ['STATUS_CHANGE_PATH', 'FTP_DIR']
def test_initial_submission(self):
doc = Document.objects.get(name='status-change-imaginary-mid-review')
url = urlreverse('ietf.doc.views_status_change.submit',kwargs=dict(name=doc.name))
@@ -556,14 +557,19 @@ def test_initial_submission(self):
# Right now, nothing to test - we let people put whatever the web browser will let them put into that textbox
# sane post using textbox
- path = os.path.join(settings.STATUS_CHANGE_PATH, '%s-%s.txt' % (doc.name, doc.rev))
self.assertEqual(doc.rev,'00')
- self.assertFalse(os.path.exists(path))
+ basename = f"{doc.name}-{doc.rev}.txt"
+ filepath = Path(settings.STATUS_CHANGE_PATH) / basename
+ ftp_filepath = Path(settings.FTP_DIR) / "status-changes" / basename
+ self.assertFalse(filepath.exists())
+ self.assertFalse(ftp_filepath.exists())
r = self.client.post(url,dict(content="Some initial review text\n",submit_response="1"))
self.assertEqual(r.status_code,302)
doc = Document.objects.get(name='status-change-imaginary-mid-review')
self.assertEqual(doc.rev,'00')
- with io.open(path) as f:
+ with filepath.open() as f:
+ self.assertEqual(f.read(),"Some initial review text\n")
+ with ftp_filepath.open() as f:
self.assertEqual(f.read(),"Some initial review text\n")
self.assertTrue( "mid-review-00" in doc.latest_event(NewRevisionDocEvent).desc)
@@ -628,3 +634,6 @@ def test_subsequent_submission(self):
def setUp(self):
super().setUp()
DocumentFactory(type_id='statchg',name='status-change-imaginary-mid-review',notify='notify@example.org')
+ ftp_subdir=Path(settings.FTP_DIR)/"status-changes"
+ if not ftp_subdir.exists():
+ ftp_subdir.mkdir()
diff --git a/ietf/doc/utils_charter.py b/ietf/doc/utils_charter.py
index b29d1e303c..287ce8cece 100644
--- a/ietf/doc/utils_charter.py
+++ b/ietf/doc/utils_charter.py
@@ -112,10 +112,10 @@ def fix_charter_revision_after_approval(charter, by):
)
try:
os.link(new, ftp_filepath)
- except IOError:
+ except IOError as ex:
log(
- "There was an error creating a harlink at %s pointing to %s"
- % (ftp_filepath, new)
+ "There was an error creating a hardlink at %s pointing to %s: %s"
+ % (ftp_filepath, new, ex)
)
events = []
diff --git a/ietf/doc/views_conflict_review.py b/ietf/doc/views_conflict_review.py
index ec5a18c7a2..e55661ccdf 100644
--- a/ietf/doc/views_conflict_review.py
+++ b/ietf/doc/views_conflict_review.py
@@ -5,6 +5,7 @@
import datetime
import io
import os
+from pathlib import Path
from django import forms
from django.shortcuts import render, get_object_or_404, redirect
@@ -181,12 +182,21 @@ def clean_txt(self):
return get_cleaned_text_file_content(self.cleaned_data["txt"])
def save(self, review):
- filename = os.path.join(settings.CONFLICT_REVIEW_PATH, '%s-%s.txt' % (review.name, review.rev))
- with io.open(filename, 'w', encoding='utf-8') as destination:
+ basename = f"{review.name}-{review.rev}.txt"
+ filepath = Path(settings.CONFLICT_REVIEW_PATH) / basename
+ with filepath.open('w', encoding='utf-8') as destination:
if self.cleaned_data['txt']:
destination.write(self.cleaned_data['txt'])
else:
destination.write(self.cleaned_data['content'])
+ ftp_filepath = Path(settings.FTP_DIR) / "conflict-reviews" / basename
+ try:
+ os.link(filepath, ftp_filepath) # Path.hardlink_to is not available until 3.10
+ except IOError as e:
+ log.log(
+ "There was an error creating a hardlink at %s pointing to %s: %s"
+ % (ftp_filepath, filepath, e)
+ )
#This is very close to submit on charter - can we get better reuse?
@role_required('Area Director','Secretariat')
diff --git a/ietf/doc/views_material.py b/ietf/doc/views_material.py
index b646ecf2fc..361bf5f1e2 100644
--- a/ietf/doc/views_material.py
+++ b/ietf/doc/views_material.py
@@ -3,8 +3,8 @@
# views for managing group materials (slides, ...)
-import io
import os
+from pathlib import Path
import re
from django import forms
@@ -162,9 +162,21 @@ def edit_material(request, name=None, acronym=None, action=None, doc_type=None):
f = form.cleaned_data["material"]
file_ext = os.path.splitext(f.name)[1]
- with io.open(os.path.join(doc.get_file_path(), doc.name + "-" + doc.rev + file_ext), 'wb+') as dest:
+ basename = f"{doc.name}-{doc.rev}{file_ext}" # Note the lack of a . before file_ext - see os.path.splitext
+ filepath = Path(doc.get_file_path()) / basename
+ with filepath.open('wb+') as dest:
for chunk in f.chunks():
dest.write(chunk)
+ if not doc.meeting_related():
+ log.assertion('doc.type_id == "slides"')
+ ftp_filepath = Path(settings.FTP_DIR) / doc.type_id / basename
+ try:
+ os.link(filepath, ftp_filepath) # Path.hardlink_to is not available until 3.10
+ except IOError as ex:
+ log.log(
+ "There was an error creating a hardlink at %s pointing to %s: %s"
+ % (ftp_filepath, filepath, ex)
+ )
if prev_rev != doc.rev:
e = NewRevisionDocEvent(type="new_revision", doc=doc, rev=doc.rev)
diff --git a/ietf/doc/views_status_change.py b/ietf/doc/views_status_change.py
index 9034971ebf..33b822348a 100644
--- a/ietf/doc/views_status_change.py
+++ b/ietf/doc/views_status_change.py
@@ -5,6 +5,7 @@
import datetime
import io
import os
+from pathlib import Path
import re
from typing import Dict # pyflakes:ignore
@@ -33,6 +34,7 @@
from ietf.mailtrigger.utils import gather_address_lists
from ietf.name.models import DocRelationshipName, StdLevelName
from ietf.person.models import Person
+from ietf.utils.log import log
from ietf.utils.mail import send_mail_preformatted
from ietf.utils.textupload import get_cleaned_text_file_content
from ietf.utils.timezone import date_today, DEADLINE_TZINFO
@@ -154,12 +156,21 @@ def clean_txt(self):
return get_cleaned_text_file_content(self.cleaned_data["txt"])
def save(self, doc):
- filename = os.path.join(settings.STATUS_CHANGE_PATH, '%s-%s.txt' % (doc.name, doc.rev))
- with io.open(filename, 'w', encoding='utf-8') as destination:
- if self.cleaned_data['txt']:
- destination.write(self.cleaned_data['txt'])
- else:
- destination.write(self.cleaned_data['content'])
+ basename = f"{doc.name}-{doc.rev}.txt"
+ filename = Path(settings.STATUS_CHANGE_PATH) / basename
+ with io.open(filename, 'w', encoding='utf-8') as destination:
+ if self.cleaned_data['txt']:
+ destination.write(self.cleaned_data['txt'])
+ else:
+ destination.write(self.cleaned_data['content'])
+ try:
+ ftp_filename = Path(settings.FTP_DIR) / "status-changes" / basename
+ os.link(filename, ftp_filename) # Path.hardlink_to is not available until 3.10
+ except IOError as ex:
+ log(
+ "There was an error creating a hardlink at %s pointing to %s: %s"
+ % (ftp_filename, filename, ex)
+ )
#This is very close to submit on charter - can we get better reuse?
@role_required('Area Director','Secretariat')
diff --git a/ietf/submit/checkers.py b/ietf/submit/checkers.py
index d29e2a2355..89908748a7 100644
--- a/ietf/submit/checkers.py
+++ b/ietf/submit/checkers.py
@@ -4,6 +4,7 @@
import io
import os
+from pathlib import Path
import re
import shutil
import sys
@@ -280,6 +281,15 @@ def check_file_txt(self, path):
dest = os.path.join(settings.SUBMIT_YANG_DRAFT_MODEL_DIR, model)
shutil.move(path, dest)
+ ftp_dest = Path(settings.FTP_DIR) / "yang" / "draftmod" / model
+ try:
+ os.link(dest, ftp_dest)
+ except IOError as ex:
+ log(
+ "There was an error creating a hardlink at %s pointing to %s: %s"
+ % (ftp_dest, dest, ex)
+ )
+
# summary result
results.append({
diff --git a/ietf/submit/utils.py b/ietf/submit/utils.py
index 5906785048..16cccc9b59 100644
--- a/ietf/submit/utils.py
+++ b/ietf/submit/utils.py
@@ -8,6 +8,7 @@
import os
import pathlib
import re
+import subprocess
import sys
import time
import traceback
@@ -1596,3 +1597,6 @@ def active(dirent):
modfile.unlink()
except UnicodeDecodeError as e:
log.log(f"Error processing {item.name}: {e}")
+
+ ftp_moddir = Path(settings.FTP_DIR) / "yang" / "draftmod"
+ subprocess.call(("/usr/bin/rsync", "-aq", "--delete", moddir, ftp_moddir))
From e5c4a9f2983a8d47842ad3e7a76167364fd2081d Mon Sep 17 00:00:00 2001
From: Robert Sparks
Date: Thu, 9 Jan 2025 13:07:51 -0600
Subject: [PATCH 025/460] feat: additional filesystem monitoring (#8405)
* feat: additional filesystem monitoring
* chore: rename setting for tmp directory
* fix: restructure path to new endpoint
---------
Co-authored-by: Jennifer Richards
---
dev/deploy-to-container/settings_local.py | 1 +
dev/diff/settings_local.py | 1 +
dev/tests/settings_local.py | 1 +
docker/configs/settings_local.py | 1 +
docker/scripts/app-create-dirs.sh | 1 +
ietf/api/tests.py | 8 ++++++++
ietf/api/urls.py | 2 ++
ietf/api/views.py | 20 +++++++++++++++++++-
ietf/settings.py | 1 +
9 files changed, 35 insertions(+), 1 deletion(-)
diff --git a/dev/deploy-to-container/settings_local.py b/dev/deploy-to-container/settings_local.py
index 07bf0a7511..0a991ae9fe 100644
--- a/dev/deploy-to-container/settings_local.py
+++ b/dev/deploy-to-container/settings_local.py
@@ -64,6 +64,7 @@
BIBXML_BASE_PATH = '/assets/ietfdata/derived/bibxml'
IDSUBMIT_REPOSITORY_PATH = INTERNET_DRAFT_PATH
FTP_DIR = '/assets/ftp'
+NFS_METRICS_TMP_DIR = '/assets/tmp'
NOMCOM_PUBLIC_KEYS_DIR = 'data/nomcom_keys/public_keys/'
SLIDE_STAGING_PATH = '/test/staging/'
diff --git a/dev/diff/settings_local.py b/dev/diff/settings_local.py
index 6bcee46b61..95d1e481c9 100644
--- a/dev/diff/settings_local.py
+++ b/dev/diff/settings_local.py
@@ -60,6 +60,7 @@
INTERNET_ALL_DRAFTS_ARCHIVE_DIR = '/assets/ietf-ftp/internet-drafts/'
BIBXML_BASE_PATH = '/assets/ietfdata/derived/bibxml'
FTP_DIR = '/assets/ftp'
+NFS_METRICS_TMP_DIR = '/assets/tmp'
NOMCOM_PUBLIC_KEYS_DIR = 'data/nomcom_keys/public_keys/'
SLIDE_STAGING_PATH = 'test/staging/'
diff --git a/dev/tests/settings_local.py b/dev/tests/settings_local.py
index afadb3760b..7b10bee06a 100644
--- a/dev/tests/settings_local.py
+++ b/dev/tests/settings_local.py
@@ -59,6 +59,7 @@
INTERNET_ALL_DRAFTS_ARCHIVE_DIR = '/assets/ietf-ftp/internet-drafts/'
BIBXML_BASE_PATH = '/assets/ietfdata/derived/bibxml'
FTP_DIR = '/assets/ftp'
+NFS_METRICS_TMP_DIR = '/assets/tmp'
NOMCOM_PUBLIC_KEYS_DIR = 'data/nomcom_keys/public_keys/'
SLIDE_STAGING_PATH = 'test/staging/'
diff --git a/docker/configs/settings_local.py b/docker/configs/settings_local.py
index a1c19c80cf..5df5d15e82 100644
--- a/docker/configs/settings_local.py
+++ b/docker/configs/settings_local.py
@@ -50,6 +50,7 @@
BIBXML_BASE_PATH = '/assets/ietfdata/derived/bibxml'
IDSUBMIT_REPOSITORY_PATH = INTERNET_DRAFT_PATH
FTP_DIR = '/assets/ftp'
+NFS_METRICS_TMP_DIR = '/assets/tmp'
NOMCOM_PUBLIC_KEYS_DIR = 'data/nomcom_keys/public_keys/'
SLIDE_STAGING_PATH = '/assets/www6s/staging/'
diff --git a/docker/scripts/app-create-dirs.sh b/docker/scripts/app-create-dirs.sh
index 50431f4793..3eb328a280 100755
--- a/docker/scripts/app-create-dirs.sh
+++ b/docker/scripts/app-create-dirs.sh
@@ -29,6 +29,7 @@ for sub in \
/assets/www6/iesg \
/assets/www6/iesg/evaluation \
/assets/media/photo \
+ /assets/tmp \
/assets/ftp \
/assets/ftp/charter \
/assets/ftp/internet-drafts \
diff --git a/ietf/api/tests.py b/ietf/api/tests.py
index a8d6ac4e57..6e7fb374fe 100644
--- a/ietf/api/tests.py
+++ b/ietf/api/tests.py
@@ -970,6 +970,14 @@ def test_api_appauth(self):
self.assertEqual(jsondata['success'], True)
self.client.logout()
+ @override_settings(APP_API_TOKENS={"ietf.api.views.nfs_metrics": ["valid-token"]})
+ def test_api_nfs_metrics(self):
+ url = urlreverse("ietf.api.views.nfs_metrics")
+ r = self.client.get(url)
+ self.assertEqual(r.status_code, 403)
+ r = self.client.get(url, headers={"X-Api-Key": "valid-token"})
+ self.assertContains(r, 'nfs_latency_seconds{operation="write"}')
+
def test_api_get_session_matherials_no_agenda_meeting_url(self):
meeting = MeetingFactory(type_id='ietf')
session = SessionFactory(meeting=meeting)
diff --git a/ietf/api/urls.py b/ietf/api/urls.py
index a9aaaf5805..b0dbaf91ce 100644
--- a/ietf/api/urls.py
+++ b/ietf/api/urls.py
@@ -82,6 +82,8 @@
url(r'^version/?$', api_views.version),
# Application authentication API key
url(r'^appauth/(?P<app>authortools|bibxml)$', api_views.app_auth),
+ # NFS metrics endpoint
+ url(r'^metrics/nfs/?$', api_views.nfs_metrics),
# latest versions
url(r'^rfcdiff-latest-json/%(name)s(?:-%(rev)s)?(\.txt|\.html)?/?$' % settings.URL_REGEXPS, api_views.rfcdiff_latest_json),
url(r'^rfcdiff-latest-json/(?P<name>[Rr][Ff][Cc] [0-9]+?)(\.txt|\.html)?/?$', api_views.rfcdiff_latest_json),
diff --git a/ietf/api/views.py b/ietf/api/views.py
index 3e56757528..2fd9d2730f 100644
--- a/ietf/api/views.py
+++ b/ietf/api/views.py
@@ -3,7 +3,10 @@
import base64
import binascii
+import datetime
import json
+from pathlib import Path
+from tempfile import NamedTemporaryFile
import jsonschema
import pytz
import re
@@ -264,7 +267,22 @@ def app_auth(request, app: Literal["authortools", "bibxml"]):
json.dumps({'success': True}),
content_type='application/json')
-
+@requires_api_token
+@csrf_exempt
+def nfs_metrics(request):
+ with NamedTemporaryFile(dir=settings.NFS_METRICS_TMP_DIR,delete=False) as fp:
+ fp.close()
+ mark = datetime.datetime.now()
+ with open(fp.name, mode="w") as f:
+ f.write("whyioughta"*1024)
+ write_latency = (datetime.datetime.now() - mark).total_seconds()
+ mark = datetime.datetime.now()
+ with open(fp.name, "r") as f:
+ _=f.read()
+ read_latency = (datetime.datetime.now() - mark).total_seconds()
+ Path(f.name).unlink()
+ response=f'nfs_latency_seconds{{operation="write"}} {write_latency}\nnfs_latency_seconds{{operation="read"}} {read_latency}\n'
+ return HttpResponse(response)
def find_doc_for_rfcdiff(name, rev):
"""rfcdiff lookup heuristics
diff --git a/ietf/settings.py b/ietf/settings.py
index efd04c6068..b452864be6 100644
--- a/ietf/settings.py
+++ b/ietf/settings.py
@@ -761,6 +761,7 @@ def skip_unreadable_post(record):
DERIVED_DIR = '/a/ietfdata/derived'
FTP_DIR = '/a/ftp'
ALL_ID_DOWNLOAD_DIR = '/a/www/www6s/download'
+NFS_METRICS_TMP_DIR = '/a/tmp'
DOCUMENT_FORMAT_ALLOWLIST = ["txt", "ps", "pdf", "xml", "html", ]
From e108a3ca2c81b31c23775bba2f443bd1d2d15d23 Mon Sep 17 00:00:00 2001
From: Nicolas Giard
Date: Fri, 10 Jan 2025 15:05:44 -0500
Subject: [PATCH 026/460] chore(k8s): add inter-pod affinity
---
k8s/auth.yaml | 10 ++++++++++
k8s/beat.yaml | 10 ++++++++++
k8s/celery.yaml | 10 ++++++++++
k8s/memcached.yaml | 10 ++++++++++
k8s/rabbitmq.yaml | 10 ++++++++++
5 files changed, 50 insertions(+)
diff --git a/k8s/auth.yaml b/k8s/auth.yaml
index 2bdb064447..392e306b54 100644
--- a/k8s/auth.yaml
+++ b/k8s/auth.yaml
@@ -15,6 +15,16 @@ spec:
labels:
app: auth
spec:
+ affinity:
+ podAffinity:
+ requiredDuringSchedulingIgnoredDuringExecution:
+ - labelSelector:
+ matchExpressions:
+ - key: app
+ operator: In
+ values:
+ - datatracker
+ topologyKey: "kubernetes.io/hostname"
securityContext:
runAsNonRoot: true
containers:
diff --git a/k8s/beat.yaml b/k8s/beat.yaml
index 72d74e11e4..9a8fe2f0a4 100644
--- a/k8s/beat.yaml
+++ b/k8s/beat.yaml
@@ -17,6 +17,16 @@ spec:
labels:
app: beat
spec:
+ affinity:
+ podAffinity:
+ requiredDuringSchedulingIgnoredDuringExecution:
+ - labelSelector:
+ matchExpressions:
+ - key: app
+ operator: In
+ values:
+ - datatracker
+ topologyKey: "kubernetes.io/hostname"
securityContext:
runAsNonRoot: true
containers:
diff --git a/k8s/celery.yaml b/k8s/celery.yaml
index 10f58f0161..15f3bf0c7c 100644
--- a/k8s/celery.yaml
+++ b/k8s/celery.yaml
@@ -17,6 +17,16 @@ spec:
labels:
app: celery
spec:
+ affinity:
+ podAffinity:
+ requiredDuringSchedulingIgnoredDuringExecution:
+ - labelSelector:
+ matchExpressions:
+ - key: app
+ operator: In
+ values:
+ - datatracker
+ topologyKey: "kubernetes.io/hostname"
securityContext:
runAsNonRoot: true
containers:
diff --git a/k8s/memcached.yaml b/k8s/memcached.yaml
index 5a4c9f0aed..8f73f3d0d5 100644
--- a/k8s/memcached.yaml
+++ b/k8s/memcached.yaml
@@ -13,6 +13,16 @@ spec:
labels:
app: memcached
spec:
+ affinity:
+ podAffinity:
+ requiredDuringSchedulingIgnoredDuringExecution:
+ - labelSelector:
+ matchExpressions:
+ - key: app
+ operator: In
+ values:
+ - datatracker
+ topologyKey: "kubernetes.io/hostname"
securityContext:
runAsNonRoot: true
containers:
diff --git a/k8s/rabbitmq.yaml b/k8s/rabbitmq.yaml
index b016b3a5ab..3cab7ff565 100644
--- a/k8s/rabbitmq.yaml
+++ b/k8s/rabbitmq.yaml
@@ -13,6 +13,16 @@ spec:
labels:
app: rabbitmq
spec:
+ affinity:
+ podAffinity:
+ requiredDuringSchedulingIgnoredDuringExecution:
+ - labelSelector:
+ matchExpressions:
+ - key: app
+ operator: In
+ values:
+ - datatracker
+ topologyKey: "kubernetes.io/hostname"
securityContext:
runAsNonRoot: true
containers:
From e1af5e7049f272556bcc5974c5f2d3c81fab9721 Mon Sep 17 00:00:00 2001
From: Jennifer Richards
Date: Fri, 10 Jan 2025 19:02:47 -0400
Subject: [PATCH 027/460] chore: remove dumprelated.py and loadrelated.py
(#8412)
* fix: correctly disconnect post_save signal
* chore: remove dumprelated.py and loadrelated.py
These have not been used in quite some time and would need
a careful review before trusting. Taking them out to avoid
accidents.
---
ietf/utils/management/commands/dumprelated.py | 209 ------------------
ietf/utils/management/commands/loadrelated.py | 129 -----------
2 files changed, 338 deletions(-)
delete mode 100644 ietf/utils/management/commands/dumprelated.py
delete mode 100644 ietf/utils/management/commands/loadrelated.py
diff --git a/ietf/utils/management/commands/dumprelated.py b/ietf/utils/management/commands/dumprelated.py
deleted file mode 100644
index 66fbb33bf1..0000000000
--- a/ietf/utils/management/commands/dumprelated.py
+++ /dev/null
@@ -1,209 +0,0 @@
-# Copyright The IETF Trust 2018-2020, All Rights Reserved
-# -*- coding: utf-8 -*-
-
-
-import io
-import warnings
-from collections import OrderedDict
-
-from django.apps import apps
-from django.contrib.admin.utils import NestedObjects
-from django.core import serializers
-from django.core.management.base import BaseCommand, CommandError
-from django.core.management.utils import parse_apps_and_model_labels
-from django.db import DEFAULT_DB_ALIAS, router
-
-import debug # pyflakes:ignore
-debug.debug = True
-
-class ProxyModelWarning(Warning):
- pass
-
-
-class Command(BaseCommand):
- help = (
- "Output a database object and its related objects as a fixture of the given format "
- )
-
- def add_arguments(self, parser):
- parser.add_argument(
- 'args', metavar='app_label.ModelName', nargs=1,
- help='Specifies the app_label.ModelName for which to dump objects given by --pks',
- )
- parser.add_argument(
- '--format', default='json', dest='format',
- help='Specifies the output serialization format for fixtures.',
- )
- parser.add_argument(
- '--indent', default=None, dest='indent', type=int,
- help='Specifies the indent level to use when pretty-printing output.',
- )
- parser.add_argument(
- '--database', action='store', dest='database',
- default=DEFAULT_DB_ALIAS,
- help='Nominates a specific database to dump fixtures from. '
- 'Defaults to the "default" database.',
- )
- parser.add_argument(
- '-e', '--exclude', dest='exclude', action='append', default=[],
- help='An app_label or app_label.ModelName to exclude '
- '(use multiple --exclude to exclude multiple apps/models).',
- )
- parser.add_argument(
- '--natural-foreign', action='store_true', dest='use_natural_foreign_keys', default=False,
- help='Use natural foreign keys if they are available.',
- )
- parser.add_argument(
- '--natural-primary', action='store_true', dest='use_natural_primary_keys', default=False,
- help='Use natural primary keys if they are available.',
- )
- parser.add_argument(
- '-o', '--output', default=None, dest='output',
- help='Specifies file to which the output is written.'
- )
- parser.add_argument(
- '--pks', dest='primary_keys', required=True,
- help="Only dump objects with given primary keys. Accepts a comma-separated "
- "list of keys. This option only works when you specify one model.",
- )
-
- def handle(self, *app_labels, **options):
- format = options['format']
- indent = options['indent']
- using = options['database']
- excludes = options['exclude']
- output = options['output']
- show_traceback = options['traceback']
- use_natural_foreign_keys = options['use_natural_foreign_keys']
- use_natural_primary_keys = options['use_natural_primary_keys']
- pks = options['primary_keys']
-
- if pks:
- primary_keys = [pk.strip() for pk in pks.split(',')]
- else:
- primary_keys = []
-
- excluded_models, excluded_apps = parse_apps_and_model_labels(excludes)
-
- if len(app_labels) == 0:
- if primary_keys:
- raise CommandError("You can only use --pks option with one model")
- app_list = OrderedDict(
- (app_config, None) for app_config in apps.get_app_configs()
- if app_config.models_module is not None and app_config not in excluded_apps
- )
- else:
- if len(app_labels) > 1 and primary_keys:
- raise CommandError("You can only use --pks option with one model")
- app_list = OrderedDict()
- for label in app_labels:
- try:
- app_label, model_label = label.split('.')
- try:
- app_config = apps.get_app_config(app_label)
- except LookupError as e:
- raise CommandError(str(e))
- if app_config.models_module is None or app_config in excluded_apps:
- continue
- try:
- model = app_config.get_model(model_label)
- except LookupError:
- raise CommandError("Unknown model: %s.%s" % (app_label, model_label))
-
- app_list_value = app_list.setdefault(app_config, [])
-
- # We may have previously seen a "all-models" request for
- # this app (no model qualifier was given). In this case
- # there is no need adding specific models to the list.
- if app_list_value is not None:
- if model not in app_list_value:
- app_list_value.append(model)
- except ValueError:
- if primary_keys:
- raise CommandError("You can only use --pks option with one model")
- # This is just an app - no model qualifier
- app_label = label
- try:
- app_config = apps.get_app_config(app_label)
- except LookupError as e:
- raise CommandError(str(e))
- if app_config.models_module is None or app_config in excluded_apps:
- continue
- app_list[app_config] = None
-
- # Check that the serialization format exists; this is a shortcut to
- # avoid collating all the objects and _then_ failing.
- if format not in serializers.get_public_serializer_formats():
- try:
- serializers.get_serializer(format)
- except serializers.SerializerDoesNotExist:
- pass
-
- raise CommandError("Unknown serialization format: %s" % format)
-
- def flatten(l):
- if isinstance(l, list):
- for el in l:
- if isinstance(el, list):
- for sub in flatten(el):
- yield sub
- else:
- yield el
- else:
- yield l
-
- def get_objects(count_only=False):
- """
- Collate the objects to be serialized. If count_only is True, just
- count the number of objects to be serialized.
- """
- models = serializers.sort_dependencies(list(app_list.items()))
- for model in models:
- if model in excluded_models:
- continue
- if model._meta.proxy and model._meta.proxy_for_model not in models:
- warnings.warn(
- "%s is a proxy model and won't be serialized." % model._meta.label,
- category=ProxyModelWarning,
- )
- if not model._meta.proxy and router.allow_migrate_model(using, model):
- objects = model._default_manager
-
- queryset = objects.using(using).order_by(model._meta.pk.name)
- if primary_keys:
- queryset = queryset.filter(pk__in=primary_keys)
- if count_only:
- yield queryset.order_by().count()
- else:
- for obj in queryset.iterator():
- collector = NestedObjects(using=using)
- collector.collect([obj,])
- object_list = list(flatten(collector.nested()))
- object_list.reverse()
- for o in object_list:
- yield o
-
- try:
- self.stdout.ending = None
- progress_output = None
- object_count = 0
- # If dumpdata is outputting to stdout, there is no way to display progress
- if (output and self.stdout.isatty() and options['verbosity'] > 0):
- progress_output = self.stdout
- object_count = sum(get_objects(count_only=True))
- stream = io.open(output, 'w') if output else None
- try:
- serializers.serialize(
- format, get_objects(), indent=indent,
- use_natural_foreign_keys=use_natural_foreign_keys,
- use_natural_primary_keys=use_natural_primary_keys,
- stream=stream or self.stdout, progress_output=progress_output,
- object_count=object_count,
- )
- finally:
- if stream:
- stream.close()
- except Exception as e:
- if show_traceback:
- raise
- raise CommandError("Unable to serialize database: %s" % e)
diff --git a/ietf/utils/management/commands/loadrelated.py b/ietf/utils/management/commands/loadrelated.py
deleted file mode 100644
index d8ae19dc77..0000000000
--- a/ietf/utils/management/commands/loadrelated.py
+++ /dev/null
@@ -1,129 +0,0 @@
-# Copyright The IETF Trust 2018-2020, All Rights Reserved
-# -*- coding: utf-8 -*-
-
-
-import gzip
-import os
-#import sys
-import tqdm
-import zipfile
-
-try:
- import bz2
- has_bz2 = True
-except ImportError:
- has_bz2 = False
-
-from django.core.exceptions import ObjectDoesNotExist
-from django.core import serializers
-from django.db import DEFAULT_DB_ALIAS, DatabaseError, IntegrityError, connections
-from django.db.models.signals import post_save
-from django.utils.encoding import force_str
-import django.core.management.commands.loaddata as loaddata
-
-import debug # pyflakes:ignore
-
-from ietf.community.signals import notify_of_events_receiver
-
-class Command(loaddata.Command):
- help = ("""
-
- Load a fixture of related objects to the database. The fixture is expected
- to contain a set of related objects, created with the 'dumprelated' management
- command. It differs from the 'loaddata' command in that it silently ignores
- attempts to load duplicate entries, and continues loading subsequent entries.
-
- """)
-
- def add_arguments(self, parser):
- parser.add_argument('args', metavar='fixture', nargs='+', help='Fixture files.')
- parser.add_argument(
- '--database', action='store', dest='database', default=DEFAULT_DB_ALIAS,
- help='Nominates a specific database to load fixtures into. Defaults to the "default" database.',
- )
- parser.add_argument(
- '--ignorenonexistent', '-i', action='store_true', dest='ignore', default=False,
- help='Ignores entries in the serialized data for fields that do not '
- 'currently exist on the model.',
- )
-
- def handle(self, *args, **options):
- self.ignore = options['ignore']
- self.using = options['database']
- self.verbosity = options['verbosity']
- #
- self.compression_formats = {
- None: (open, 'rb'),
- 'gz': (gzip.GzipFile, 'rb'),
- 'zip': (SingleZipReader, 'r'),
- }
- if has_bz2:
- self.compression_formats['bz2'] = (bz2.BZ2File, 'r')
- #
- self.serialization_formats = serializers.get_public_serializer_formats()
- #
- post_save.disconnect(notify_of_events_receiver())
- #
- connection = connections[self.using]
- self.fixture_count = 0
- self.loaded_object_count = 0
- self.fixture_object_count = 0
- #
- for arg in args:
- fixture_file = arg
- self.stdout.write("Loading objects from %s" % fixture_file)
- _, ser_fmt, cmp_fmt = self.parse_name(os.path.basename(fixture_file))
- open_method, mode = self.compression_formats[cmp_fmt]
- fixture = open_method(fixture_file, mode)
- objects_in_fixture = 0
- self.stdout.write("Getting object count...\b\b\b", ending='')
- self.stdout.flush()
- for o in serializers.deserialize(ser_fmt, fixture, using=self.using, ignorenonexistent=self.ignore,):
- objects_in_fixture += 1
- self.stdout.write(" %d" % objects_in_fixture)
- #
- fixture = open_method(fixture_file, mode)
- self.fixture_count += 1
- objects = serializers.deserialize(ser_fmt, fixture, using=self.using, ignorenonexistent=self.ignore,)
- with connection.constraint_checks_disabled():
- for obj in tqdm.tqdm(objects, total=objects_in_fixture):
- try:
- obj.save(using=self.using)
- self.loaded_object_count += 1
- except (DatabaseError, IntegrityError, ObjectDoesNotExist, AttributeError) as e:
- error_msg = force_str(e)
- if "Duplicate entry" in error_msg:
- pass
- else:
- self.stderr.write("Could not load %(app_label)s.%(object_name)s(pk=%(pk)s): %(error_msg)s" % {
- 'app_label': obj.object._meta.app_label,
- 'object_name': obj.object._meta.object_name,
- 'pk': obj.object.pk,
- 'error_msg': error_msg,
- }, )
- self.fixture_object_count += objects_in_fixture
-
- if self.verbosity >= 1:
- if self.fixture_object_count == self.loaded_object_count:
- self.stdout.write(
- "Installed %d object(s) from %d fixture(s)"
- % (self.loaded_object_count, self.fixture_count)
- )
- else:
- self.stdout.write(
- "Installed %d object(s) (of %d) from %d fixture(s)"
- % (self.loaded_object_count, self.fixture_object_count, self.fixture_count)
- )
-
-
-class SingleZipReader(zipfile.ZipFile):
-
- def __init__(self, *args, **kwargs):
- zipfile.ZipFile.__init__(self, *args, **kwargs)
- if len(self.namelist()) != 1:
- raise ValueError("Zip-compressed fixtures must contain one file.")
-
- def read(self):
- return zipfile.ZipFile.read(self, self.namelist()[0])
-
-
From 56985d105651b7d0ca30345dcb8472c5975f787b Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Mon, 13 Jan 2025 15:01:32 -0500
Subject: [PATCH 028/460] chore(deps): bump ncipollo/release-action from 1.14.0
to 1.15.0 (#8420)
Bumps [ncipollo/release-action](https://github.com/ncipollo/release-action) from 1.14.0 to 1.15.0.
- [Release notes](https://github.com/ncipollo/release-action/releases)
- [Commits](https://github.com/ncipollo/release-action/compare/v1.14.0...v1.15.0)
---
updated-dependencies:
- dependency-name: ncipollo/release-action
dependency-type: direct:production
update-type: version-update:semver-minor
...
Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
---
.github/workflows/build.yml | 6 +++---
1 file changed, 3 insertions(+), 3 deletions(-)
diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index 46cb150e05..4771adde97 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -97,7 +97,7 @@ jobs:
echo "IS_RELEASE=true" >> $GITHUB_ENV
- name: Create Draft Release
- uses: ncipollo/release-action@v1.14.0
+ uses: ncipollo/release-action@v1.15.0
if: ${{ github.ref_name == 'release' }}
with:
prerelease: true
@@ -311,7 +311,7 @@ jobs:
histCoveragePath: historical-coverage.json
- name: Create Release
- uses: ncipollo/release-action@v1.14.0
+ uses: ncipollo/release-action@v1.15.0
if: ${{ env.SHOULD_DEPLOY == 'true' }}
with:
allowUpdates: true
@@ -324,7 +324,7 @@ jobs:
token: ${{ secrets.GITHUB_TOKEN }}
- name: Update Baseline Coverage
- uses: ncipollo/release-action@v1.14.0
+ uses: ncipollo/release-action@v1.15.0
if: ${{ github.event.inputs.updateCoverage == 'true' || github.ref_name == 'release' }}
with:
allowUpdates: true
From 135f759b3065e66c45862e9f6ad832084edb3c76 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Mon, 13 Jan 2025 15:02:45 -0500
Subject: [PATCH 029/460] chore(deps): bump dockerode in
/dev/deploy-to-container in the npm group (#8393)
Bumps the npm group in /dev/deploy-to-container with 1 update: [dockerode](https://github.com/apocas/dockerode).
Updates `dockerode` from 4.0.2 to 4.0.3
- [Release notes](https://github.com/apocas/dockerode/releases)
- [Commits](https://github.com/apocas/dockerode/compare/v4.0.2...v4.0.3)
---
updated-dependencies:
- dependency-name: dockerode
dependency-type: direct:production
update-type: version-update:semver-patch
dependency-group: npm
...
Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
---
dev/deploy-to-container/package-lock.json | 394 +++++++++++++++++++---
dev/deploy-to-container/package.json | 2 +-
2 files changed, 340 insertions(+), 56 deletions(-)
diff --git a/dev/deploy-to-container/package-lock.json b/dev/deploy-to-container/package-lock.json
index 08b6eb6c62..86cb1f732f 100644
--- a/dev/deploy-to-container/package-lock.json
+++ b/dev/deploy-to-container/package-lock.json
@@ -6,7 +6,7 @@
"": {
"name": "deploy-to-container",
"dependencies": {
- "dockerode": "^4.0.2",
+ "dockerode": "^4.0.3",
"fs-extra": "^11.2.0",
"nanoid": "5.0.9",
"nanoid-dictionary": "5.0.0-beta.1",
@@ -23,6 +23,35 @@
"resolved": "https://registry.npmjs.org/@balena/dockerignore/-/dockerignore-1.0.2.tgz",
"integrity": "sha512-wMue2Sy4GAVTk6Ic4tJVcnfdau+gx2EnG7S+uAEe+TWJFqE4YoWN4/H8MSLj4eYJKxGg26lZwboEniNiNwZQ6Q=="
},
+ "node_modules/@grpc/grpc-js": {
+ "version": "1.12.5",
+ "resolved": "https://registry.npmjs.org/@grpc/grpc-js/-/grpc-js-1.12.5.tgz",
+ "integrity": "sha512-d3iiHxdpg5+ZcJ6jnDSOT8Z0O0VMVGy34jAnYLUX8yd36b1qn8f1TwOA/Lc7TsOh03IkPJ38eGI5qD2EjNkoEA==",
+ "dependencies": {
+ "@grpc/proto-loader": "^0.7.13",
+ "@js-sdsl/ordered-map": "^4.4.2"
+ },
+ "engines": {
+ "node": ">=12.10.0"
+ }
+ },
+ "node_modules/@grpc/proto-loader": {
+ "version": "0.7.13",
+ "resolved": "https://registry.npmjs.org/@grpc/proto-loader/-/proto-loader-0.7.13.tgz",
+ "integrity": "sha512-AiXO/bfe9bmxBjxxtYxFAXGZvMaN5s8kO+jBHAJCON8rJoB5YS/D6X7ZNc6XQkuHNmyl4CYaMI1fJ/Gn27RGGw==",
+ "dependencies": {
+ "lodash.camelcase": "^4.3.0",
+ "long": "^5.0.0",
+ "protobufjs": "^7.2.5",
+ "yargs": "^17.7.2"
+ },
+ "bin": {
+ "proto-loader-gen-types": "build/bin/proto-loader-gen-types.js"
+ },
+ "engines": {
+ "node": ">=6"
+ }
+ },
"node_modules/@isaacs/cliui": {
"version": "8.0.2",
"resolved": "https://registry.npmjs.org/@isaacs/cliui/-/cliui-8.0.2.tgz",
@@ -123,6 +152,15 @@
"node": ">=18.0.0"
}
},
+ "node_modules/@js-sdsl/ordered-map": {
+ "version": "4.4.2",
+ "resolved": "https://registry.npmjs.org/@js-sdsl/ordered-map/-/ordered-map-4.4.2.tgz",
+ "integrity": "sha512-iUKgm52T8HOE/makSxjqoWhe95ZJA1/G1sYsGev2JDKUSS14KAgg1LHb+Ba+IPow0xflbnSkOsZcO08C7w1gYw==",
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/js-sdsl"
+ }
+ },
"node_modules/@pkgjs/parseargs": {
"version": "0.11.0",
"resolved": "https://registry.npmjs.org/@pkgjs/parseargs/-/parseargs-0.11.0.tgz",
@@ -132,6 +170,68 @@
"node": ">=14"
}
},
+ "node_modules/@protobufjs/aspromise": {
+ "version": "1.1.2",
+ "resolved": "https://registry.npmjs.org/@protobufjs/aspromise/-/aspromise-1.1.2.tgz",
+ "integrity": "sha512-j+gKExEuLmKwvz3OgROXtrJ2UG2x8Ch2YZUxahh+s1F2HZ+wAceUNLkvy6zKCPVRkU++ZWQrdxsUeQXmcg4uoQ=="
+ },
+ "node_modules/@protobufjs/base64": {
+ "version": "1.1.2",
+ "resolved": "https://registry.npmjs.org/@protobufjs/base64/-/base64-1.1.2.tgz",
+ "integrity": "sha512-AZkcAA5vnN/v4PDqKyMR5lx7hZttPDgClv83E//FMNhR2TMcLUhfRUBHCmSl0oi9zMgDDqRUJkSxO3wm85+XLg=="
+ },
+ "node_modules/@protobufjs/codegen": {
+ "version": "2.0.4",
+ "resolved": "https://registry.npmjs.org/@protobufjs/codegen/-/codegen-2.0.4.tgz",
+ "integrity": "sha512-YyFaikqM5sH0ziFZCN3xDC7zeGaB/d0IUb9CATugHWbd1FRFwWwt4ld4OYMPWu5a3Xe01mGAULCdqhMlPl29Jg=="
+ },
+ "node_modules/@protobufjs/eventemitter": {
+ "version": "1.1.0",
+ "resolved": "https://registry.npmjs.org/@protobufjs/eventemitter/-/eventemitter-1.1.0.tgz",
+ "integrity": "sha512-j9ednRT81vYJ9OfVuXG6ERSTdEL1xVsNgqpkxMsbIabzSo3goCjDIveeGv5d03om39ML71RdmrGNjG5SReBP/Q=="
+ },
+ "node_modules/@protobufjs/fetch": {
+ "version": "1.1.0",
+ "resolved": "https://registry.npmjs.org/@protobufjs/fetch/-/fetch-1.1.0.tgz",
+ "integrity": "sha512-lljVXpqXebpsijW71PZaCYeIcE5on1w5DlQy5WH6GLbFryLUrBD4932W/E2BSpfRJWseIL4v/KPgBFxDOIdKpQ==",
+ "dependencies": {
+ "@protobufjs/aspromise": "^1.1.1",
+ "@protobufjs/inquire": "^1.1.0"
+ }
+ },
+ "node_modules/@protobufjs/float": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/@protobufjs/float/-/float-1.0.2.tgz",
+ "integrity": "sha512-Ddb+kVXlXst9d+R9PfTIxh1EdNkgoRe5tOX6t01f1lYWOvJnSPDBlG241QLzcyPdoNTsblLUdujGSE4RzrTZGQ=="
+ },
+ "node_modules/@protobufjs/inquire": {
+ "version": "1.1.0",
+ "resolved": "https://registry.npmjs.org/@protobufjs/inquire/-/inquire-1.1.0.tgz",
+ "integrity": "sha512-kdSefcPdruJiFMVSbn801t4vFK7KB/5gd2fYvrxhuJYg8ILrmn9SKSX2tZdV6V+ksulWqS7aXjBcRXl3wHoD9Q=="
+ },
+ "node_modules/@protobufjs/path": {
+ "version": "1.1.2",
+ "resolved": "https://registry.npmjs.org/@protobufjs/path/-/path-1.1.2.tgz",
+ "integrity": "sha512-6JOcJ5Tm08dOHAbdR3GrvP+yUUfkjG5ePsHYczMFLq3ZmMkAD98cDgcT2iA1lJ9NVwFd4tH/iSSoe44YWkltEA=="
+ },
+ "node_modules/@protobufjs/pool": {
+ "version": "1.1.0",
+ "resolved": "https://registry.npmjs.org/@protobufjs/pool/-/pool-1.1.0.tgz",
+ "integrity": "sha512-0kELaGSIDBKvcgS4zkjz1PeddatrjYcmMWOlAuAPwAeccUrPHdUqo/J6LiymHHEiJT5NrF1UVwxY14f+fy4WQw=="
+ },
+ "node_modules/@protobufjs/utf8": {
+ "version": "1.1.0",
+ "resolved": "https://registry.npmjs.org/@protobufjs/utf8/-/utf8-1.1.0.tgz",
+ "integrity": "sha512-Vvn3zZrhQZkkBE8LSuW3em98c0FwgO4nxzv6OdSxPKJIEKY2bGbHn+mhGIPerzI4twdxaP8/0+06HBpwf345Lw=="
+ },
+ "node_modules/@types/node": {
+ "version": "22.10.5",
+ "resolved": "https://registry.npmjs.org/@types/node/-/node-22.10.5.tgz",
+ "integrity": "sha512-F8Q+SeGimwOo86fiovQh8qiXfFEh2/ocYv7tU5pJ3EXMSSxk1Joj5wefpFK2fHTf/N6HKGSxIDBT9f3gCxXPkQ==",
+ "dependencies": {
+ "undici-types": "~6.20.0"
+ }
+ },
"node_modules/ansi-regex": {
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz",
@@ -246,14 +346,14 @@
"integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA=="
},
"node_modules/cpu-features": {
- "version": "0.0.9",
- "resolved": "https://registry.npmjs.org/cpu-features/-/cpu-features-0.0.9.tgz",
- "integrity": "sha512-AKjgn2rP2yJyfbepsmLfiYcmtNn/2eUvocUyM/09yB0YDiz39HteK/5/T4Onf0pmdYDMgkBoGvRLvEguzyL7wQ==",
+ "version": "0.0.10",
+ "resolved": "https://registry.npmjs.org/cpu-features/-/cpu-features-0.0.10.tgz",
+ "integrity": "sha512-9IkYqtX3YHPCzoVg1Py+o9057a3i0fp7S530UWokCSaFVTc7CwXPRiOjRjBQQ18ZCNafx78YfnG+HALxtVmOGA==",
"hasInstallScript": true,
"optional": true,
"dependencies": {
"buildcheck": "~0.0.6",
- "nan": "^2.17.0"
+ "nan": "^2.19.0"
},
"engines": {
"node": ">=10.0.0"
@@ -273,11 +373,11 @@
}
},
"node_modules/debug": {
- "version": "4.3.4",
- "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz",
- "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==",
+ "version": "4.4.0",
+ "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.0.tgz",
+ "integrity": "sha512-6WTZ/IxCY/T6BALoZHaE4ctp9xm+Z5kY/pzYaCHRFeyVhojxlrm+46y68HA6hr0TcwEssoxNiDEUJQjfPZ/RYA==",
"dependencies": {
- "ms": "2.1.2"
+ "ms": "^2.1.3"
},
"engines": {
"node": ">=6.0"
@@ -289,9 +389,9 @@
}
},
"node_modules/docker-modem": {
- "version": "5.0.3",
- "resolved": "https://registry.npmjs.org/docker-modem/-/docker-modem-5.0.3.tgz",
- "integrity": "sha512-89zhop5YVhcPEt5FpUFGr3cDyceGhq/F9J+ZndQ4KfqNvfbJpPMfgeixFgUj5OjCYAboElqODxY5Z1EBsSa6sg==",
+ "version": "5.0.5",
+ "resolved": "https://registry.npmjs.org/docker-modem/-/docker-modem-5.0.5.tgz",
+ "integrity": "sha512-Cxw8uEcvNTRmsQuGqzzfiCnfGgf96tVJItLh8taOX0miTcIBALKH5TckCSuZbpbjP7uhAl81dOL9sxfa6HgCIg==",
"dependencies": {
"debug": "^4.1.1",
"readable-stream": "^3.5.0",
@@ -303,13 +403,17 @@
}
},
"node_modules/dockerode": {
- "version": "4.0.2",
- "resolved": "https://registry.npmjs.org/dockerode/-/dockerode-4.0.2.tgz",
- "integrity": "sha512-9wM1BVpVMFr2Pw3eJNXrYYt6DT9k0xMcsSCjtPvyQ+xa1iPg/Mo3T/gUcwI0B2cczqCeCYRPF8yFYDwtFXT0+w==",
+ "version": "4.0.3",
+ "resolved": "https://registry.npmjs.org/dockerode/-/dockerode-4.0.3.tgz",
+ "integrity": "sha512-QSXJFcBQNaGZO6U3qWW4B7p8yRIJn/dWmvL2AQWfO/bjptBBO6QYdVkYSYFz9qoivP2jsOHZfmXMAfrK0BMKyg==",
"dependencies": {
"@balena/dockerignore": "^1.0.2",
- "docker-modem": "^5.0.3",
- "tar-fs": "~2.0.1"
+ "@grpc/grpc-js": "^1.11.1",
+ "@grpc/proto-loader": "^0.7.13",
+ "docker-modem": "^5.0.5",
+ "protobufjs": "^7.3.2",
+ "tar-fs": "~2.0.1",
+ "uuid": "^10.0.0"
},
"engines": {
"node": ">= 8.0"
@@ -473,6 +577,16 @@
"graceful-fs": "^4.1.6"
}
},
+ "node_modules/lodash.camelcase": {
+ "version": "4.3.0",
+ "resolved": "https://registry.npmjs.org/lodash.camelcase/-/lodash.camelcase-4.3.0.tgz",
+ "integrity": "sha512-TwuEnCnxbc3rAvhf/LbG7tJUDzhqXyFnv3dtzLOPgCG/hODL7WFnsbwktkD7yUV0RrreP/l1PALq/YSg6VvjlA=="
+ },
+ "node_modules/long": {
+ "version": "5.2.4",
+ "resolved": "https://registry.npmjs.org/long/-/long-5.2.4.tgz",
+ "integrity": "sha512-qtzLbJE8hq7VabR3mISmVGtoXP8KGc2Z/AT8OuqlYD7JTR3oqrgwdjnk07wpj1twXxYmgDXgoKVWUG/fReSzHg=="
+ },
"node_modules/lru-cache": {
"version": "10.2.2",
"resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.2.2.tgz",
@@ -535,14 +649,14 @@
"integrity": "sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A=="
},
"node_modules/ms": {
- "version": "2.1.2",
- "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz",
- "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w=="
+ "version": "2.1.3",
+ "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz",
+ "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA=="
},
"node_modules/nan": {
- "version": "2.18.0",
- "resolved": "https://registry.npmjs.org/nan/-/nan-2.18.0.tgz",
- "integrity": "sha512-W7tfG7vMOGtD30sHoZSSc/JVYiyDPEyQVso/Zz+/uQd0B0L46gtC+pHha5FFMRpil6fm/AoEcRWyOVi4+E/f8w==",
+ "version": "2.22.0",
+ "resolved": "https://registry.npmjs.org/nan/-/nan-2.22.0.tgz",
+ "integrity": "sha512-nbajikzWTMwsW+eSsNm3QwlOs7het9gGJU5dDZzRTQGk03vyBOauxgI4VakDzE0PtsGTmXPsXTbbjVhRwR5mpw==",
"optional": true
},
"node_modules/nanoid": {
@@ -598,6 +712,29 @@
"url": "https://github.com/sponsors/isaacs"
}
},
+ "node_modules/protobufjs": {
+ "version": "7.4.0",
+ "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-7.4.0.tgz",
+ "integrity": "sha512-mRUWCc3KUU4w1jU8sGxICXH/gNS94DvI1gxqDvBzhj1JpcsimQkYiOJfwsPUykUI5ZaspFbSgmBLER8IrQ3tqw==",
+ "hasInstallScript": true,
+ "dependencies": {
+ "@protobufjs/aspromise": "^1.1.2",
+ "@protobufjs/base64": "^1.1.2",
+ "@protobufjs/codegen": "^2.0.4",
+ "@protobufjs/eventemitter": "^1.1.0",
+ "@protobufjs/fetch": "^1.1.0",
+ "@protobufjs/float": "^1.0.2",
+ "@protobufjs/inquire": "^1.1.0",
+ "@protobufjs/path": "^1.1.2",
+ "@protobufjs/pool": "^1.1.0",
+ "@protobufjs/utf8": "^1.1.0",
+ "@types/node": ">=13.7.0",
+ "long": "^5.0.0"
+ },
+ "engines": {
+ "node": ">=12.0.0"
+ }
+ },
"node_modules/pump": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/pump/-/pump-3.0.0.tgz",
@@ -713,9 +850,9 @@
"integrity": "sha512-Q5thBSxp5t8WPTTJQS59LrGqOZqOsrhDGDVm8azCqIBjSBd7nd9o2PM+mDulQQkh8h//4U6hFZnc/mul8t5pWQ=="
},
"node_modules/ssh2": {
- "version": "1.15.0",
- "resolved": "https://registry.npmjs.org/ssh2/-/ssh2-1.15.0.tgz",
- "integrity": "sha512-C0PHgX4h6lBxYx7hcXwu3QWdh4tg6tZZsTfXcdvc5caW/EMxaB4H9dWsl7qk+F7LAW762hp8VbXOX7x4xUYvEw==",
+ "version": "1.16.0",
+ "resolved": "https://registry.npmjs.org/ssh2/-/ssh2-1.16.0.tgz",
+ "integrity": "sha512-r1X4KsBGedJqo7h8F5c4Ybpcr5RjyP+aWIG007uBPRjmdQWfEiVLzSK71Zji1B9sKxwaCvD8y8cwSkYrlLiRRg==",
"hasInstallScript": true,
"dependencies": {
"asn1": "^0.2.6",
@@ -725,8 +862,8 @@
"node": ">=10.16.0"
},
"optionalDependencies": {
- "cpu-features": "~0.0.9",
- "nan": "^2.18.0"
+ "cpu-features": "~0.0.10",
+ "nan": "^2.20.0"
}
},
"node_modules/string_decoder": {
@@ -875,6 +1012,11 @@
"resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.5.tgz",
"integrity": "sha512-KXXFFdAbFXY4geFIwoyNK+f5Z1b7swfXABfL7HXCmoIWMKU3dmS26672A4EeQtDzLKy7SXmfBu51JolvEKwtGA=="
},
+ "node_modules/undici-types": {
+ "version": "6.20.0",
+ "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.20.0.tgz",
+ "integrity": "sha512-Ny6QZ2Nju20vw1SRHe3d9jVu6gJ+4e3+MMpqu7pqE5HT6WsTSlce++GQmK5UXS8mzV8DSYHrQH+Xrf2jVcuKNg=="
+ },
"node_modules/universalify": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/universalify/-/universalify-2.0.0.tgz",
@@ -888,6 +1030,18 @@
"resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz",
"integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw=="
},
+ "node_modules/uuid": {
+ "version": "10.0.0",
+ "resolved": "https://registry.npmjs.org/uuid/-/uuid-10.0.0.tgz",
+ "integrity": "sha512-8XkAphELsDnEGrDxUOHB3RGvXz6TeuYSGEZBOjtTtPm2lwhGBjLgOzLHB63IUWfBpNucQjND6d3AOudO+H3RWQ==",
+ "funding": [
+ "https://github.com/sponsors/broofa",
+ "https://github.com/sponsors/ctavan"
+ ],
+ "bin": {
+ "uuid": "dist/bin/uuid"
+ }
+ },
"node_modules/which": {
"version": "2.0.2",
"resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz",
@@ -988,6 +1142,26 @@
"resolved": "https://registry.npmjs.org/@balena/dockerignore/-/dockerignore-1.0.2.tgz",
"integrity": "sha512-wMue2Sy4GAVTk6Ic4tJVcnfdau+gx2EnG7S+uAEe+TWJFqE4YoWN4/H8MSLj4eYJKxGg26lZwboEniNiNwZQ6Q=="
},
+ "@grpc/grpc-js": {
+ "version": "1.12.5",
+ "resolved": "https://registry.npmjs.org/@grpc/grpc-js/-/grpc-js-1.12.5.tgz",
+ "integrity": "sha512-d3iiHxdpg5+ZcJ6jnDSOT8Z0O0VMVGy34jAnYLUX8yd36b1qn8f1TwOA/Lc7TsOh03IkPJ38eGI5qD2EjNkoEA==",
+ "requires": {
+ "@grpc/proto-loader": "^0.7.13",
+ "@js-sdsl/ordered-map": "^4.4.2"
+ }
+ },
+ "@grpc/proto-loader": {
+ "version": "0.7.13",
+ "resolved": "https://registry.npmjs.org/@grpc/proto-loader/-/proto-loader-0.7.13.tgz",
+ "integrity": "sha512-AiXO/bfe9bmxBjxxtYxFAXGZvMaN5s8kO+jBHAJCON8rJoB5YS/D6X7ZNc6XQkuHNmyl4CYaMI1fJ/Gn27RGGw==",
+ "requires": {
+ "lodash.camelcase": "^4.3.0",
+ "long": "^5.0.0",
+ "protobufjs": "^7.2.5",
+ "yargs": "^17.7.2"
+ }
+ },
"@isaacs/cliui": {
"version": "8.0.2",
"resolved": "https://registry.npmjs.org/@isaacs/cliui/-/cliui-8.0.2.tgz",
@@ -1054,12 +1228,79 @@
"minipass": "^7.0.4"
}
},
+ "@js-sdsl/ordered-map": {
+ "version": "4.4.2",
+ "resolved": "https://registry.npmjs.org/@js-sdsl/ordered-map/-/ordered-map-4.4.2.tgz",
+ "integrity": "sha512-iUKgm52T8HOE/makSxjqoWhe95ZJA1/G1sYsGev2JDKUSS14KAgg1LHb+Ba+IPow0xflbnSkOsZcO08C7w1gYw=="
+ },
"@pkgjs/parseargs": {
"version": "0.11.0",
"resolved": "https://registry.npmjs.org/@pkgjs/parseargs/-/parseargs-0.11.0.tgz",
"integrity": "sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==",
"optional": true
},
+ "@protobufjs/aspromise": {
+ "version": "1.1.2",
+ "resolved": "https://registry.npmjs.org/@protobufjs/aspromise/-/aspromise-1.1.2.tgz",
+ "integrity": "sha512-j+gKExEuLmKwvz3OgROXtrJ2UG2x8Ch2YZUxahh+s1F2HZ+wAceUNLkvy6zKCPVRkU++ZWQrdxsUeQXmcg4uoQ=="
+ },
+ "@protobufjs/base64": {
+ "version": "1.1.2",
+ "resolved": "https://registry.npmjs.org/@protobufjs/base64/-/base64-1.1.2.tgz",
+ "integrity": "sha512-AZkcAA5vnN/v4PDqKyMR5lx7hZttPDgClv83E//FMNhR2TMcLUhfRUBHCmSl0oi9zMgDDqRUJkSxO3wm85+XLg=="
+ },
+ "@protobufjs/codegen": {
+ "version": "2.0.4",
+ "resolved": "https://registry.npmjs.org/@protobufjs/codegen/-/codegen-2.0.4.tgz",
+ "integrity": "sha512-YyFaikqM5sH0ziFZCN3xDC7zeGaB/d0IUb9CATugHWbd1FRFwWwt4ld4OYMPWu5a3Xe01mGAULCdqhMlPl29Jg=="
+ },
+ "@protobufjs/eventemitter": {
+ "version": "1.1.0",
+ "resolved": "https://registry.npmjs.org/@protobufjs/eventemitter/-/eventemitter-1.1.0.tgz",
+ "integrity": "sha512-j9ednRT81vYJ9OfVuXG6ERSTdEL1xVsNgqpkxMsbIabzSo3goCjDIveeGv5d03om39ML71RdmrGNjG5SReBP/Q=="
+ },
+ "@protobufjs/fetch": {
+ "version": "1.1.0",
+ "resolved": "https://registry.npmjs.org/@protobufjs/fetch/-/fetch-1.1.0.tgz",
+ "integrity": "sha512-lljVXpqXebpsijW71PZaCYeIcE5on1w5DlQy5WH6GLbFryLUrBD4932W/E2BSpfRJWseIL4v/KPgBFxDOIdKpQ==",
+ "requires": {
+ "@protobufjs/aspromise": "^1.1.1",
+ "@protobufjs/inquire": "^1.1.0"
+ }
+ },
+ "@protobufjs/float": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/@protobufjs/float/-/float-1.0.2.tgz",
+ "integrity": "sha512-Ddb+kVXlXst9d+R9PfTIxh1EdNkgoRe5tOX6t01f1lYWOvJnSPDBlG241QLzcyPdoNTsblLUdujGSE4RzrTZGQ=="
+ },
+ "@protobufjs/inquire": {
+ "version": "1.1.0",
+ "resolved": "https://registry.npmjs.org/@protobufjs/inquire/-/inquire-1.1.0.tgz",
+ "integrity": "sha512-kdSefcPdruJiFMVSbn801t4vFK7KB/5gd2fYvrxhuJYg8ILrmn9SKSX2tZdV6V+ksulWqS7aXjBcRXl3wHoD9Q=="
+ },
+ "@protobufjs/path": {
+ "version": "1.1.2",
+ "resolved": "https://registry.npmjs.org/@protobufjs/path/-/path-1.1.2.tgz",
+ "integrity": "sha512-6JOcJ5Tm08dOHAbdR3GrvP+yUUfkjG5ePsHYczMFLq3ZmMkAD98cDgcT2iA1lJ9NVwFd4tH/iSSoe44YWkltEA=="
+ },
+ "@protobufjs/pool": {
+ "version": "1.1.0",
+ "resolved": "https://registry.npmjs.org/@protobufjs/pool/-/pool-1.1.0.tgz",
+ "integrity": "sha512-0kELaGSIDBKvcgS4zkjz1PeddatrjYcmMWOlAuAPwAeccUrPHdUqo/J6LiymHHEiJT5NrF1UVwxY14f+fy4WQw=="
+ },
+ "@protobufjs/utf8": {
+ "version": "1.1.0",
+ "resolved": "https://registry.npmjs.org/@protobufjs/utf8/-/utf8-1.1.0.tgz",
+ "integrity": "sha512-Vvn3zZrhQZkkBE8LSuW3em98c0FwgO4nxzv6OdSxPKJIEKY2bGbHn+mhGIPerzI4twdxaP8/0+06HBpwf345Lw=="
+ },
+ "@types/node": {
+ "version": "22.10.5",
+ "resolved": "https://registry.npmjs.org/@types/node/-/node-22.10.5.tgz",
+ "integrity": "sha512-F8Q+SeGimwOo86fiovQh8qiXfFEh2/ocYv7tU5pJ3EXMSSxk1Joj5wefpFK2fHTf/N6HKGSxIDBT9f3gCxXPkQ==",
+ "requires": {
+ "undici-types": "~6.20.0"
+ }
+ },
"ansi-regex": {
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz",
@@ -1142,13 +1383,13 @@
"integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA=="
},
"cpu-features": {
- "version": "0.0.9",
- "resolved": "https://registry.npmjs.org/cpu-features/-/cpu-features-0.0.9.tgz",
- "integrity": "sha512-AKjgn2rP2yJyfbepsmLfiYcmtNn/2eUvocUyM/09yB0YDiz39HteK/5/T4Onf0pmdYDMgkBoGvRLvEguzyL7wQ==",
+ "version": "0.0.10",
+ "resolved": "https://registry.npmjs.org/cpu-features/-/cpu-features-0.0.10.tgz",
+ "integrity": "sha512-9IkYqtX3YHPCzoVg1Py+o9057a3i0fp7S530UWokCSaFVTc7CwXPRiOjRjBQQ18ZCNafx78YfnG+HALxtVmOGA==",
"optional": true,
"requires": {
"buildcheck": "~0.0.6",
- "nan": "^2.17.0"
+ "nan": "^2.19.0"
}
},
"cross-spawn": {
@@ -1162,17 +1403,17 @@
}
},
"debug": {
- "version": "4.3.4",
- "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz",
- "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==",
+ "version": "4.4.0",
+ "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.0.tgz",
+ "integrity": "sha512-6WTZ/IxCY/T6BALoZHaE4ctp9xm+Z5kY/pzYaCHRFeyVhojxlrm+46y68HA6hr0TcwEssoxNiDEUJQjfPZ/RYA==",
"requires": {
- "ms": "2.1.2"
+ "ms": "^2.1.3"
}
},
"docker-modem": {
- "version": "5.0.3",
- "resolved": "https://registry.npmjs.org/docker-modem/-/docker-modem-5.0.3.tgz",
- "integrity": "sha512-89zhop5YVhcPEt5FpUFGr3cDyceGhq/F9J+ZndQ4KfqNvfbJpPMfgeixFgUj5OjCYAboElqODxY5Z1EBsSa6sg==",
+ "version": "5.0.5",
+ "resolved": "https://registry.npmjs.org/docker-modem/-/docker-modem-5.0.5.tgz",
+ "integrity": "sha512-Cxw8uEcvNTRmsQuGqzzfiCnfGgf96tVJItLh8taOX0miTcIBALKH5TckCSuZbpbjP7uhAl81dOL9sxfa6HgCIg==",
"requires": {
"debug": "^4.1.1",
"readable-stream": "^3.5.0",
@@ -1181,13 +1422,17 @@
}
},
"dockerode": {
- "version": "4.0.2",
- "resolved": "https://registry.npmjs.org/dockerode/-/dockerode-4.0.2.tgz",
- "integrity": "sha512-9wM1BVpVMFr2Pw3eJNXrYYt6DT9k0xMcsSCjtPvyQ+xa1iPg/Mo3T/gUcwI0B2cczqCeCYRPF8yFYDwtFXT0+w==",
+ "version": "4.0.3",
+ "resolved": "https://registry.npmjs.org/dockerode/-/dockerode-4.0.3.tgz",
+ "integrity": "sha512-QSXJFcBQNaGZO6U3qWW4B7p8yRIJn/dWmvL2AQWfO/bjptBBO6QYdVkYSYFz9qoivP2jsOHZfmXMAfrK0BMKyg==",
"requires": {
"@balena/dockerignore": "^1.0.2",
- "docker-modem": "^5.0.3",
- "tar-fs": "~2.0.1"
+ "@grpc/grpc-js": "^1.11.1",
+ "@grpc/proto-loader": "^0.7.13",
+ "docker-modem": "^5.0.5",
+ "protobufjs": "^7.3.2",
+ "tar-fs": "~2.0.1",
+ "uuid": "^10.0.0"
}
},
"eastasianwidth": {
@@ -1297,6 +1542,16 @@
"universalify": "^2.0.0"
}
},
+ "lodash.camelcase": {
+ "version": "4.3.0",
+ "resolved": "https://registry.npmjs.org/lodash.camelcase/-/lodash.camelcase-4.3.0.tgz",
+ "integrity": "sha512-TwuEnCnxbc3rAvhf/LbG7tJUDzhqXyFnv3dtzLOPgCG/hODL7WFnsbwktkD7yUV0RrreP/l1PALq/YSg6VvjlA=="
+ },
+ "long": {
+ "version": "5.2.4",
+ "resolved": "https://registry.npmjs.org/long/-/long-5.2.4.tgz",
+ "integrity": "sha512-qtzLbJE8hq7VabR3mISmVGtoXP8KGc2Z/AT8OuqlYD7JTR3oqrgwdjnk07wpj1twXxYmgDXgoKVWUG/fReSzHg=="
+ },
"lru-cache": {
"version": "10.2.2",
"resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.2.2.tgz",
@@ -1335,14 +1590,14 @@
"integrity": "sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A=="
},
"ms": {
- "version": "2.1.2",
- "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz",
- "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w=="
+ "version": "2.1.3",
+ "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz",
+ "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA=="
},
"nan": {
- "version": "2.18.0",
- "resolved": "https://registry.npmjs.org/nan/-/nan-2.18.0.tgz",
- "integrity": "sha512-W7tfG7vMOGtD30sHoZSSc/JVYiyDPEyQVso/Zz+/uQd0B0L46gtC+pHha5FFMRpil6fm/AoEcRWyOVi4+E/f8w==",
+ "version": "2.22.0",
+ "resolved": "https://registry.npmjs.org/nan/-/nan-2.22.0.tgz",
+ "integrity": "sha512-nbajikzWTMwsW+eSsNm3QwlOs7het9gGJU5dDZzRTQGk03vyBOauxgI4VakDzE0PtsGTmXPsXTbbjVhRwR5mpw==",
"optional": true
},
"nanoid": {
@@ -1377,6 +1632,25 @@
"minipass": "^5.0.0 || ^6.0.2 || ^7.0.0"
}
},
+ "protobufjs": {
+ "version": "7.4.0",
+ "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-7.4.0.tgz",
+ "integrity": "sha512-mRUWCc3KUU4w1jU8sGxICXH/gNS94DvI1gxqDvBzhj1JpcsimQkYiOJfwsPUykUI5ZaspFbSgmBLER8IrQ3tqw==",
+ "requires": {
+ "@protobufjs/aspromise": "^1.1.2",
+ "@protobufjs/base64": "^1.1.2",
+ "@protobufjs/codegen": "^2.0.4",
+ "@protobufjs/eventemitter": "^1.1.0",
+ "@protobufjs/fetch": "^1.1.0",
+ "@protobufjs/float": "^1.0.2",
+ "@protobufjs/inquire": "^1.1.0",
+ "@protobufjs/path": "^1.1.2",
+ "@protobufjs/pool": "^1.1.0",
+ "@protobufjs/utf8": "^1.1.0",
+ "@types/node": ">=13.7.0",
+ "long": "^5.0.0"
+ }
+ },
"pump": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/pump/-/pump-3.0.0.tgz",
@@ -1448,14 +1722,14 @@
"integrity": "sha512-Q5thBSxp5t8WPTTJQS59LrGqOZqOsrhDGDVm8azCqIBjSBd7nd9o2PM+mDulQQkh8h//4U6hFZnc/mul8t5pWQ=="
},
"ssh2": {
- "version": "1.15.0",
- "resolved": "https://registry.npmjs.org/ssh2/-/ssh2-1.15.0.tgz",
- "integrity": "sha512-C0PHgX4h6lBxYx7hcXwu3QWdh4tg6tZZsTfXcdvc5caW/EMxaB4H9dWsl7qk+F7LAW762hp8VbXOX7x4xUYvEw==",
+ "version": "1.16.0",
+ "resolved": "https://registry.npmjs.org/ssh2/-/ssh2-1.16.0.tgz",
+ "integrity": "sha512-r1X4KsBGedJqo7h8F5c4Ybpcr5RjyP+aWIG007uBPRjmdQWfEiVLzSK71Zji1B9sKxwaCvD8y8cwSkYrlLiRRg==",
"requires": {
"asn1": "^0.2.6",
"bcrypt-pbkdf": "^1.0.2",
- "cpu-features": "~0.0.9",
- "nan": "^2.18.0"
+ "cpu-features": "~0.0.10",
+ "nan": "^2.20.0"
}
},
"string_decoder": {
@@ -1571,6 +1845,11 @@
"resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.5.tgz",
"integrity": "sha512-KXXFFdAbFXY4geFIwoyNK+f5Z1b7swfXABfL7HXCmoIWMKU3dmS26672A4EeQtDzLKy7SXmfBu51JolvEKwtGA=="
},
+ "undici-types": {
+ "version": "6.20.0",
+ "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.20.0.tgz",
+ "integrity": "sha512-Ny6QZ2Nju20vw1SRHe3d9jVu6gJ+4e3+MMpqu7pqE5HT6WsTSlce++GQmK5UXS8mzV8DSYHrQH+Xrf2jVcuKNg=="
+ },
"universalify": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/universalify/-/universalify-2.0.0.tgz",
@@ -1581,6 +1860,11 @@
"resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz",
"integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw=="
},
+ "uuid": {
+ "version": "10.0.0",
+ "resolved": "https://registry.npmjs.org/uuid/-/uuid-10.0.0.tgz",
+ "integrity": "sha512-8XkAphELsDnEGrDxUOHB3RGvXz6TeuYSGEZBOjtTtPm2lwhGBjLgOzLHB63IUWfBpNucQjND6d3AOudO+H3RWQ=="
+ },
"which": {
"version": "2.0.2",
"resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz",
diff --git a/dev/deploy-to-container/package.json b/dev/deploy-to-container/package.json
index 8f16279cea..53a362b9d0 100644
--- a/dev/deploy-to-container/package.json
+++ b/dev/deploy-to-container/package.json
@@ -2,7 +2,7 @@
"name": "deploy-to-container",
"type": "module",
"dependencies": {
- "dockerode": "^4.0.2",
+ "dockerode": "^4.0.3",
"fs-extra": "^11.2.0",
"nanoid": "5.0.9",
"nanoid-dictionary": "5.0.0-beta.1",
From 1ffd205ef065ec5cd5fd66ddea55e2a702650579 Mon Sep 17 00:00:00 2001
From: Robert Sparks
Date: Tue, 14 Jan 2025 08:48:55 -0600
Subject: [PATCH 030/460] fix: rsync to the correct dest (#8421)
---
ietf/submit/utils.py | 4 +++-
1 file changed, 3 insertions(+), 1 deletion(-)
diff --git a/ietf/submit/utils.py b/ietf/submit/utils.py
index 16cccc9b59..49f28c4196 100644
--- a/ietf/submit/utils.py
+++ b/ietf/submit/utils.py
@@ -1598,5 +1598,7 @@ def active(dirent):
except UnicodeDecodeError as e:
log.log(f"Error processing {item.name}: {e}")
- ftp_moddir = Path(settings.FTP_DIR) / "yang" / "draftmod"
+ ftp_moddir = Path(settings.FTP_DIR) / "yang" / "draftmod/"
+ if not moddir.endswith("/"):
+ moddir += "/"
subprocess.call(("/usr/bin/rsync", "-aq", "--delete", moddir, ftp_moddir))
From d1d33e7ad50d6c7625541c0966023c2cdfe77c16 Mon Sep 17 00:00:00 2001
From: rjsparks <10996692+rjsparks@users.noreply.github.com>
Date: Thu, 16 Jan 2025 20:44:55 +0000
Subject: [PATCH 031/460] ci: update base image target version to 20250116T2033
---
dev/build/Dockerfile | 2 +-
dev/build/TARGET_BASE | 2 +-
2 files changed, 2 insertions(+), 2 deletions(-)
diff --git a/dev/build/Dockerfile b/dev/build/Dockerfile
index a923bf693f..2f35125564 100644
--- a/dev/build/Dockerfile
+++ b/dev/build/Dockerfile
@@ -1,4 +1,4 @@
-FROM ghcr.io/ietf-tools/datatracker-app-base:20241212T1741
+FROM ghcr.io/ietf-tools/datatracker-app-base:20250116T2033
LABEL maintainer="IETF Tools Team "
ENV DEBIAN_FRONTEND=noninteractive
diff --git a/dev/build/TARGET_BASE b/dev/build/TARGET_BASE
index b5d33714f2..fd2d539a9e 100644
--- a/dev/build/TARGET_BASE
+++ b/dev/build/TARGET_BASE
@@ -1 +1 @@
-20241212T1741
+20250116T2033
From df27ba9934c15b02ad93ec3c6eb02ee0705730b4 Mon Sep 17 00:00:00 2001
From: Nicolas Giard
Date: Thu, 16 Jan 2025 16:21:42 -0500
Subject: [PATCH 032/460] ci: Update ci-run-tests.yml
---
.github/workflows/ci-run-tests.yml | 29 ++++++++++++++++++++++++++++-
1 file changed, 28 insertions(+), 1 deletion(-)
diff --git a/.github/workflows/ci-run-tests.yml b/.github/workflows/ci-run-tests.yml
index 9121bf8aea..278bd8af2f 100644
--- a/.github/workflows/ci-run-tests.yml
+++ b/.github/workflows/ci-run-tests.yml
@@ -13,7 +13,34 @@ on:
- 'package.json'
jobs:
+ # -----------------------------------------------------------------
+ # PREPARE
+ # -----------------------------------------------------------------
+ prepare:
+ name: Prepare
+ runs-on: ubuntu-latest
+ outputs:
+ base_image_version: ${{ steps.baseimgversion.outputs.base_image_version }}
+
+ steps:
+ - uses: actions/checkout@v4
+ with:
+ fetch-depth: 1
+ fetch-tags: false
+
+ - name: Get Base Image Target Version
+ id: baseimgversion
+ run: |
+ echo "base_image_version=$(sed -n '1p' dev/build/TARGET_BASE)" >> $GITHUB_OUTPUT
+
+ # -----------------------------------------------------------------
+ # TESTS
+ # -----------------------------------------------------------------
tests:
+ name: Run Tests
uses: ./.github/workflows/tests.yml
+ needs: [prepare]
with:
- ignoreLowerCoverage: false
\ No newline at end of file
+ ignoreLowerCoverage: false
+ skipSelenium: true
+ targetBaseVersion: ${{ needs.prepare.outputs.base_image_version }}
From c848a5a00bebef2662d0e189fe0f230333a71229 Mon Sep 17 00:00:00 2001
From: Jennifer Richards
Date: Fri, 17 Jan 2025 11:16:15 -0400
Subject: [PATCH 033/460] feat: async investigate_fragment task; celery results
backend (#8428)
* feat: investigate docs asynchronously
* refactor: move script to its own js file
* fix: adjust polling interval/duration
* test: test new task
* fix: extra tag/fix whitespace
* style: restore whitespace (I hope)
* style: black/standard styling
* test: fix test of investigate view
* test: improve/delint tests
---
ietf/doc/forms.py | 1 +
ietf/doc/tasks.py | 9 ++
ietf/doc/tests.py | 134 +++++++++++++++--
ietf/doc/tests_tasks.py | 13 ++
ietf/doc/views_doc.py | 65 ++++++++-
ietf/settings.py | 15 +-
ietf/static/js/investigate.js | 53 +++++++
ietf/templates/doc/investigate.html | 218 +++++++++++++++-------------
package.json | 1 +
requirements.txt | 1 +
10 files changed, 386 insertions(+), 124 deletions(-)
create mode 100644 ietf/static/js/investigate.js
diff --git a/ietf/doc/forms.py b/ietf/doc/forms.py
index f77b218318..8a1e9ecb98 100644
--- a/ietf/doc/forms.py
+++ b/ietf/doc/forms.py
@@ -276,6 +276,7 @@ class InvestigateForm(forms.Form):
),
min_length=8,
)
+ task_id = forms.CharField(required=False, widget=forms.HiddenInput)
def clean_name_fragment(self):
disallowed_characters = ["%", "/", "\\", "*"]
diff --git a/ietf/doc/tasks.py b/ietf/doc/tasks.py
index f1de459dd8..6eb901e6c7 100644
--- a/ietf/doc/tasks.py
+++ b/ietf/doc/tasks.py
@@ -31,6 +31,7 @@
generate_idnits2_rfcs_obsoleted,
update_or_create_draft_bibxml_file,
ensure_draft_bibxml_path_exists,
+ investigate_fragment,
)
@@ -119,3 +120,11 @@ def generate_draft_bibxml_files_task(days=7, process_all=False):
update_or_create_draft_bibxml_file(event.doc, event.rev)
except Exception as err:
log.log(f"Error generating bibxml for {event.doc.name}-{event.rev}: {err}")
+
+
+@shared_task(ignore_result=False)
+def investigate_fragment_task(name_fragment: str):
+ return {
+ "name_fragment": name_fragment,
+ "results": investigate_fragment(name_fragment),
+ }
diff --git a/ietf/doc/tests.py b/ietf/doc/tests.py
index 0630fcd8d4..f5af7bb48b 100644
--- a/ietf/doc/tests.py
+++ b/ietf/doc/tests.py
@@ -3280,7 +3280,8 @@ def test_investigate_fragment(self):
"draft-this-should-not-be-possible-00.txt",
)
- def test_investigate(self):
+ def test_investigate_get(self):
+ """GET with no querystring should retrieve the investigate UI"""
url = urlreverse("ietf.doc.views_doc.investigate")
login_testing_unauthorized(self, "secretary", url)
r = self.client.get(url)
@@ -3288,36 +3289,143 @@ def test_investigate(self):
q = PyQuery(r.content)
self.assertEqual(len(q("form#investigate")), 1)
self.assertEqual(len(q("div#results")), 0)
- r = self.client.post(url, dict(name_fragment="this-is-not-found"))
+
+ @mock.patch("ietf.doc.views_doc.AsyncResult")
+ def test_investigate_get_task_id(self, mock_asyncresult):
+ """GET with querystring should lookup task status"""
+ url = urlreverse("ietf.doc.views_doc.investigate")
+ login_testing_unauthorized(self, "secretary", url)
+ mock_asyncresult.return_value.ready.return_value = True
+ r = self.client.get(url + "?id=a-task-id")
+ self.assertEqual(r.status_code, 200)
+ self.assertEqual(r.json(), {"status": "ready"})
+ self.assertTrue(mock_asyncresult.called)
+ self.assertEqual(mock_asyncresult.call_args, mock.call("a-task-id"))
+ mock_asyncresult.reset_mock()
+
+ mock_asyncresult.return_value.ready.return_value = False
+ r = self.client.get(url + "?id=a-task-id")
+ self.assertEqual(r.status_code, 200)
+ self.assertEqual(r.json(), {"status": "notready"})
+ self.assertTrue(mock_asyncresult.called)
+ self.assertEqual(mock_asyncresult.call_args, mock.call("a-task-id"))
+
+ @mock.patch("ietf.doc.views_doc.investigate_fragment_task")
+ def test_investigate_post(self, mock_investigate_fragment_task):
+ """POST with a name_fragment and no task_id should start a celery task"""
+ url = urlreverse("ietf.doc.views_doc.investigate")
+ login_testing_unauthorized(self, "secretary", url)
+
+ # test some invalid cases
+ r = self.client.post(url, {"name_fragment": "short"}) # limit is >= 8 characters
self.assertEqual(r.status_code, 200)
q = PyQuery(r.content)
+ self.assertEqual(len(q("#id_name_fragment.is-invalid")), 1)
+ self.assertFalse(mock_investigate_fragment_task.delay.called)
+ for char in ["*", "%", "/", "\\"]:
+ r = self.client.post(url, {"name_fragment": f"bad{char}character"})
+ self.assertEqual(r.status_code, 200)
+ q = PyQuery(r.content)
+ self.assertEqual(len(q("#id_name_fragment.is-invalid")), 1)
+ self.assertFalse(mock_investigate_fragment_task.delay.called)
+
+ # now a valid one
+ mock_investigate_fragment_task.delay.return_value.id = "a-task-id"
+ r = self.client.post(url, {"name_fragment": "this-is-a-valid-fragment"})
+ self.assertEqual(r.status_code, 200)
+ self.assertTrue(mock_investigate_fragment_task.delay.called)
+ self.assertEqual(mock_investigate_fragment_task.delay.call_args, mock.call("this-is-a-valid-fragment"))
+ self.assertEqual(r.json(), {"id": "a-task-id"})
+
+ @mock.patch("ietf.doc.views_doc.AsyncResult")
+ def test_investigate_post_task_id(self, mock_asyncresult):
+ """POST with name_fragment and task_id should retrieve results"""
+ url = urlreverse("ietf.doc.views_doc.investigate")
+ login_testing_unauthorized(self, "secretary", url)
+
+ # First, test a non-successful result - this could be a failure or non-existent task id
+ mock_result = mock_asyncresult.return_value
+ mock_result.successful.return_value = False
+ r = self.client.post(url, {"name_fragment": "some-fragment", "task_id": "a-task-id"})
+ self.assertContains(r, "The investigation task failed.", status_code=200)
+ self.assertTrue(mock_asyncresult.called)
+ self.assertEqual(mock_asyncresult.call_args, mock.call("a-task-id"))
+ self.assertFalse(mock_result.get.called)
+ mock_asyncresult.reset_mock()
+ q = PyQuery(r.content)
+ self.assertEqual(q("#id_name_fragment").val(), "some-fragment")
+ self.assertEqual(q("#id_task_id").val(), "a-task-id")
+
+ # now the various successful result mixes
+ mock_result = mock_asyncresult.return_value
+ mock_result.successful.return_value = True
+ mock_result.get.return_value = {
+ "name_fragment": "different-fragment",
+ "results": {
+ "can_verify": set(),
+ "unverifiable_collections": set(),
+ "unexpected": set(),
+ }
+ }
+ r = self.client.post(url, {"name_fragment": "some-fragment", "task_id": "a-task-id"})
+ self.assertEqual(r.status_code, 200)
+ self.assertTrue(mock_asyncresult.called)
+ self.assertEqual(mock_asyncresult.call_args, mock.call("a-task-id"))
+ mock_asyncresult.reset_mock()
+ q = PyQuery(r.content)
+ self.assertEqual(q("#id_name_fragment").val(), "different-fragment", "name_fragment should be reset")
+ self.assertEqual(q("#id_task_id").val(), "", "task_id should be cleared")
self.assertEqual(len(q("div#results")), 1)
self.assertEqual(len(q("table#authenticated")), 0)
self.assertEqual(len(q("table#unverifiable")), 0)
self.assertEqual(len(q("table#unexpected")), 0)
- r = self.client.post(url, dict(name_fragment="mixed-provenance"))
+
+ # This file was created in setUp. It allows the view to render properly
+ # but its location / content don't matter for this test otherwise.
+ a_file_that_exists = Path(settings.INTERNET_DRAFT_PATH) / "draft-this-is-active-00.txt"
+
+ mock_result.get.return_value = {
+ "name_fragment": "different-fragment",
+ "results": {
+ "can_verify": {a_file_that_exists},
+ "unverifiable_collections": {a_file_that_exists},
+ "unexpected": set(),
+ }
+ }
+ r = self.client.post(url, {"name_fragment": "some-fragment", "task_id": "a-task-id"})
self.assertEqual(r.status_code, 200)
+ self.assertTrue(mock_asyncresult.called)
+ self.assertEqual(mock_asyncresult.call_args, mock.call("a-task-id"))
+ mock_asyncresult.reset_mock()
q = PyQuery(r.content)
+ self.assertEqual(q("#id_name_fragment").val(), "different-fragment", "name_fragment should be reset")
+ self.assertEqual(q("#id_task_id").val(), "", "task_id should be cleared")
self.assertEqual(len(q("div#results")), 1)
self.assertEqual(len(q("table#authenticated")), 1)
self.assertEqual(len(q("table#unverifiable")), 1)
self.assertEqual(len(q("table#unexpected")), 0)
- r = self.client.post(url, dict(name_fragment="not-be-possible"))
+
+ mock_result.get.return_value = {
+ "name_fragment": "different-fragment",
+ "results": {
+ "can_verify": set(),
+ "unverifiable_collections": set(),
+ "unexpected": {a_file_that_exists},
+ }
+ }
+ r = self.client.post(url, {"name_fragment": "some-fragment", "task_id": "a-task-id"})
self.assertEqual(r.status_code, 200)
+ self.assertTrue(mock_asyncresult.called)
+ self.assertEqual(mock_asyncresult.call_args, mock.call("a-task-id"))
+ mock_asyncresult.reset_mock()
q = PyQuery(r.content)
+ self.assertEqual(q("#id_name_fragment").val(), "different-fragment", "name_fragment should be reset")
+ self.assertEqual(q("#id_task_id").val(), "", "task_id should be cleared")
self.assertEqual(len(q("div#results")), 1)
self.assertEqual(len(q("table#authenticated")), 0)
self.assertEqual(len(q("table#unverifiable")), 0)
self.assertEqual(len(q("table#unexpected")), 1)
- r = self.client.post(url, dict(name_fragment="short"))
- self.assertEqual(r.status_code, 200)
- q = PyQuery(r.content)
- self.assertEqual(len(q("#id_name_fragment.is-invalid")), 1)
- for char in ["*", "%", "/", "\\"]:
- r = self.client.post(url, dict(name_fragment=f"bad{char}character"))
- self.assertEqual(r.status_code, 200)
- q = PyQuery(r.content)
- self.assertEqual(len(q("#id_name_fragment.is-invalid")), 1)
+
class LogIOErrorTests(TestCase):
diff --git a/ietf/doc/tests_tasks.py b/ietf/doc/tests_tasks.py
index 67997acd85..8a6ffa8be1 100644
--- a/ietf/doc/tests_tasks.py
+++ b/ietf/doc/tests_tasks.py
@@ -20,6 +20,7 @@
generate_draft_bibxml_files_task,
generate_idnits2_rfcs_obsoleted_task,
generate_idnits2_rfc_status_task,
+ investigate_fragment_task,
notify_expirations_task,
)
@@ -98,6 +99,18 @@ def test_expire_last_calls_task(self, mock_get_expired, mock_expire):
self.assertEqual(mock_expire.call_args_list[1], mock.call(docs[1]))
self.assertEqual(mock_expire.call_args_list[2], mock.call(docs[2]))
+ def test_investigate_fragment_task(self):
+ investigation_results = object() # singleton
+ with mock.patch(
+ "ietf.doc.tasks.investigate_fragment", return_value=investigation_results
+ ) as mock_inv:
+ retval = investigate_fragment_task("some fragment")
+ self.assertTrue(mock_inv.called)
+ self.assertEqual(mock_inv.call_args, mock.call("some fragment"))
+ self.assertEqual(
+ retval, {"name_fragment": "some fragment", "results": investigation_results}
+ )
+
class Idnits2SupportTests(TestCase):
settings_temp_path_overrides = TestCase.settings_temp_path_overrides + ['DERIVED_DIR']
diff --git a/ietf/doc/views_doc.py b/ietf/doc/views_doc.py
index 9f7cf12bcb..591a72d907 100644
--- a/ietf/doc/views_doc.py
+++ b/ietf/doc/views_doc.py
@@ -41,10 +41,11 @@
from pathlib import Path
+from celery.result import AsyncResult
from django.core.cache import caches
from django.core.exceptions import PermissionDenied
from django.db.models import Max
-from django.http import HttpResponse, Http404, HttpResponseBadRequest
+from django.http import HttpResponse, Http404, HttpResponseBadRequest, JsonResponse
from django.shortcuts import render, get_object_or_404, redirect
from django.template.loader import render_to_string
from django.urls import reverse as urlreverse
@@ -59,8 +60,9 @@
ConsensusDocEvent, NewRevisionDocEvent, TelechatDocEvent, WriteupDocEvent, IanaExpertDocEvent,
IESG_BALLOT_ACTIVE_STATES, STATUSCHANGE_RELATIONS, DocumentActionHolder, DocumentAuthor,
RelatedDocument, RelatedDocHistory)
+from ietf.doc.tasks import investigate_fragment_task
from ietf.doc.utils import (augment_events_with_revision,
- can_adopt_draft, can_unadopt_draft, get_chartering_type, get_tags_for_stream_id, investigate_fragment,
+ can_adopt_draft, can_unadopt_draft, get_chartering_type, get_tags_for_stream_id,
needed_ballot_positions, nice_consensus, update_telechat, has_same_ballot,
get_initial_notify, make_notify_changed_event, make_rev_history, default_consensus,
add_events_message_info, get_unicode_document_content,
@@ -2275,16 +2277,67 @@ def idnits2_state(request, name, rev=None):
content_type="text/plain;charset=utf-8",
)
+
@role_required("Secretariat")
def investigate(request):
+ """Investigate a fragment
+
+ A plain GET with no querystring returns the UI page.
+
+ POST with the task_id field empty starts an async task and returns a JSON response with
+ the ID needed to monitor the task for results.
+
+ GET with a querystring parameter "id" will poll the status of the async task and return "ready"
+ or "notready".
+
+ POST with the task_id field set to the id of a "ready" task will return its results or an error
+ if the task failed or the id is invalid (expired, never existed, etc).
+ """
results = None
+ # Start an investigation or retrieve a result on a POST
if request.method == "POST":
form = InvestigateForm(request.POST)
if form.is_valid():
- name_fragment = form.cleaned_data["name_fragment"]
- results = investigate_fragment(name_fragment)
+ task_id = form.cleaned_data["task_id"]
+ if task_id:
+ # Ignore the rest of the form and retrieve the result
+ task_result = AsyncResult(task_id)
+ if task_result.successful():
+ retval = task_result.get()
+ results = retval["results"]
+ form.data = form.data.copy()
+ form.data["name_fragment"] = retval[
+ "name_fragment"
+ ] # ensure consistency
+ del form.data["task_id"] # do not request the task result again
+ else:
+ form.add_error(
+ None,
+ "The investigation task failed. Please try again and ask for help if this recurs.",
+ )
+ # Falls through to the render at the end!
+ else:
+ name_fragment = form.cleaned_data["name_fragment"]
+ task_result = investigate_fragment_task.delay(name_fragment)
+ return JsonResponse({"id": task_result.id})
else:
- form = InvestigateForm()
+ task_id = request.GET.get("id", None)
+ if task_id is not None:
+ # Check status if we got the "id" parameter
+ task_result = AsyncResult(task_id)
+ return JsonResponse(
+ {"status": "ready" if task_result.ready() else "notready"}
+ )
+ else:
+ # Serve up an empty form
+ form = InvestigateForm()
+
+ # If we get here, it is just a plain GET - serve the UI
return render(
- request, "doc/investigate.html", context=dict(form=form, results=results)
+ request,
+ "doc/investigate.html",
+ context={
+ "form": form,
+ "results": results,
+ },
)
diff --git a/ietf/settings.py b/ietf/settings.py
index b452864be6..a1dc9ffe21 100644
--- a/ietf/settings.py
+++ b/ietf/settings.py
@@ -452,6 +452,7 @@ def skip_unreadable_post(record):
'django_vite',
'django_bootstrap5',
'django_celery_beat',
+ 'django_celery_results',
'corsheaders',
'django_markup',
'oidc_provider',
@@ -1226,7 +1227,9 @@ def skip_unreadable_post(record):
# https://docs.celeryq.dev/en/stable/userguide/tasks.html#rpc-result-backend-rabbitmq-qpid
# Results can be retrieved only once and only by the caller of the task. Results will be
# lost if the message broker restarts.
-CELERY_RESULT_BACKEND = 'rpc://' # sends a msg via the msg broker
+CELERY_RESULT_BACKEND = 'django-cache' # use a Django cache for results
+CELERY_CACHE_BACKEND = 'celery-results' # which Django cache to use
+CELERY_RESULT_EXPIRES = datetime.timedelta(minutes=5) # how long are results valid? (Default is 1 day)
CELERY_TASK_IGNORE_RESULT = True # ignore results unless specifically enabled for a task
# Meetecho API setup: Uncomment this and provide real credentials to enable
@@ -1309,6 +1312,11 @@ def skip_unreadable_post(record):
"MAX_ENTRIES": 5000,
},
},
+ "celery-results": {
+ "BACKEND": "django.core.cache.backends.memcached.PyMemcacheCache",
+ "LOCATION": f"{MEMCACHED_HOST}:{MEMCACHED_PORT}",
+ "KEY_PREFIX": "ietf:celery",
+ },
}
else:
CACHES = {
@@ -1347,6 +1355,11 @@ def skip_unreadable_post(record):
"MAX_ENTRIES": 5000,
},
},
+ "celery-results": {
+ "BACKEND": "django.core.cache.backends.memcached.PyMemcacheCache",
+ "LOCATION": "app:11211",
+ "KEY_PREFIX": "ietf:celery",
+ },
}
PUBLISH_IPR_STATES = ['posted', 'removed', 'removed_objfalse']
diff --git a/ietf/static/js/investigate.js b/ietf/static/js/investigate.js
new file mode 100644
index 0000000000..b22e099b1e
--- /dev/null
+++ b/ietf/static/js/investigate.js
@@ -0,0 +1,53 @@
+// Copyright The IETF Trust 2025, All Rights Reserved
+document.addEventListener('DOMContentLoaded', () => {
+ const investigateForm = document.forms['investigate']
+ investigateForm.addEventListener('submit', (event) => {
+ // Intercept submission unless we've filled in the task_id field
+ if (!investigateForm.elements['id_task_id'].value) {
+ event.preventDefault()
+ runInvestigation()
+ }
+ })
+
+ const runInvestigation = async () => {
+ // Submit the request
+ const response = await fetch('', {
+ method: investigateForm.method, body: new FormData(investigateForm)
+ })
+ if (!response.ok) {
+ loadResultsFromTask('bogus-task-id') // bad task id will generate an error from Django
+ }
+ const taskId = (await response.json()).id
+ // Poll for completion of the investigation up to 18*10 = 180 seconds
+ waitForResults(taskId, 18)
+ }
+
+ const waitForResults = async (taskId, retries) => {
+ // indicate that investigation is in progress
+ document.getElementById('spinner').classList.remove('d-none')
+ document.getElementById('investigate-button').disabled = true
+ investigateForm.elements['id_name_fragment'].disabled = true
+
+ const response = await fetch('?' + new URLSearchParams({ id: taskId }))
+ if (!response.ok) {
+ loadResultsFromTask('bogus-task-id') // bad task id will generate an error from Django
+ }
+ const result = await response.json()
+ if (result.status !== 'ready' && retries > 0) {
+ // 10 seconds per retry
+ setTimeout(waitForResults, 10000, taskId, retries - 1)
+ } else {
+ /* Either the response is ready or we timed out waiting. In either case, submit
+ the task_id via POST and let Django display an error if it's not ready. Before
+ submitting, re-enable the form fields so the POST is valid. Other in-progress
+ indicators will be reset when the POST response is loaded. */
+ loadResultsFromTask(taskId)
+ }
+ }
+
+ const loadResultsFromTask = (taskId) => {
+ investigateForm.elements['id_name_fragment'].disabled = false
+ investigateForm.elements['id_task_id'].value = taskId
+ investigateForm.submit()
+ }
+})
diff --git a/ietf/templates/doc/investigate.html b/ietf/templates/doc/investigate.html
index bdcf644406..436a8ce91a 100644
--- a/ietf/templates/doc/investigate.html
+++ b/ietf/templates/doc/investigate.html
@@ -6,112 +6,122 @@
{% endblock %}
{% block content %}
- {% origin %}
-
Investigate
-
- {% if results %}
-
- {% if results.can_verify %}
-
These can be authenticated
-
-
-
-
Name
-
Last Modified On
-
Link
-
Source
-
-
-
- {% for path in results.can_verify %}
- {% with url=path|url_for_path %}
-
-
{{path.name}}
-
- {% if path|mtime_is_epoch %}
- Timestamp has been lost (is Unix Epoch)
- {% else %}
- {{path|mtime|date:"DATETIME_FORMAT"}}
- {% endif %}
-
")
+ with patch("ietf.meeting.views.generate_proceedings_content") as mock_gpc:
+ mock_gpc.return_value = cached_content
+ r = self.client.get(url)
self.assertEqual(r.status_code, 200)
+ self.assertIn(cached_content, r.content.decode())
+ self.assertTemplateUsed(r, "meeting/proceedings_wrapper.html")
+ self.assertTemplateNotUsed(r, "meeting/proceedings.html")
+ # These are rendered in proceedings_wrapper.html, so test them here
if len(meeting.city) > 0:
self.assertContains(r, meeting.city)
if len(meeting.venue_name) > 0:
self.assertContains(r, meeting.venue_name)
+ self._assertMeetingHostsDisplayed(PyQuery(r.content), meeting)
+
+ @patch("ietf.meeting.utils.caches")
+ def test_generate_proceedings_content(self, mock_caches):
+ # number must be >97 (settings.PROCEEDINGS_VERSION_CHANGES)
+ meeting = make_meeting_test_data(meeting=MeetingFactory(type_id='ietf', number='100'))
+
+ # First, check that by default a value in the cache is used without doing any other computation
+ mock_default_cache = mock_caches["default"]
+ mock_default_cache.get.return_value = "a cached value"
+ result = generate_proceedings_content(meeting)
+ self.assertEqual(result, "a cached value")
+ self.assertFalse(mock_default_cache.set.called)
+ self.assertTrue(mock_default_cache.get.called)
+ cache_key = mock_default_cache.get.call_args.args[0]
+ mock_default_cache.get.reset_mock()
+
+ # Now set up for actual computation of the proceedings content.
+ session = Session.objects.filter(meeting=meeting, group__acronym="mars").first()
+ GroupEventFactory(group=session.group,type='status_update')
+ SessionPresentationFactory(document__type_id='recording',session=session)
+ SessionPresentationFactory(document__type_id='recording',session=session,document__title="Audio recording for tests")
+
+ # Add various group sessions
+ groups = []
+ parent_groups = [
+ GroupFactory.create(type_id="area", acronym="gen"),
+ GroupFactory.create(acronym="iab"),
+ GroupFactory.create(acronym="irtf"),
+ ]
+ for parent in parent_groups:
+ groups.append(GroupFactory.create(parent=parent))
+ for acronym in ["rsab", "edu"]:
+ groups.append(GroupFactory.create(acronym=acronym))
+ for group in groups:
+ SessionFactory(meeting=meeting, group=group)
+
+ self.write_materials_files(meeting, session)
+ self._create_proceedings_materials(meeting)
+
+ # Now "empty" the mock cache and see that we compute the expected proceedings content.
+ mock_default_cache.get.return_value = None
+ proceedings_content = generate_proceedings_content(meeting)
+ self.assertTrue(mock_default_cache.get.called)
+ self.assertEqual(mock_default_cache.get.call_args.args[0], cache_key, "same cache key each time")
+ self.assertTrue(mock_default_cache.set.called)
+ self.assertEqual(mock_default_cache.set.call_args, call(cache_key, proceedings_content, timeout=86400))
+ mock_default_cache.get.reset_mock()
+ mock_default_cache.set.reset_mock()
# standard items on every proceedings
- pq = PyQuery(r.content)
+ pq = PyQuery(proceedings_content)
self.assertNotEqual(
pq('a[href="{}"]'.format(
urlreverse('ietf.meeting.views.proceedings_overview', kwargs=dict(num=meeting.number)))
@@ -8405,9 +8458,17 @@ def test_proceedings(self):
)
# configurable contents
- self._assertMeetingHostsDisplayed(r, meeting)
- self._assertProceedingsMaterialsDisplayed(r, meeting)
- self._assertGroupSessions(r, meeting)
+ self._assertProceedingsMaterialsDisplayed(pq, meeting)
+ self._assertGroupSessions(pq)
+
+ # Finally, repeat the first cache test, but now with force_refresh=True. The cached value
+ # should be ignored and we should recompute the proceedings as before.
+ mock_default_cache.get.return_value = "a cached value"
+ result = generate_proceedings_content(meeting, force_refresh=True)
+ self.assertEqual(result, proceedings_content) # should have recomputed the same thing
+ self.assertFalse(mock_default_cache.get.called, "don't bother reading cache when force_refresh is True")
+ self.assertTrue(mock_default_cache.set.called)
+ self.assertEqual(mock_default_cache.set.call_args, call(cache_key, proceedings_content, timeout=86400))
def test_named_session(self):
"""Session with a name should appear separately in the proceedings"""
diff --git a/ietf/meeting/utils.py b/ietf/meeting/utils.py
index 6e681fdc31..92bae5ac23 100644
--- a/ietf/meeting/utils.py
+++ b/ietf/meeting/utils.py
@@ -3,6 +3,8 @@
import datetime
import itertools
import os
+from hashlib import sha384
+
import pytz
import subprocess
@@ -11,8 +13,9 @@
from django.conf import settings
from django.contrib import messages
+from django.core.cache import caches
from django.core.files.base import ContentFile
-from django.db.models import OuterRef, Subquery, TextField, Q, Value
+from django.db.models import OuterRef, Subquery, TextField, Q, Value, Max
from django.db.models.functions import Coalesce
from django.template.loader import render_to_string
from django.utils import timezone
@@ -995,3 +998,169 @@ def participants_for_meeting(meeting):
sessions = meeting.session_set.filter(Q(type='plenary') | Q(group__type__in=['wg', 'rg']))
attended = Attended.objects.filter(session__in=sessions).values_list('person', flat=True).distinct()
return (checked_in, attended)
+
+
+def generate_proceedings_content(meeting, force_refresh=False):
+ """Render proceedings content for a meeting and update cache
+
+ :meeting: meeting whose proceedings should be rendered
+ :force_refresh: true to force regeneration and cache refresh
+ """
+ cache = caches["default"]
+ cache_version = Document.objects.filter(session__meeting__number=meeting.number).aggregate(Max('time'))["time__max"]
+ # Include proceedings_final in the bare_key so we'll always reflect that accurately, even at the cost of
+ # a recomputation in the view
+ bare_key = f"proceedings.{meeting.number}.{cache_version}.final={meeting.proceedings_final}"
+ cache_key = sha384(bare_key.encode("utf8")).hexdigest()
+ if not force_refresh:
+ cached_content = cache.get(cache_key, None)
+ if cached_content is not None:
+ return cached_content
+
+ def area_and_group_acronyms_from_session(s):
+ area = s.group_parent_at_the_time()
+ if area == None:
+ area = s.group.parent
+ group = s.group_at_the_time()
+ return (area.acronym, group.acronym)
+
+ schedule = meeting.schedule
+ sessions = (
+ meeting.session_set.with_current_status()
+ .filter(Q(timeslotassignments__schedule__in=[schedule, schedule.base if schedule else None])
+ | Q(current_status='notmeet'))
+ .select_related()
+ .order_by('-current_status')
+ )
+
+ plenaries, _ = organize_proceedings_sessions(
+ sessions.filter(name__icontains='plenary')
+ .exclude(current_status='notmeet')
+ )
+ irtf_meeting, irtf_not_meeting = organize_proceedings_sessions(
+ sessions.filter(group__parent__acronym = 'irtf').order_by('group__acronym')
+ )
+ # per Colin (datatracker #5010) - don't report not meeting rags
+ irtf_not_meeting = [item for item in irtf_not_meeting if item["group"].type_id != "rag"]
+ irtf = {"meeting_groups":irtf_meeting, "not_meeting_groups":irtf_not_meeting}
+
+ training, _ = organize_proceedings_sessions(
+ sessions.filter(group__acronym__in=['edu','iaoc'], type_id__in=['regular', 'other',])
+ .exclude(current_status='notmeet')
+ )
+ iab, _ = organize_proceedings_sessions(
+ sessions.filter(group__parent__acronym = 'iab')
+ .exclude(current_status='notmeet')
+ )
+ editorial, _ = organize_proceedings_sessions(
+ sessions.filter(group__acronym__in=['rsab','rswg'])
+ .exclude(current_status='notmeet')
+ )
+
+ ietf = sessions.filter(group__parent__type__slug = 'area').exclude(group__acronym__in=['edu','iepg','tools'])
+ ietf = list(ietf)
+ ietf.sort(key=lambda s: area_and_group_acronyms_from_session(s))
+ ietf_areas = []
+ for area, area_sessions in itertools.groupby(ietf, key=lambda s: s.group_parent_at_the_time()):
+ meeting_groups, not_meeting_groups = organize_proceedings_sessions(area_sessions)
+ ietf_areas.append((area, meeting_groups, not_meeting_groups))
+
+ with timezone.override(meeting.tz()):
+ rendered_content = render_to_string(
+ "meeting/proceedings.html",
+ {
+ 'meeting': meeting,
+ 'plenaries': plenaries,
+ 'training': training,
+ 'irtf': irtf,
+ 'iab': iab,
+ 'editorial': editorial,
+ 'ietf_areas': ietf_areas,
+ 'meetinghost_logo': {
+ 'max_height': settings.MEETINGHOST_LOGO_MAX_DISPLAY_HEIGHT,
+ 'max_width': settings.MEETINGHOST_LOGO_MAX_DISPLAY_WIDTH,
+ }
+ },
+ )
+ cache.set(
+ cache_key,
+ rendered_content,
+ timeout=86400, # one day, in seconds
+ )
+ return rendered_content
+
+
+def organize_proceedings_sessions(sessions):
+ # Collect sessions by Group, then bin by session name (including sessions with blank names).
+ # If all of a group's sessions are 'notmeet', the processed data goes in not_meeting_sessions.
+ # Otherwise, the data goes in meeting_sessions.
+ meeting_groups = []
+ not_meeting_groups = []
+ for group_acronym, group_sessions in itertools.groupby(sessions, key=lambda s: s.group.acronym):
+ by_name = {}
+ is_meeting = False
+ all_canceled = True
+ group = None
+ for s in sorted(
+ group_sessions,
+ key=lambda gs: (
+ gs.official_timeslotassignment().timeslot.time
+ if gs.official_timeslotassignment() else datetime.datetime(datetime.MAXYEAR, 1, 1)
+ ),
+ ):
+ group = s.group
+ if s.current_status != 'notmeet':
+ is_meeting = True
+ if s.current_status != 'canceled':
+ all_canceled = False
+ by_name.setdefault(s.name, [])
+ if s.current_status != 'notmeet' or s.presentations.exists():
+ by_name[s.name].append(s) # for notmeet, only include sessions with materials
+ for sess_name, ss in by_name.items():
+ session = ss[0] if ss else None
+ def _format_materials(items):
+ """Format session/material for template
+
+ Input is a list of (session, materials) pairs. The materials value can be a single value or a list.
+ """
+ material_times = {} # key is material, value is first timestamp it appeared
+ for s, mats in items:
+ tsa = s.official_timeslotassignment()
+ timestamp = tsa.timeslot.time if tsa else None
+ if not isinstance(mats, list):
+ mats = [mats]
+ for mat in mats:
+ if mat and mat not in material_times:
+ material_times[mat] = timestamp
+ n_mats = len(material_times)
+ result = []
+ if n_mats == 1:
+ result.append({'material': list(material_times)[0]}) # no 'time' when only a single material
+ elif n_mats > 1:
+ for mat, timestamp in material_times.items():
+ result.append({'material': mat, 'time': timestamp})
+ return result
+
+ entry = {
+ 'group': group,
+ 'name': sess_name,
+ 'session': session,
+ 'canceled': all_canceled,
+ 'has_materials': s.presentations.exists(),
+ 'agendas': _format_materials((s, s.agenda()) for s in ss),
+ 'minutes': _format_materials((s, s.minutes()) for s in ss),
+ 'bluesheets': _format_materials((s, s.bluesheets()) for s in ss),
+ 'recordings': _format_materials((s, s.recordings()) for s in ss),
+ 'meetecho_recordings': _format_materials((s, [s.session_recording_url()]) for s in ss),
+ 'chatlogs': _format_materials((s, s.chatlogs()) for s in ss),
+ 'slides': _format_materials((s, s.slides()) for s in ss),
+ 'drafts': _format_materials((s, s.drafts()) for s in ss),
+ 'last_update': session.last_update if hasattr(session, 'last_update') else None
+ }
+ if session and session.meeting.type_id == 'ietf' and not session.meeting.proceedings_final:
+ entry['attendances'] = _format_materials((s, s) for s in ss if Attended.objects.filter(session=s).exists())
+ if is_meeting:
+ meeting_groups.append(entry)
+ else:
+ not_meeting_groups.append(entry)
+ return meeting_groups, not_meeting_groups
diff --git a/ietf/meeting/views.py b/ietf/meeting/views.py
index 2f2464028b..1226e30d60 100644
--- a/ietf/meeting/views.py
+++ b/ietf/meeting/views.py
@@ -75,7 +75,13 @@
from ietf.meeting.helpers import send_interim_approval
from ietf.meeting.helpers import send_interim_approval_request
from ietf.meeting.helpers import send_interim_announcement_request, sessions_post_cancel
-from ietf.meeting.utils import finalize, sort_accept_tuple, condition_slide_order
+from ietf.meeting.utils import (
+ condition_slide_order,
+ finalize,
+ generate_proceedings_content,
+ organize_proceedings_sessions,
+ sort_accept_tuple,
+)
from ietf.meeting.utils import add_event_info_to_session_qs
from ietf.meeting.utils import session_time_for_sorting
from ietf.meeting.utils import session_requested_by, SaveMaterialsError
@@ -4128,93 +4134,10 @@ def upcoming_json(request):
response = HttpResponse(json.dumps(data, indent=2, sort_keys=False), content_type='application/json;charset=%s'%settings.DEFAULT_CHARSET)
return response
-def organize_proceedings_sessions(sessions):
- # Collect sessions by Group, then bin by session name (including sessions with blank names).
- # If all of a group's sessions are 'notmeet', the processed data goes in not_meeting_sessions.
- # Otherwise, the data goes in meeting_sessions.
- meeting_groups = []
- not_meeting_groups = []
- for group_acronym, group_sessions in itertools.groupby(sessions, key=lambda s: s.group.acronym):
- by_name = {}
- is_meeting = False
- all_canceled = True
- group = None
- for s in sorted(
- group_sessions,
- key=lambda gs: (
- gs.official_timeslotassignment().timeslot.time
- if gs.official_timeslotassignment() else datetime.datetime(datetime.MAXYEAR, 1, 1)
- ),
- ):
- group = s.group
- if s.current_status != 'notmeet':
- is_meeting = True
- if s.current_status != 'canceled':
- all_canceled = False
- by_name.setdefault(s.name, [])
- if s.current_status != 'notmeet' or s.presentations.exists():
- by_name[s.name].append(s) # for notmeet, only include sessions with materials
- for sess_name, ss in by_name.items():
- session = ss[0] if ss else None
- def _format_materials(items):
- """Format session/material for template
-
- Input is a list of (session, materials) pairs. The materials value can be a single value or a list.
- """
- material_times = {} # key is material, value is first timestamp it appeared
- for s, mats in items:
- tsa = s.official_timeslotassignment()
- timestamp = tsa.timeslot.time if tsa else None
- if not isinstance(mats, list):
- mats = [mats]
- for mat in mats:
- if mat and mat not in material_times:
- material_times[mat] = timestamp
- n_mats = len(material_times)
- result = []
- if n_mats == 1:
- result.append({'material': list(material_times)[0]}) # no 'time' when only a single material
- elif n_mats > 1:
- for mat, timestamp in material_times.items():
- result.append({'material': mat, 'time': timestamp})
- return result
-
- entry = {
- 'group': group,
- 'name': sess_name,
- 'session': session,
- 'canceled': all_canceled,
- 'has_materials': s.presentations.exists(),
- 'agendas': _format_materials((s, s.agenda()) for s in ss),
- 'minutes': _format_materials((s, s.minutes()) for s in ss),
- 'bluesheets': _format_materials((s, s.bluesheets()) for s in ss),
- 'recordings': _format_materials((s, s.recordings()) for s in ss),
- 'meetecho_recordings': _format_materials((s, [s.session_recording_url()]) for s in ss),
- 'chatlogs': _format_materials((s, s.chatlogs()) for s in ss),
- 'slides': _format_materials((s, s.slides()) for s in ss),
- 'drafts': _format_materials((s, s.drafts()) for s in ss),
- 'last_update': session.last_update if hasattr(session, 'last_update') else None
- }
- if session and session.meeting.type_id == 'ietf' and not session.meeting.proceedings_final:
- entry['attendances'] = _format_materials((s, s) for s in ss if Attended.objects.filter(session=s).exists())
- if is_meeting:
- meeting_groups.append(entry)
- else:
- not_meeting_groups.append(entry)
- return meeting_groups, not_meeting_groups
-
def proceedings(request, num=None):
-
- def area_and_group_acronyms_from_session(s):
- area = s.group_parent_at_the_time()
- if area == None:
- area = s.group.parent
- group = s.group_at_the_time()
- return (area.acronym, group.acronym)
-
meeting = get_meeting(num)
-
+
# Early proceedings were hosted on www.ietf.org rather than the datatracker
if meeting.proceedings_format_version == 1:
return HttpResponseRedirect(settings.PROCEEDINGS_V1_BASE_URL.format(meeting=meeting))
@@ -4225,72 +4148,12 @@ def area_and_group_acronyms_from_session(s):
kwargs['num'] = num
return redirect('ietf.meeting.views.materials', **kwargs)
- begin_date = meeting.get_submission_start_date()
- cut_off_date = meeting.get_submission_cut_off_date()
- cor_cut_off_date = meeting.get_submission_correction_date()
- today_utc = date_today(datetime.timezone.utc)
-
- schedule = get_schedule(meeting, None)
- sessions = (
- meeting.session_set.with_current_status()
- .filter(Q(timeslotassignments__schedule__in=[schedule, schedule.base if schedule else None])
- | Q(current_status='notmeet'))
- .select_related()
- .order_by('-current_status')
- )
-
- plenaries, _ = organize_proceedings_sessions(
- sessions.filter(name__icontains='plenary')
- .exclude(current_status='notmeet')
- )
- irtf_meeting, irtf_not_meeting = organize_proceedings_sessions(
- sessions.filter(group__parent__acronym = 'irtf').order_by('group__acronym')
- )
- # per Colin (datatracker #5010) - don't report not meeting rags
- irtf_not_meeting = [item for item in irtf_not_meeting if item["group"].type_id != "rag"]
- irtf = {"meeting_groups":irtf_meeting, "not_meeting_groups":irtf_not_meeting}
-
- training, _ = organize_proceedings_sessions(
- sessions.filter(group__acronym__in=['edu','iaoc'], type_id__in=['regular', 'other',])
- .exclude(current_status='notmeet')
- )
- iab, _ = organize_proceedings_sessions(
- sessions.filter(group__parent__acronym = 'iab')
- .exclude(current_status='notmeet')
- )
- editorial, _ = organize_proceedings_sessions(
- sessions.filter(group__acronym__in=['rsab','rswg'])
- .exclude(current_status='notmeet')
- )
-
- ietf = sessions.filter(group__parent__type__slug = 'area').exclude(group__acronym__in=['edu','iepg','tools'])
- ietf = list(ietf)
- ietf.sort(key=lambda s: area_and_group_acronyms_from_session(s))
- ietf_areas = []
- for area, area_sessions in itertools.groupby(ietf, key=lambda s: s.group_parent_at_the_time()):
- meeting_groups, not_meeting_groups = organize_proceedings_sessions(area_sessions)
- ietf_areas.append((area, meeting_groups, not_meeting_groups))
-
- cache_version = Document.objects.filter(session__meeting__number=meeting.number).aggregate(Max('time'))["time__max"]
with timezone.override(meeting.tz()):
- return render(request, "meeting/proceedings.html", {
+ return render(request, "meeting/proceedings_wrapper.html", {
'meeting': meeting,
- 'plenaries': plenaries,
- 'training': training,
- 'irtf': irtf,
- 'iab': iab,
- 'editorial': editorial,
- 'ietf_areas': ietf_areas,
- 'cut_off_date': cut_off_date,
- 'cor_cut_off_date': cor_cut_off_date,
- 'submission_started': today_utc > begin_date,
- 'cache_version': cache_version,
'attendance': meeting.get_attendance(),
- 'meetinghost_logo': {
- 'max_height': settings.MEETINGHOST_LOGO_MAX_DISPLAY_HEIGHT,
- 'max_width': settings.MEETINGHOST_LOGO_MAX_DISPLAY_WIDTH,
- }
+ 'proceedings_content': generate_proceedings_content(meeting),
})
@role_required('Secretariat')
diff --git a/ietf/templates/meeting/proceedings.html b/ietf/templates/meeting/proceedings.html
index b5d4a6198a..0aa8197fe9 100644
--- a/ietf/templates/meeting/proceedings.html
+++ b/ietf/templates/meeting/proceedings.html
@@ -1,184 +1,160 @@
-{% extends "base.html" %}
-{# Copyright The IETF Trust 2015, All Rights Reserved #}
-{% load origin %}
-{% load ietf_filters static %}
-{% block pagehead %}
-
-{% endblock %}
-{% block title %}
- IETF {{ meeting.number }}
- {% if not meeting.proceedings_final %}Draft{% endif %}
- Proceedings
-{% endblock %}
-{% block content %}
- {% origin %}
- {% include 'meeting/proceedings/title.html' with meeting=meeting attendance=attendance only %}
- {% if user|has_role:"Secretariat" and not meeting.proceedings_final %}
-
- Finalize proceedings
-
- {% endif %}
- {# cache for 15 minutes, as long as there's no proceedings activity. takes 4-8 seconds to generate. #}
- {% load cache %}
- {% cache 900 ietf_meeting_proceedings meeting.number cache_version %}
- {% include 'meeting/proceedings/introduction.html' with meeting=meeting only %}
-
- {% if plenaries %}
-
Plenaries
-
-
-
-
Group
-
Artifacts
-
Recordings
-
Slides
-
Internet-Drafts
-
-
-
- {% for entry in plenaries %}
- {% include "meeting/group_proceedings.html" with entry=entry meeting=meeting show_agenda=True only %}
- {% endfor %}
-
-
- {% endif %}
-
- {% for area, meeting_groups, not_meeting_groups in ietf_areas %}
-
- {{ area.acronym|upper }} {{ area.name }}
-
- {% if meeting_groups %}
-
-
-
-
Group
-
Artifacts
-
Recordings
-
Slides
-
Internet-Drafts
-
-
-
- {% for entry in meeting_groups %}
- {% include "meeting/group_proceedings.html" with entry=entry meeting=meeting show_agenda=True only %}
- {% endfor %}
-
-
- {% endif %}
- {% if not_meeting_groups %}
-
- {{ area.name }} groups not meeting:
- {% for entry in not_meeting_groups %}
- {% if entry.name == "" %}{# do not show named sessions in this list #}
-
- {{ entry.group.acronym }}
- {% if not forloop.last %},{% endif %}
- {% endif %}
- {% endfor %}
-
-
-
-
-
-
-
-
-
-
-
-
- {% for entry in not_meeting_groups %}{% if entry.has_materials %}
- {% include "meeting/group_proceedings.html" with entry=entry meeting=meeting show_agenda=True only %}
- {% endif %}{% endfor %}
-
-
- {% endif %}
- {% endfor %}
-
- {% if training %}
-
Training
+{% include 'meeting/proceedings/introduction.html' with meeting=meeting only %}
+
+{% if plenaries %}
+
Plenaries
+
+
+
+
Group
+
Artifacts
+
Recordings
+
Slides
+
Internet-Drafts
+
+
+
+ {% for entry in plenaries %}
+ {% include "meeting/group_proceedings.html" with entry=entry meeting=meeting show_agenda=True only %}
+ {% endfor %}
+
+
+{% endif %}
+
+{% for area, meeting_groups, not_meeting_groups in ietf_areas %}
+
+ {{ area.acronym|upper }} {{ area.name }}
+
+ {% if meeting_groups %}
-
-
Group
-
Artifacts
-
Recordings
-
Slides
-
Internet-Drafts
-
+
+
Group
+
Artifacts
+
Recordings
+
Slides
+
Internet-Drafts
+
- {% for entry in training %}
- {% include "meeting/group_proceedings.html" with entry=entry meeting=meeting show_agenda=False only %}
- {% endfor %}
+ {% for entry in meeting_groups %}
+ {% include "meeting/group_proceedings.html" with entry=entry meeting=meeting show_agenda=True only %}
+ {% endfor %}
{% endif %}
-
- {% if iab %}
-
- IAB Internet Architecture Board
-
-
+ {% if not_meeting_groups %}
+
+ {{ area.name }} groups not meeting:
+ {% for entry in not_meeting_groups %}
+ {% if entry.name == "" %}{# do not show named sessions in this list #}
+
+ {{ entry.group.acronym }}
+ {% if not forloop.last %},{% endif %}
+ {% endif %}
+ {% endfor %}
+
+
-
-
- Group
-
-
- Artifacts
-
-
- Recordings
-
-
- Slides
-
-
- Internet-Drafts
-
-
+
+
+
+
+
+
+
- {% for entry in iab %}
- {% include "meeting/group_proceedings.html" with entry=entry meeting=meeting show_agenda=True only %}
- {% endfor %}
+ {% for entry in not_meeting_groups %}{% if entry.has_materials %}
+ {% include "meeting/group_proceedings.html" with entry=entry meeting=meeting show_agenda=True only %}
+ {% endif %}{% endfor %}
{% endif %}
-
- {% if irtf.meeting_groups %}
-
- IRTF Internet Research Task Force
-
-
-
-
-
- Group
-
-
- Artifacts
-
-
- Recordings
-
-
- Slides
-
-
- Internet-Drafts
-
-
-
-
- {% for entry in irtf.meeting_groups %}
- {% include "meeting/group_proceedings.html" with entry=entry meeting=meeting show_agenda=True only %}
- {% endfor %}
-
-
- {% if irtf.not_meeting_groups %}
+{% endfor %}
+
+{% if training %}
+
Training
+
+
+
+
Group
+
Artifacts
+
Recordings
+
Slides
+
Internet-Drafts
+
+
+
+ {% for entry in training %}
+ {% include "meeting/group_proceedings.html" with entry=entry meeting=meeting show_agenda=False only %}
+ {% endfor %}
+
+
+{% endif %}
+
+{% if iab %}
+
+ IAB Internet Architecture Board
+
+
+
+
+
+ Group
+
+
+ Artifacts
+
+
+ Recordings
+
+
+ Slides
+
+
+ Internet-Drafts
+
+
+
+
+ {% for entry in iab %}
+ {% include "meeting/group_proceedings.html" with entry=entry meeting=meeting show_agenda=True only %}
+ {% endfor %}
+
+
+{% endif %}
+
+{% if irtf.meeting_groups %}
+
+ IRTF Internet Research Task Force
+
+
+
+
+
+ Group
+
+
+ Artifacts
+
+
+ Recordings
+
+
+ Slides
+
+
+ Internet-Drafts
+
+
+
+
+ {% for entry in irtf.meeting_groups %}
+ {% include "meeting/group_proceedings.html" with entry=entry meeting=meeting show_agenda=True only %}
+ {% endfor %}
+
+
+ {% if irtf.not_meeting_groups %}
IRTF groups not meeting:
{% for entry in irtf.not_meeting_groups %}
@@ -191,18 +167,18 @@
-
-
-
-
-
-
-
+
+
+
+
+
+
+
- {% for entry in irtf.not_meeting %}{% if entry.has_materials %}
- {% include "meeting/group_proceedings.html" with entry=entry meeting=meeting show_agenda=True only %}
- {% endif %}{% endfor %}
+ {% for entry in irtf.not_meeting %}{% if entry.has_materials %}
+ {% include "meeting/group_proceedings.html" with entry=entry meeting=meeting show_agenda=True only %}
+ {% endif %}{% endfor %}
{% endif %}
@@ -211,35 +187,29 @@
Editorial Stream
-
-
- Group
-
-
- Artifacts
-
-
- Recordings
-
-
- Slides
-
-
- Internet-Drafts
-
-
+
+
+ Group
+
+
+ Artifacts
+
+
+ Recordings
+
+
+ Slides
+
+
+ Internet-Drafts
+
+
- {% for entry in editorial %}
- {% include "meeting/group_proceedings.html" with entry=entry meeting=meeting show_agenda=True only %}
- {% endfor %}
+ {% for entry in editorial %}
+ {% include "meeting/group_proceedings.html" with entry=entry meeting=meeting show_agenda=True only %}
+ {% endfor %}
{% endif %}
- {% endif %}
-{% endcache %}
-{% endblock %}
-{% block js %}
-
-{% endblock %}
+{% endif %}
diff --git a/ietf/templates/meeting/proceedings_wrapper.html b/ietf/templates/meeting/proceedings_wrapper.html
new file mode 100644
index 0000000000..a20291a693
--- /dev/null
+++ b/ietf/templates/meeting/proceedings_wrapper.html
@@ -0,0 +1,27 @@
+{% extends "base.html" %}
+{# Copyright The IETF Trust 2015, All Rights Reserved #}
+{% load origin %}
+{% load ietf_filters static %}
+{% block pagehead %}
+
+{% endblock %}
+{% block title %}
+ IETF {{ meeting.number }}
+ {% if not meeting.proceedings_final %}Draft{% endif %}
+ Proceedings
+{% endblock %}
+{% block content %}
+ {% origin %}
+ {% include 'meeting/proceedings/title.html' with meeting=meeting attendance=attendance only %}
+ {% if user|has_role:"Secretariat" and not meeting.proceedings_final %}
+
+ Finalize proceedings
+
+ {% endif %}
+ {{ proceedings_content }}
+{% endblock %}
+{% block js %}
+
+{% endblock %}
From ed19b54d8a39fca72a292181bdb2c12e9b63a5b6 Mon Sep 17 00:00:00 2001
From: Jennifer Richards
Date: Thu, 6 Feb 2025 14:35:07 -0400
Subject: [PATCH 058/460] fix: deal with "GMT" time zone in iCalendar files
(#8506)
* fix: omit vtimezone when None
* fix: fix ics time format for tz=GMT
---
ietf/doc/templatetags/ietf_filters.py | 5 ++++-
ietf/templates/meeting/agenda.ics | 10 +++++-----
2 files changed, 9 insertions(+), 6 deletions(-)
diff --git a/ietf/doc/templatetags/ietf_filters.py b/ietf/doc/templatetags/ietf_filters.py
index e1a80a26b2..86507eeaaa 100644
--- a/ietf/doc/templatetags/ietf_filters.py
+++ b/ietf/doc/templatetags/ietf_filters.py
@@ -533,11 +533,14 @@ def ics_date_time(dt, tzname):
>>> ics_date_time(datetime.datetime(2022,1,2,3,4,5), 'UTC')
':20220102T030405Z'
+ >>> ics_date_time(datetime.datetime(2022,1,2,3,4,5), 'GmT')
+ ':20220102T030405Z'
+
>>> ics_date_time(datetime.datetime(2022,1,2,3,4,5), 'America/Los_Angeles')
';TZID=America/Los_Angeles:20220102T030405'
"""
timestamp = dt.strftime('%Y%m%dT%H%M%S')
- if tzname.lower() == 'utc':
+ if tzname.lower() in ('gmt', 'utc'):
return f':{timestamp}Z'
else:
return f';TZID={ics_esc(tzname)}:{timestamp}'
diff --git a/ietf/templates/meeting/agenda.ics b/ietf/templates/meeting/agenda.ics
index 72b7ab806c..8bc8222bbe 100644
--- a/ietf/templates/meeting/agenda.ics
+++ b/ietf/templates/meeting/agenda.ics
@@ -1,15 +1,15 @@
-{% load humanize tz %}{% autoescape off %}{% timezone schedule.meeting.tz %}{% load ietf_filters textfilters %}{% load cache %}{% cache 1800 ietf_meeting_agenda_ics schedule.meeting.number request.path request.GET %}BEGIN:VCALENDAR
+{% load humanize tz %}{% autoescape off %}{% timezone schedule.meeting.tz %}{% with tzname=schedule.meeting.time_zone|lower %}{% load ietf_filters textfilters %}{% load cache %}{% cache 1800 ietf_meeting_agenda_ics schedule.meeting.number request.path request.GET %}BEGIN:VCALENDAR
VERSION:2.0
METHOD:PUBLISH
PRODID:-//IETF//datatracker.ietf.org ical agenda//EN
-{{schedule.meeting.vtimezone}}{% for item in assignments %}BEGIN:VEVENT
+{% if tzname != "utc" and tzname != "gmt" %}{% firstof schedule.meeting.vtimezone "" %}{% endif %}{% for item in assignments %}BEGIN:VEVENT
UID:ietf-{{schedule.meeting.number}}-{{item.timeslot.pk}}-{{item.session.group.acronym}}
SUMMARY:{% if item.session.name %}{{item.session.name|ics_esc}}{% else %}{{item.session.group_at_the_time.acronym|lower}} - {{item.session.group_at_the_time.name}}{%endif%}{% if item.session.agenda_note %} ({{item.session.agenda_note}}){% endif %}
{% if item.timeslot.show_location %}LOCATION:{{item.timeslot.get_location}}
{% endif %}STATUS:{{item.session.ical_status}}
CLASS:PUBLIC
-DTSTART{% ics_date_time item.timeslot.local_start_time schedule.meeting.time_zone %}
-DTEND{% ics_date_time item.timeslot.local_end_time schedule.meeting.time_zone %}
+DTSTART{% ics_date_time item.timeslot.local_start_time tzname %}
+DTEND{% ics_date_time item.timeslot.local_end_time tzname %}
DTSTAMP{% ics_date_time item.timeslot.modified|utc 'utc' %}{% if item.session.agenda %}
URL:{{item.session.agenda.get_versionless_href}}{% endif %}
DESCRIPTION:{{item.timeslot.name|ics_esc}}\n{% if item.session.agenda_note %}
@@ -29,4 +29,4 @@ DESCRIPTION:{{item.timeslot.name|ics_esc}}\n{% if item.session.agenda_note %}
\n{# link agenda for ietf meetings #}
See in schedule: {% absurl 'agenda' num=schedule.meeting.number %}#row-{{ item.slug }}\n{% endif %}
END:VEVENT
-{% endfor %}END:VCALENDAR{% endcache %}{% endtimezone %}{% endautoescape %}
+{% endfor %}END:VCALENDAR{% endcache %}{% endwith %}{% endtimezone %}{% endautoescape %}
From d1ec7378da3ecb66d3e492219db75fda69681df4 Mon Sep 17 00:00:00 2001
From: Nicolas Giard
Date: Fri, 14 Feb 2025 18:14:03 -0500
Subject: [PATCH 059/460] fix: use groupAcronym to determine hackathon icon in
agenda (#8540)
* fix: use groupAcronym to determine hackathon icon in agenda
* test: fix agenda tests related to hackathon
---
client/agenda/AgendaScheduleList.vue | 4 ++--
playwright/helpers/meeting.js | 2 +-
playwright/tests/meeting/agenda.spec.js | 6 +++---
3 files changed, 6 insertions(+), 6 deletions(-)
diff --git a/client/agenda/AgendaScheduleList.vue b/client/agenda/AgendaScheduleList.vue
index e7c14797ee..905677b4da 100644
--- a/client/agenda/AgendaScheduleList.vue
+++ b/client/agenda/AgendaScheduleList.vue
@@ -307,7 +307,7 @@ const meetingEvents = computed(() => {
})
}
// -> Point to Wiki for Hackathon sessions, HedgeDocs otherwise
- if (item.name.toLowerCase().includes('hackathon')) {
+ if (item.groupAcronym === 'hackathon') {
links.push({
id: `lnk-${item.id}-wiki`,
label: 'Wiki',
@@ -461,7 +461,7 @@ const meetingEvents = computed(() => {
case 'other':
if (item.name.toLowerCase().indexOf('office hours') >= 0) {
icon = 'bi-building'
- } else if (item.name.toLowerCase().indexOf('hackathon') >= 0) {
+ } else if (item.groupAcronym === 'hackathon') {
icon = 'bi-command bi-pink'
}
break
diff --git a/playwright/helpers/meeting.js b/playwright/helpers/meeting.js
index 52bc331fd6..9722ffc68b 100644
--- a/playwright/helpers/meeting.js
+++ b/playwright/helpers/meeting.js
@@ -395,7 +395,7 @@ module.exports = {
name: 'Hackathon Kickoff',
startDateTime: day1.set({ hour: 10, minute: 30 }),
duration: '30m',
- ...findAreaGroup('hackathon-kickoff', categories[2]),
+ ...findAreaGroup('hackathon', categories[2]),
showAgenda: true,
hasAgenda: true,
hasRecordings: true,
diff --git a/playwright/tests/meeting/agenda.spec.js b/playwright/tests/meeting/agenda.spec.js
index d31dbd5c2c..e6c6e17031 100644
--- a/playwright/tests/meeting/agenda.spec.js
+++ b/playwright/tests/meeting/agenda.spec.js
@@ -238,7 +238,7 @@ test.describe('past - desktop', () => {
// Name column
// -----------
// Event icon
- if (['break', 'plenary'].includes(event.type) || (event.type === 'other' && ['office hours', 'hackathon'].some(s => event.name.toLowerCase().indexOf(s) >= 0))) {
+ if (['break', 'plenary'].includes(event.type) || (event.type === 'other' && event.name.toLowerCase().indexOf('office hours') >= 0)) {
await expect(row.locator('.agenda-table-cell-name > i.bi')).toBeVisible()
}
// Name link
@@ -286,7 +286,7 @@ test.describe('past - desktop', () => {
// No meeting materials yet warning badge
await expect(eventButtons.locator('.no-meeting-materials')).toBeVisible()
}
- if (event.name.toLowerCase().includes('hackathon')) {
+ if (event.groupAcronym === 'hackathon') {
// Hackathon Wiki button
const hackathonWikiLink = `https://wiki.ietf.org/meeting/${meetingData.meeting.number}/hackathon`
await expect(eventButtons.locator(`#btn-lnk-${event.id}-wiki`)).toHaveAttribute('href', hackathonWikiLink)
@@ -1169,7 +1169,7 @@ test.describe('future - desktop', () => {
// No meeting materials yet warning badge
await expect(eventButtons.locator('.no-meeting-materials')).toBeVisible()
}
- if (event.name.toLowerCase().includes('hackathon')) {
+ if (event.groupAcronym === 'hackathon') {
// Hackathon Wiki button
const hackathonWikiLink = `https://wiki.ietf.org/meeting/${meetingData.meeting.number}/hackathon`
await expect(eventButtons.locator(`#btn-lnk-${event.id}-wiki`)).toHaveAttribute('href', hackathonWikiLink)
From 853de88546b25d1a59477e31580014351c9f0913 Mon Sep 17 00:00:00 2001
From: Matthew Holloway
Date: Sat, 15 Feb 2025 12:30:24 +1300
Subject: [PATCH 060/460] feat: CSS other-session-selected. Fixes #8525 (#8529)
---
ietf/static/css/ietf.scss | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/ietf/static/css/ietf.scss b/ietf/static/css/ietf.scss
index f1f2b94a19..aee93dcfe1 100644
--- a/ietf/static/css/ietf.scss
+++ b/ietf/static/css/ietf.scss
@@ -695,8 +695,8 @@ td.position-empty {
}
.edit-meeting-schedule .session.other-session-selected {
- outline: var(--bs-info) solid 0.2em;
- /* width matches margin on .session */
+ outline: 0.3em solid var(--bs-info);
+ box-shadow: 0 0 1em var(--bs-info);
z-index: 2;
/* render above timeslot outlines */
}
From 04952d58fa05f8b47884f621d9c1c510df0eb9a6 Mon Sep 17 00:00:00 2001
From: Jennifer Richards
Date: Tue, 18 Feb 2025 12:33:33 -0400
Subject: [PATCH 061/460] fix: remove all staging files for submission (#8535)
---
ietf/submit/utils.py | 12 ++++--------
1 file changed, 4 insertions(+), 8 deletions(-)
diff --git a/ietf/submit/utils.py b/ietf/submit/utils.py
index 49f28c4196..fe52f1c774 100644
--- a/ietf/submit/utils.py
+++ b/ietf/submit/utils.py
@@ -671,20 +671,16 @@ def move_files_to_repository(submission):
raise ValueError("Intended to move '%s' to '%s', but found source and destination missing.")
-def remove_staging_files(name, rev, exts=None):
- """Remove staging files corresponding to a submission
-
- exts is a list of extensions to be removed. If None, defaults to settings.IDSUBMIT_FILE_TYPES.
- """
- if exts is None:
- exts = [f'.{ext}' for ext in settings.IDSUBMIT_FILE_TYPES]
+def remove_staging_files(name, rev):
+ """Remove staging files corresponding to a submission"""
basename = pathlib.Path(settings.IDSUBMIT_STAGING_PATH) / f'{name}-{rev}'
+ exts = [f'.{ext}' for ext in settings.IDSUBMIT_FILE_TYPES]
for ext in exts:
basename.with_suffix(ext).unlink(missing_ok=True)
def remove_submission_files(submission):
- remove_staging_files(submission.name, submission.rev, submission.file_types.split(','))
+ remove_staging_files(submission.name, submission.rev)
def approvable_submissions_for_user(user):
From e71272fd2f2047d092fca76ad56b1ebe7899a27c Mon Sep 17 00:00:00 2001
From: Robert Sparks
Date: Wed, 19 Feb 2025 15:09:17 -0600
Subject: [PATCH 062/460] chore: mark a function unreachable (#8553)
---
ietf/submit/utils.py | 1 +
1 file changed, 1 insertion(+)
diff --git a/ietf/submit/utils.py b/ietf/submit/utils.py
index fe52f1c774..f19f2384c8 100644
--- a/ietf/submit/utils.py
+++ b/ietf/submit/utils.py
@@ -646,6 +646,7 @@ def cancel_submission(submission):
def rename_submission_files(submission, prev_rev, new_rev):
+ log.unreachable("2025-2-19")
for ext in settings.IDSUBMIT_FILE_TYPES:
staging_path = Path(settings.IDSUBMIT_STAGING_PATH)
source = staging_path / f"{submission.name}-{prev_rev}.{ext}"
From 997239a2eaf70d39a95c1bb465f4106c2ab1d544 Mon Sep 17 00:00:00 2001
From: Robert Sparks
Date: Wed, 19 Feb 2025 17:41:10 -0600
Subject: [PATCH 063/460] feat: write objects to blob storage (#8557)
* feat: basic blobstore infrastructure for dev
* refactor: (broken) attempt to put minio console behind nginx
* feat: initialize blobstore with boto3
* fix: abandon attempt to proxy minio. Use docker compose instead.
* feat: beginning of blob writes
* feat: storage utilities
* feat: test buckets
* chore: black
* chore: remove unused import
* chore: avoid f string when not needed
* fix: inform all settings files about blobstores
* fix: declare types for some settings
* ci: point to new target base
* ci: adjust test workflow
* fix: give the tests debug environment a blobstore
* fix: "better" name declarations
* ci: use devblobstore container
* chore: identify places to write to blobstorage
* chore: remove unreachable code
* feat: store materials
* feat: store statements
* feat: store status changes
* feat: store liaison attachments
* feat: store agendas provided with Interim session requests
* chore: capture TODOs
* feat: store polls and chatlogs
* chore: remove unneeded TODO
* feat: store drafts on submit and post
* fix: handle storage during doc expiration and resurrection
* fix: mirror an unlink
* chore: add/refine TODOs
* feat: store slide submissions
* fix: structure slide test correctly
* fix: correct sense of existence check
* feat: store some indexes
* feat: BlobShadowFileSystemStorage
* feat: shadow floorplans / host logos to the blob
* chore: remove unused import
* feat: strip path from blob shadow names
* feat: shadow photos / thumbs
* refactor: combine photo and photothumb blob kinds
The photos / thumbs were already dropped in the same
directory, so let's not add a distinction at this point.
* style: whitespace
* refactor: use kwargs consistently
* chore: migrations
* refactor: better deconstruct(); rebuild migrations
* fix: use new class in mock patch
* chore: add TODO
* feat: store group index documents
* chore: identify more TODO
* feat: store reviews
* fix: repair merge
* chore: remove unnecessary TODO
* feat: StoredObject metadata
* fix: deburr some debugging code
* fix: only set the deleted timestamp once
* chore: correct typo
* fix: get_or_create vs get and test
* fix: avoid the questionable is_seekable helper
* chore: capture future design consideration
* chore: blob store cfg for k8s
* chore: black
* chore: copyright
* ci: bucket name prefix option + run Black
Adds/uses DATATRACKER_BLOB_STORE_BUCKET_PREFIX option. Other changes
are just Black styling.
* ci: fix typo in bucket name expression
* chore: parameters in app-configure-blobstore
Allows use with other blob stores.
* ci: remove verify=False option
* fix: don't return value from __init__
* feat: option to log timing of S3Storage calls
* chore: units
* fix: deleted->null when storing a file
* style: Black
* feat: log as JSON; refactor to share code; handle exceptions
* ci: add ietf_log_blob_timing option for k8s
* test: --no-manage-blobstore option for running tests
* test: use blob store settings from env, if set
* test: actually set a couple more storage opts
* feat: offswitch (#8541)
* feat: offswitch
* fix: apply ENABLE_BLOBSTORAGE to BlobShadowFileSystemStorage behavior
* chore: log timing of blob reads
* chore: import Config from botocore.config
* chore(deps): import boto3-stubs / botocore
botocore is implicitly imported, but make it explicit
since we refer to it directly
* chore: drop type annotation that mypy loudly ignores
* refactor: add storage methods via mixin
Shares code between Document and DocHistory without
putting it in the base DocumentInfo class, which
lacks the name field. Also makes mypy happy.
* feat: add timeout / retry limit to boto client
* ci: let k8s config the timeouts via env
* chore: repair merge resolution typo
* chore: tweak settings imports
* chore: simplify k8s/settings_local.py imports
---------
Co-authored-by: Jennifer Richards
---
.devcontainer/docker-compose.extend.yml | 4 +
.github/workflows/tests.yml | 2 +
README.md | 17 ++
dev/deploy-to-container/settings_local.py | 23 ++-
dev/diff/settings_local.py | 23 ++-
dev/tests/docker-compose.debug.yml | 3 +
dev/tests/settings_local.py | 23 ++-
docker-compose.yml | 10 +
docker/app.Dockerfile | 4 +-
docker/configs/settings_local.py | 27 ++-
docker/docker-compose.extend.yml | 4 +
docker/scripts/app-configure-blobstore.py | 28 +++
docker/scripts/app-init.sh | 5 +
ietf/api/tests.py | 5 +
ietf/doc/admin.py | 8 +-
ietf/doc/expire.py | 14 ++
...ject_storedobject_unique_name_per_store.py | 66 ++++++
ietf/doc/models.py | 85 +++++++-
ietf/doc/resources.py | 25 ++-
ietf/doc/storage_backends.py | 192 ++++++++++++++++++
ietf/doc/storage_utils.py | 103 ++++++++++
ietf/doc/tasks.py | 4 +-
ietf/doc/tests_bofreq.py | 3 +
ietf/doc/tests_charter.py | 6 +
ietf/doc/tests_conflict_review.py | 2 +
ietf/doc/tests_draft.py | 13 ++
ietf/doc/tests_material.py | 6 +
ietf/doc/tests_review.py | 5 +
ietf/doc/tests_statement.py | 17 ++
ietf/doc/tests_status_change.py | 18 +-
ietf/doc/utils.py | 2 +-
ietf/doc/views_bofreq.py | 2 +
ietf/doc/views_charter.py | 8 +-
ietf/doc/views_conflict_review.py | 6 +-
ietf/doc/views_draft.py | 6 +
ietf/doc/views_material.py | 2 +
ietf/doc/views_review.py | 1 +
ietf/doc/views_statement.py | 13 +-
ietf/doc/views_status_change.py | 6 +-
ietf/group/tasks.py | 11 +
ietf/group/tests_info.py | 35 ++--
ietf/idindex/tasks.py | 4 +
ietf/idindex/tests.py | 5 +
ietf/liaisons/forms.py | 2 +
ietf/liaisons/tests.py | 38 +++-
ietf/meeting/factories.py | 5 +
ietf/meeting/forms.py | 1 +
ietf/meeting/helpers.py | 5 +
..._floorplan_image_alter_meetinghost_logo.py | 56 +++++
ietf/meeting/models.py | 15 +-
ietf/meeting/tests_views.py | 126 ++++++++++--
ietf/meeting/utils.py | 17 +-
ietf/meeting/views.py | 17 +-
ietf/nomcom/models.py | 1 +
...r_person_photo_alter_person_photo_thumb.py | 38 ++++
ietf/person/models.py | 16 +-
ietf/settings.py | 38 ++++
ietf/settings_test.py | 30 ++-
ietf/submit/tests.py | 94 ++++++++-
ietf/submit/utils.py | 20 +-
ietf/utils/storage.py | 56 ++++-
ietf/utils/test_runner.py | 70 ++++++-
k8s/settings_local.py | 103 ++++++++--
requirements.txt | 6 +-
64 files changed, 1484 insertions(+), 116 deletions(-)
create mode 100755 docker/scripts/app-configure-blobstore.py
create mode 100644 ietf/doc/migrations/0025_storedobject_storedobject_unique_name_per_store.py
create mode 100644 ietf/doc/storage_backends.py
create mode 100644 ietf/doc/storage_utils.py
create mode 100644 ietf/meeting/migrations/0010_alter_floorplan_image_alter_meetinghost_logo.py
create mode 100644 ietf/person/migrations/0004_alter_person_photo_alter_person_photo_thumb.py
diff --git a/.devcontainer/docker-compose.extend.yml b/.devcontainer/docker-compose.extend.yml
index fa9a412cf2..286eefb29c 100644
--- a/.devcontainer/docker-compose.extend.yml
+++ b/.devcontainer/docker-compose.extend.yml
@@ -14,6 +14,10 @@ services:
# - datatracker-vscode-ext:/root/.vscode-server/extensions
# Runs app on the same network as the database container, allows "forwardPorts" in devcontainer.json function.
network_mode: service:db
+ blobstore:
+ ports:
+ - '9000'
+ - '9001'
volumes:
datatracker-vscode-ext:
diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml
index 5457415f59..f10c1db9a3 100644
--- a/.github/workflows/tests.yml
+++ b/.github/workflows/tests.yml
@@ -28,6 +28,8 @@ jobs:
services:
db:
image: ghcr.io/ietf-tools/datatracker-db:latest
+ blobstore:
+ image: ghcr.io/ietf-tools/datatracker-devblobstore:latest
steps:
- uses: actions/checkout@v4
diff --git a/README.md b/README.md
index ee9865ba21..0ece0eb03b 100644
--- a/README.md
+++ b/README.md
@@ -106,6 +106,23 @@ Nightly database dumps of the datatracker are available as Docker images: `ghcr.
> Note that to update the database in your dev environment to the latest version, you should run the `docker/cleandb` script.
+### Blob storage for dev/test
+
+The dev and test environments use [minio](https://github.com/minio/minio) to provide local blob storage. See the settings files for how the app container communicates with the blobstore container. If you need to work with minio directly from outside the containers (to interact with its api or console), use `docker compose` from the top level directory of your clone to expose it at an ephemeral port.
+
+```
+$ docker compose port blobstore 9001
+0.0.0.0:
+
+$ curl -I http://localhost:
+HTTP/1.1 200 OK
+...
+```
+
+
+The minio container exposes the minio api at port 9000 and the minio console at port 9001
+
+
### Frontend Development
#### Intro
diff --git a/dev/deploy-to-container/settings_local.py b/dev/deploy-to-container/settings_local.py
index 0a991ae9fe..e878206bd5 100644
--- a/dev/deploy-to-container/settings_local.py
+++ b/dev/deploy-to-container/settings_local.py
@@ -1,7 +1,9 @@
# Copyright The IETF Trust 2007-2019, All Rights Reserved
# -*- coding: utf-8 -*-
-from ietf.settings import * # pyflakes:ignore
+from ietf.settings import * # pyflakes:ignore
+from ietf.settings import STORAGES, MORE_STORAGE_NAMES, BLOBSTORAGE_CONNECT_TIMEOUT, BLOBSTORAGE_READ_TIMEOUT, BLOBSTORAGE_MAX_ATTEMPTS
+import botocore.config
ALLOWED_HOSTS = ['*']
@@ -79,3 +81,22 @@
# OIDC configuration
SITE_URL = 'https://__HOSTNAME__'
+
+for storagename in MORE_STORAGE_NAMES:
+ STORAGES[storagename] = {
+ "BACKEND": "ietf.doc.storage_backends.CustomS3Storage",
+ "OPTIONS": dict(
+ endpoint_url="http://blobstore:9000",
+ access_key="minio_root",
+ secret_key="minio_pass",
+ security_token=None,
+ client_config=botocore.config.Config(
+ signature_version="s3v4",
+ connect_timeout=BLOBSTORAGE_CONNECT_TIMEOUT,
+ read_timeout=BLOBSTORAGE_READ_TIMEOUT,
+ retries={"total_max_attempts": BLOBSTORAGE_MAX_ATTEMPTS},
+ ),
+ verify=False,
+ bucket_name=f"test-{storagename}",
+ ),
+ }
diff --git a/dev/diff/settings_local.py b/dev/diff/settings_local.py
index 95d1e481c9..9e0806a8a6 100644
--- a/dev/diff/settings_local.py
+++ b/dev/diff/settings_local.py
@@ -1,7 +1,9 @@
# Copyright The IETF Trust 2007-2019, All Rights Reserved
# -*- coding: utf-8 -*-
-from ietf.settings import * # pyflakes:ignore
+from ietf.settings import * # pyflakes:ignore
+from ietf.settings import STORAGES, MORE_STORAGE_NAMES, BLOBSTORAGE_CONNECT_TIMEOUT, BLOBSTORAGE_READ_TIMEOUT, BLOBSTORAGE_MAX_ATTEMPTS
+import botocore.config
ALLOWED_HOSTS = ['*']
@@ -66,3 +68,22 @@
SLIDE_STAGING_PATH = 'test/staging/'
DE_GFM_BINARY = '/usr/local/bin/de-gfm'
+
+for storagename in MORE_STORAGE_NAMES:
+ STORAGES[storagename] = {
+ "BACKEND": "ietf.doc.storage_backends.CustomS3Storage",
+ "OPTIONS": dict(
+ endpoint_url="http://blobstore:9000",
+ access_key="minio_root",
+ secret_key="minio_pass",
+ security_token=None,
+ client_config=botocore.config.Config(
+ signature_version="s3v4",
+ connect_timeout=BLOBSTORAGE_CONNECT_TIMEOUT,
+ read_timeout=BLOBSTORAGE_READ_TIMEOUT,
+ retries={"total_max_attempts": BLOBSTORAGE_MAX_ATTEMPTS},
+ ),
+ verify=False,
+ bucket_name=f"test-{storagename}",
+ ),
+ }
diff --git a/dev/tests/docker-compose.debug.yml b/dev/tests/docker-compose.debug.yml
index 8d939e0ea2..8117b92375 100644
--- a/dev/tests/docker-compose.debug.yml
+++ b/dev/tests/docker-compose.debug.yml
@@ -28,5 +28,8 @@ services:
volumes:
- postgresdb-data:/var/lib/postgresql/data
+ blobstore:
+ image: ghcr.io/ietf-tools/datatracker-devblobstore:latest
+
volumes:
postgresdb-data:
diff --git a/dev/tests/settings_local.py b/dev/tests/settings_local.py
index 7b10bee06a..f2166053a7 100644
--- a/dev/tests/settings_local.py
+++ b/dev/tests/settings_local.py
@@ -1,7 +1,9 @@
# Copyright The IETF Trust 2007-2019, All Rights Reserved
# -*- coding: utf-8 -*-
-from ietf.settings import * # pyflakes:ignore
+from ietf.settings import * # pyflakes:ignore
+from ietf.settings import STORAGES, MORE_STORAGE_NAMES, BLOBSTORAGE_CONNECT_TIMEOUT, BLOBSTORAGE_READ_TIMEOUT, BLOBSTORAGE_MAX_ATTEMPTS
+import botocore.config
ALLOWED_HOSTS = ['*']
@@ -65,3 +67,22 @@
SLIDE_STAGING_PATH = 'test/staging/'
DE_GFM_BINARY = '/usr/local/bin/de-gfm'
+
+for storagename in MORE_STORAGE_NAMES:
+ STORAGES[storagename] = {
+ "BACKEND": "ietf.doc.storage_backends.CustomS3Storage",
+ "OPTIONS": dict(
+ endpoint_url="http://blobstore:9000",
+ access_key="minio_root",
+ secret_key="minio_pass",
+ security_token=None,
+ client_config=botocore.config.Config(
+ signature_version="s3v4",
+ connect_timeout=BLOBSTORAGE_CONNECT_TIMEOUT,
+ read_timeout=BLOBSTORAGE_READ_TIMEOUT,
+ retries={"total_max_attempts": BLOBSTORAGE_MAX_ATTEMPTS},
+ ),
+ verify=False,
+ bucket_name=f"test-{storagename}",
+ ),
+ }
diff --git a/docker-compose.yml b/docker-compose.yml
index 9988b10c2e..30ce8ba4d2 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -15,6 +15,7 @@ services:
depends_on:
- db
- mq
+ - blobstore
ipc: host
@@ -83,6 +84,14 @@ services:
- .:/workspace
- app-assets:/assets
+ blobstore:
+ image: ghcr.io/ietf-tools/datatracker-devblobstore:latest
+ restart: unless-stopped
+ volumes:
+ - "minio-data:/data"
+
+
+
# Celery Beat is a periodic task runner. It is not normally needed for development,
# but can be enabled by uncommenting the following.
#
@@ -106,3 +115,4 @@ services:
volumes:
postgresdb-data:
app-assets:
+ minio-data:
diff --git a/docker/app.Dockerfile b/docker/app.Dockerfile
index b7dd44b6f1..fee3833733 100644
--- a/docker/app.Dockerfile
+++ b/docker/app.Dockerfile
@@ -43,8 +43,8 @@ RUN rm -rf /tmp/library-scripts
# Copy the startup file
COPY docker/scripts/app-init.sh /docker-init.sh
COPY docker/scripts/app-start.sh /docker-start.sh
-RUN sed -i 's/\r$//' /docker-init.sh && chmod +x /docker-init.sh
-RUN sed -i 's/\r$//' /docker-start.sh && chmod +x /docker-start.sh
+RUN sed -i 's/\r$//' /docker-init.sh && chmod +rx /docker-init.sh
+RUN sed -i 's/\r$//' /docker-start.sh && chmod +rx /docker-start.sh
# Fix user UID / GID to match host
RUN groupmod --gid $USER_GID $USERNAME \
diff --git a/docker/configs/settings_local.py b/docker/configs/settings_local.py
index 5df5d15e82..46833451c1 100644
--- a/docker/configs/settings_local.py
+++ b/docker/configs/settings_local.py
@@ -1,11 +1,13 @@
-# Copyright The IETF Trust 2007-2019, All Rights Reserved
+# Copyright The IETF Trust 2007-2025, All Rights Reserved
# -*- coding: utf-8 -*-
-from ietf.settings import * # pyflakes:ignore
+from ietf.settings import * # pyflakes:ignore
+from ietf.settings import STORAGES, MORE_STORAGE_NAMES, BLOBSTORAGE_CONNECT_TIMEOUT, BLOBSTORAGE_READ_TIMEOUT, BLOBSTORAGE_MAX_ATTEMPTS
+import botocore.config
ALLOWED_HOSTS = ['*']
-from ietf.settings_postgresqldb import DATABASES # pyflakes:ignore
+from ietf.settings_postgresqldb import DATABASES # pyflakes:ignore
IDSUBMIT_IDNITS_BINARY = "/usr/local/bin/idnits"
IDSUBMIT_STAGING_PATH = "/assets/www6s/staging/"
@@ -37,6 +39,25 @@
# DEV_TEMPLATE_CONTEXT_PROCESSORS = [
# 'ietf.context_processors.sql_debug',
# ]
+for storagename in MORE_STORAGE_NAMES:
+ STORAGES[storagename] = {
+ "BACKEND": "ietf.doc.storage_backends.CustomS3Storage",
+ "OPTIONS": dict(
+ endpoint_url="http://blobstore:9000",
+ access_key="minio_root",
+ secret_key="minio_pass",
+ security_token=None,
+ client_config=botocore.config.Config(
+ signature_version="s3v4",
+ connect_timeout=BLOBSTORAGE_CONNECT_TIMEOUT,
+ read_timeout=BLOBSTORAGE_READ_TIMEOUT,
+ retries={"total_max_attempts": BLOBSTORAGE_MAX_ATTEMPTS},
+ ),
+ verify=False,
+ bucket_name=storagename,
+ ),
+ }
+
DOCUMENT_PATH_PATTERN = '/assets/ietfdata/doc/{doc.type_id}/'
INTERNET_DRAFT_PATH = '/assets/ietf-ftp/internet-drafts/'
diff --git a/docker/docker-compose.extend.yml b/docker/docker-compose.extend.yml
index 0538c0d3e9..a69a453110 100644
--- a/docker/docker-compose.extend.yml
+++ b/docker/docker-compose.extend.yml
@@ -16,6 +16,10 @@ services:
pgadmin:
ports:
- '5433'
+ blobstore:
+ ports:
+ - '9000'
+ - '9001'
celery:
volumes:
- .:/workspace
diff --git a/docker/scripts/app-configure-blobstore.py b/docker/scripts/app-configure-blobstore.py
new file mode 100755
index 0000000000..7b5ce962eb
--- /dev/null
+++ b/docker/scripts/app-configure-blobstore.py
@@ -0,0 +1,29 @@
+#!/usr/bin/env python
+# Copyright The IETF Trust 2024, All Rights Reserved
+
+import boto3
+import botocore.config  # needed for botocore.config.Config below
+import os
+import sys
+
+from ietf.settings import MORE_STORAGE_NAMES
+
+
+def init_blobstore():
+    blobstore = boto3.resource(
+        "s3",
+        endpoint_url=os.environ.get("BLOB_STORE_ENDPOINT_URL", "http://blobstore:9000"),
+        aws_access_key_id=os.environ.get("BLOB_STORE_ACCESS_KEY", "minio_root"),
+        aws_secret_access_key=os.environ.get("BLOB_STORE_SECRET_KEY", "minio_pass"),
+        aws_session_token=None,
+        config=botocore.config.Config(signature_version="s3v4"),
+        verify=False,
+    )
+    for bucketname in MORE_STORAGE_NAMES:
+        blobstore.create_bucket(
+            Bucket=f"{os.environ.get('BLOB_STORE_BUCKET_PREFIX', '')}{bucketname}".strip()
+        )
+
+
+if __name__ == "__main__":
+    sys.exit(init_blobstore())
diff --git a/docker/scripts/app-init.sh b/docker/scripts/app-init.sh
index b96b88f1f5..e970398ac2 100755
--- a/docker/scripts/app-init.sh
+++ b/docker/scripts/app-init.sh
@@ -73,6 +73,11 @@ echo "Creating data directories..."
chmod +x ./docker/scripts/app-create-dirs.sh
./docker/scripts/app-create-dirs.sh
+# Configure the development blobstore
+
+echo "Configuring blobstore..."
+PYTHONPATH=/workspace python ./docker/scripts/app-configure-blobstore.py
+
# Download latest coverage results file
echo "Downloading latest coverage results file..."
diff --git a/ietf/api/tests.py b/ietf/api/tests.py
index d9af457e95..ac0b37a608 100644
--- a/ietf/api/tests.py
+++ b/ietf/api/tests.py
@@ -25,6 +25,7 @@
import debug # pyflakes:ignore
import ietf
+from ietf.doc.storage_utils import retrieve_str
from ietf.doc.utils import get_unicode_document_content
from ietf.doc.models import RelatedDocument, State
from ietf.doc.factories import IndividualDraftFactory, WgDraftFactory, WgRfcFactory
@@ -553,6 +554,10 @@ def test_api_upload_polls_and_chatlog(self):
newdoc = session.presentations.get(document__type_id=type_id).document
newdoccontent = get_unicode_document_content(newdoc.name, Path(session.meeting.get_materials_path()) / type_id / newdoc.uploaded_filename)
self.assertEqual(json.loads(content), json.loads(newdoccontent))
+ self.assertEqual(
+ json.loads(retrieve_str(type_id, newdoc.uploaded_filename)),
+ json.loads(content)
+ )
def test_api_upload_bluesheet(self):
url = urlreverse("ietf.meeting.views.api_upload_bluesheet")
diff --git a/ietf/doc/admin.py b/ietf/doc/admin.py
index 301d32d7cc..db3b24b2d2 100644
--- a/ietf/doc/admin.py
+++ b/ietf/doc/admin.py
@@ -12,7 +12,7 @@
TelechatDocEvent, BallotPositionDocEvent, ReviewRequestDocEvent, InitialReviewDocEvent,
AddedMessageEvent, SubmissionDocEvent, DeletedEvent, EditedAuthorsDocEvent, DocumentURL,
ReviewAssignmentDocEvent, IanaExpertDocEvent, IRSGBallotDocEvent, DocExtResource, DocumentActionHolder,
- BofreqEditorDocEvent, BofreqResponsibleDocEvent )
+ BofreqEditorDocEvent, BofreqResponsibleDocEvent, StoredObject )
from ietf.utils.validators import validate_external_resource_value
@@ -218,3 +218,9 @@ class DocExtResourceAdmin(admin.ModelAdmin):
search_fields = ['doc__name', 'value', 'display_name', 'name__slug',]
raw_id_fields = ['doc', ]
admin.site.register(DocExtResource, DocExtResourceAdmin)
+
+class StoredObjectAdmin(admin.ModelAdmin):
+ list_display = ['store', 'name', 'modified', 'deleted']
+ list_filter = ['deleted']
+ search_fields = ['store', 'name', 'doc_name', 'doc_rev', 'deleted']
+admin.site.register(StoredObject, StoredObjectAdmin)
diff --git a/ietf/doc/expire.py b/ietf/doc/expire.py
index 98554bae0e..bf8523aa98 100644
--- a/ietf/doc/expire.py
+++ b/ietf/doc/expire.py
@@ -13,6 +13,7 @@
from typing import List, Optional # pyflakes:ignore
+from ietf.doc.storage_utils import exists_in_storage, remove_from_storage
from ietf.doc.utils import update_action_holders
from ietf.utils import log
from ietf.utils.mail import send_mail
@@ -156,11 +157,17 @@ def remove_ftp_copy(f):
if mark.exists():
mark.unlink()
+ def remove_from_active_draft_storage(file):
+ # Assumes the glob will never find a file with no suffix
+ ext = file.suffix[1:]
+ remove_from_storage("active-draft", f"{ext}/{file.name}", warn_if_missing=False)
+ # Note that the object is already in the "draft" storage.
src_dir = Path(settings.INTERNET_DRAFT_PATH)
for file in src_dir.glob("%s-%s.*" % (doc.name, rev)):
move_file(str(file.name))
remove_ftp_copy(str(file.name))
+ remove_from_active_draft_storage(file)
def expire_draft(doc):
# clean up files
@@ -218,6 +225,13 @@ def move_file_to(subdir):
mark = Path(settings.FTP_DIR) / "internet-drafts" / basename
if mark.exists():
mark.unlink()
+ if ext:
+ # Note that we're not moving these strays anywhere - the assumption
+ # is that the active-draft blobstore will not get strays.
+ # See, however, the note about "major system failures" at "unknown_ids"
+ blobname = f"{ext[1:]}/{basename}"
+ if exists_in_storage("active-draft", blobname):
+ remove_from_storage("active-draft", blobname)
try:
doc = Document.objects.get(name=filename, rev=revision)
diff --git a/ietf/doc/migrations/0025_storedobject_storedobject_unique_name_per_store.py b/ietf/doc/migrations/0025_storedobject_storedobject_unique_name_per_store.py
new file mode 100644
index 0000000000..e948ca3011
--- /dev/null
+++ b/ietf/doc/migrations/0025_storedobject_storedobject_unique_name_per_store.py
@@ -0,0 +1,66 @@
+# Copyright The IETF Trust 2025, All Rights Reserved
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ("doc", "0024_remove_ad_is_watching_states"),
+ ]
+
+ operations = [
+ migrations.CreateModel(
+ name="StoredObject",
+ fields=[
+ (
+ "id",
+ models.AutoField(
+ auto_created=True,
+ primary_key=True,
+ serialize=False,
+ verbose_name="ID",
+ ),
+ ),
+ ("store", models.CharField(max_length=256)),
+ ("name", models.CharField(max_length=1024)),
+ ("sha384", models.CharField(max_length=96)),
+ ("len", models.PositiveBigIntegerField()),
+ (
+ "store_created",
+ models.DateTimeField(
+ help_text="The instant the object ws first placed in the store"
+ ),
+ ),
+ (
+ "created",
+ models.DateTimeField(
+ help_text="Instant object became known. May not be the same as the storage's created value for the instance. It will hold ctime for objects imported from older disk storage"
+ ),
+ ),
+ (
+ "modified",
+ models.DateTimeField(
+ help_text="Last instant object was modified. May not be the same as the storage's modified value for the instance. It will hold mtime for objects imported from older disk storage unless they've actually been overwritten more recently"
+ ),
+ ),
+ ("doc_name", models.CharField(blank=True, max_length=255, null=True)),
+ ("doc_rev", models.CharField(blank=True, max_length=16, null=True)),
+ ("deleted", models.DateTimeField(null=True)),
+ ],
+ options={
+ "indexes": [
+ models.Index(
+ fields=["doc_name", "doc_rev"],
+ name="doc_storedo_doc_nam_d04465_idx",
+ )
+ ],
+ },
+ ),
+ migrations.AddConstraint(
+ model_name="storedobject",
+ constraint=models.UniqueConstraint(
+ fields=("store", "name"), name="unique_name_per_store"
+ ),
+ ),
+ ]
diff --git a/ietf/doc/models.py b/ietf/doc/models.py
index 03698c80c3..55da70972c 100644
--- a/ietf/doc/models.py
+++ b/ietf/doc/models.py
@@ -9,14 +9,16 @@
import django.db
import rfc2html
+from io import BufferedReader
from pathlib import Path
from lxml import etree
-from typing import Optional, TYPE_CHECKING
+from typing import Optional, Protocol, TYPE_CHECKING, Union
from weasyprint import HTML as wpHTML
from weasyprint.text.fonts import FontConfiguration
from django.db import models
from django.core import checks
+from django.core.files.base import File
from django.core.cache import caches
from django.core.validators import URLValidator, RegexValidator
from django.urls import reverse as urlreverse
@@ -30,6 +32,11 @@
import debug # pyflakes:ignore
from ietf.group.models import Group
+from ietf.doc.storage_utils import (
+ store_str as utils_store_str,
+ store_bytes as utils_store_bytes,
+ store_file as utils_store_file
+)
from ietf.name.models import ( DocTypeName, DocTagName, StreamName, IntendedStdLevelName, StdLevelName,
DocRelationshipName, DocReminderTypeName, BallotPositionName, ReviewRequestStateName, ReviewAssignmentStateName, FormalLanguageName,
DocUrlTagName, ExtResourceName)
@@ -714,10 +721,52 @@ def referenced_by_rfcs_as_rfc_or_draft(self):
if self.type_id == "rfc" and self.came_from_draft():
refs_to |= self.came_from_draft().referenced_by_rfcs()
return refs_to
-
+
class Meta:
abstract = True
+
+class HasNameRevAndTypeIdProtocol(Protocol):
+ """Typing Protocol describing a class that has name, rev, and type_id properties"""
+ @property
+ def name(self) -> str: ...
+ @property
+ def rev(self) -> str: ...
+ @property
+ def type_id(self) -> str: ...
+
+
+class StorableMixin:
+ """Mixin that adds storage helpers to a DocumentInfo subclass"""
+ def store_str(
+ self: HasNameRevAndTypeIdProtocol,
+ name: str,
+ content: str,
+ allow_overwrite: bool = False
+ ) -> None:
+ return utils_store_str(self.type_id, name, content, allow_overwrite, self.name, self.rev)
+
+ def store_bytes(
+ self: HasNameRevAndTypeIdProtocol,
+ name: str,
+ content: bytes,
+ allow_overwrite: bool = False,
+ doc_name: Optional[str] = None,
+ doc_rev: Optional[str] = None
+ ) -> None:
+ return utils_store_bytes(self.type_id, name, content, allow_overwrite, self.name, self.rev)
+
+ def store_file(
+ self: HasNameRevAndTypeIdProtocol,
+ name: str,
+ file: Union[File, BufferedReader],
+ allow_overwrite: bool = False,
+ doc_name: Optional[str] = None,
+ doc_rev: Optional[str] = None
+ ) -> None:
+ return utils_store_file(self.type_id, name, file, allow_overwrite, self.name, self.rev)
+
+
STATUSCHANGE_RELATIONS = ('tops','tois','tohist','toinf','tobcp','toexp')
class RelatedDocument(models.Model):
@@ -870,7 +919,7 @@ def role_for_doc(self):
'invalid'
)
-class Document(DocumentInfo):
+class Document(StorableMixin, DocumentInfo):
name = models.CharField(max_length=255, validators=[validate_docname,], unique=True) # immutable
action_holders = models.ManyToManyField(Person, through=DocumentActionHolder, blank=True)
@@ -1192,7 +1241,7 @@ class DocHistoryAuthor(DocumentAuthorInfo):
def __str__(self):
return u"%s %s (%s)" % (self.document.doc.name, self.person, self.order)
-class DocHistory(DocumentInfo):
+class DocHistory(StorableMixin, DocumentInfo):
doc = ForeignKey(Document, related_name="history_set")
name = models.CharField(max_length=255)
@@ -1538,3 +1587,31 @@ class BofreqEditorDocEvent(DocEvent):
class BofreqResponsibleDocEvent(DocEvent):
""" Capture the responsible leadership (IAB and IESG members) for a BOF Request """
responsible = models.ManyToManyField('person.Person', blank=True)
+
+class StoredObject(models.Model):
+ """Hold metadata about objects placed in object storage"""
+
+ store = models.CharField(max_length=256)
+ name = models.CharField(max_length=1024, null=False, blank=False) # N.B. the 1024 limit on name comes from S3
+ sha384 = models.CharField(max_length=96)
+ len = models.PositiveBigIntegerField()
+ store_created = models.DateTimeField(help_text="The instant the object ws first placed in the store")
+ created = models.DateTimeField(
+ null=False,
+ help_text="Instant object became known. May not be the same as the storage's created value for the instance. It will hold ctime for objects imported from older disk storage"
+ )
+ modified = models.DateTimeField(
+ null=False,
+ help_text="Last instant object was modified. May not be the same as the storage's modified value for the instance. It will hold mtime for objects imported from older disk storage unless they've actually been overwritten more recently"
+ )
+ doc_name = models.CharField(max_length=255, null=True, blank=True)
+ doc_rev = models.CharField(max_length=16, null=True, blank=True)
+ deleted = models.DateTimeField(null=True)
+
+ class Meta:
+ constraints = [
+ models.UniqueConstraint(fields=['store', 'name'], name='unique_name_per_store'),
+ ]
+ indexes = [
+ models.Index(fields=["doc_name", "doc_rev"]),
+ ]
diff --git a/ietf/doc/resources.py b/ietf/doc/resources.py
index bba57013b9..157a3ad556 100644
--- a/ietf/doc/resources.py
+++ b/ietf/doc/resources.py
@@ -18,7 +18,7 @@
RelatedDocHistory, BallotPositionDocEvent, AddedMessageEvent, SubmissionDocEvent,
ReviewRequestDocEvent, ReviewAssignmentDocEvent, EditedAuthorsDocEvent, DocumentURL,
IanaExpertDocEvent, IRSGBallotDocEvent, DocExtResource, DocumentActionHolder,
- BofreqEditorDocEvent,BofreqResponsibleDocEvent)
+ BofreqEditorDocEvent, BofreqResponsibleDocEvent, StoredObject)
from ietf.name.resources import BallotPositionNameResource, DocTypeNameResource
class BallotTypeResource(ModelResource):
@@ -842,3 +842,26 @@ class Meta:
"responsible": ALL_WITH_RELATIONS,
}
api.doc.register(BofreqResponsibleDocEventResource())
+
+
+class StoredObjectResource(ModelResource):
+ class Meta:
+ queryset = StoredObject.objects.all()
+ serializer = api.Serializer()
+ cache = SimpleCache()
+ #resource_name = 'storedobject'
+ ordering = ['id', ]
+ filtering = {
+ "id": ALL,
+ "store": ALL,
+ "name": ALL,
+ "sha384": ALL,
+ "len": ALL,
+ "store_created": ALL,
+ "created": ALL,
+ "modified": ALL,
+ "doc_name": ALL,
+ "doc_rev": ALL,
+ "deleted": ALL,
+ }
+api.doc.register(StoredObjectResource())
diff --git a/ietf/doc/storage_backends.py b/ietf/doc/storage_backends.py
new file mode 100644
index 0000000000..5eeab040e5
--- /dev/null
+++ b/ietf/doc/storage_backends.py
@@ -0,0 +1,192 @@
+# Copyright The IETF Trust 2025, All Rights Reserved
+
+import debug # pyflakes:ignore
+import json
+
+from contextlib import contextmanager
+from hashlib import sha384
+from io import BufferedReader
+from storages.backends.s3 import S3Storage
+from typing import Optional, Union
+
+from django.core.files.base import File
+
+from ietf.doc.models import StoredObject
+from ietf.utils.log import log
+from ietf.utils.timezone import timezone
+
+
+@contextmanager
+def maybe_log_timing(enabled, op, **kwargs):
+ """If enabled, log elapsed time and additional data from kwargs
+
+ Emits log even if an exception occurs
+ """
+ before = timezone.now()
+ exception = None
+ try:
+ yield
+ except Exception as err:
+ exception = err
+ raise
+ finally:
+ if enabled:
+ dt = timezone.now() - before
+ log(
+ json.dumps(
+ {
+ "log": "S3Storage_timing",
+ "seconds": dt.total_seconds(),
+ "op": op,
+ "exception": "" if exception is None else repr(exception),
+ **kwargs,
+ }
+ )
+ )
+
+
+# TODO-BLOBSTORE
+# Consider overriding save directly so that
+# we capture metadata for, e.g., ImageField objects
+class CustomS3Storage(S3Storage):
+
+ def __init__(self, **settings):
+ self.in_flight_custom_metadata = {} # type is Dict[str, Dict[str, str]]
+ super().__init__(**settings)
+
+ def get_default_settings(self):
+ # add a default for the ietf_log_blob_timing boolean
+ return super().get_default_settings() | {"ietf_log_blob_timing": False}
+
+ def _save(self, name, content):
+ with maybe_log_timing(
+ self.ietf_log_blob_timing, "_save", bucket_name=self.bucket_name, name=name
+ ):
+ return super()._save(name, content)
+
+ def _open(self, name, mode="rb"):
+ with maybe_log_timing(
+ self.ietf_log_blob_timing,
+ "_open",
+ bucket_name=self.bucket_name,
+ name=name,
+ mode=mode,
+ ):
+ return super()._open(name, mode)
+
+ def delete(self, name):
+ with maybe_log_timing(
+ self.ietf_log_blob_timing, "delete", bucket_name=self.bucket_name, name=name
+ ):
+ super().delete(name)
+
+ def store_file(
+ self,
+ kind: str,
+ name: str,
+ file: Union[File, BufferedReader],
+ allow_overwrite: bool = False,
+ doc_name: Optional[str] = None,
+ doc_rev: Optional[str] = None,
+ ):
+ is_new = not self.exists_in_storage(kind, name)
+ # debug.show('f"Asked to store {name} in {kind}: is_new={is_new}, allow_overwrite={allow_overwrite}"')
+ if not allow_overwrite and not is_new:
+ log(f"Failed to save {kind}:{name} - name already exists in store")
+ debug.show('f"Failed to save {kind}:{name} - name already exists in store"')
+ # raise Exception("Not ignoring overwrite attempts while testing")
+ else:
+ try:
+ new_name = self.save(name, file)
+ now = timezone.now()
+ record, created = StoredObject.objects.get_or_create(
+ store=kind,
+ name=name,
+ defaults=dict(
+ sha384=self.in_flight_custom_metadata[name]["sha384"],
+ len=int(self.in_flight_custom_metadata[name]["len"]),
+ store_created=now,
+ created=now,
+ modified=now,
+ doc_name=doc_name, # Note that these are assumed to be invariant
+ doc_rev=doc_rev, # for a given name
+ ),
+ )
+ if not created:
+ record.sha384 = self.in_flight_custom_metadata[name]["sha384"]
+ record.len = int(self.in_flight_custom_metadata[name]["len"])
+ record.modified = now
+ record.deleted = None
+ record.save()
+ if new_name != name:
+ complaint = f"Error encountered saving '{name}' - results stored in '{new_name}' instead."
+ log(complaint)
+ debug.show("complaint")
+ # Note that we are otherwise ignoring this condition - it should become an error later.
+ except Exception as e:
+ # Log and then swallow the exception while we're learning.
+            # Don't let failure pass so quietly when these are the authoritative bits.
+ complaint = f"Failed to save {kind}:{name}"
+ log(complaint, e)
+ debug.show('f"{complaint}: {e}"')
+ finally:
+ del self.in_flight_custom_metadata[name]
+ return None
+
+ def exists_in_storage(self, kind: str, name: str) -> bool:
+ try:
+ # open is realized with a HEAD
+ # See https://github.com/jschneier/django-storages/blob/b79ea310201e7afd659fe47e2882fe59aae5b517/storages/backends/s3.py#L528
+ with self.open(name):
+ return True
+ except FileNotFoundError:
+ return False
+
+ def remove_from_storage(
+ self, kind: str, name: str, warn_if_missing: bool = True
+ ) -> None:
+ now = timezone.now()
+ try:
+ with self.open(name):
+ pass
+ self.delete(name)
+ # debug.show('f"deleted {name} from {kind} storage"')
+ except FileNotFoundError:
+ if warn_if_missing:
+ complaint = (
+ f"WARNING: Asked to delete non-existent {name} from {kind} storage"
+ )
+ log(complaint)
+ debug.show("complaint")
+ existing_record = StoredObject.objects.filter(store=kind, name=name)
+ if not existing_record.exists() and warn_if_missing:
+ complaint = f"WARNING: Asked to delete {name} from {kind} storage, but there was no matching StorageObject"
+ log(complaint)
+ debug.show("complaint")
+ else:
+ # Note that existing_record is a queryset that will have one matching object
+ existing_record.filter(deleted__isnull=True).update(deleted=now)
+
+ def _get_write_parameters(self, name, content=None):
+ # debug.show('f"getting write parameters for {name}"')
+ params = super()._get_write_parameters(name, content)
+ if "Metadata" not in params:
+ params["Metadata"] = {}
+ try:
+ content.seek(0)
+ except AttributeError: # TODO-BLOBSTORE
+ debug.say("Encountered Non-Seekable content")
+ raise NotImplementedError("cannot handle unseekable content")
+ content_bytes = content.read()
+ if not isinstance(
+ content_bytes, bytes
+ ): # TODO-BLOBSTORE: This is sketch-development only -remove before committing
+ raise Exception(f"Expected bytes - got {type(content_bytes)}")
+ content.seek(0)
+ metadata = {
+ "len": f"{len(content_bytes)}",
+ "sha384": f"{sha384(content_bytes).hexdigest()}",
+ }
+ params["Metadata"].update(metadata)
+ self.in_flight_custom_metadata[name] = metadata
+ return params
diff --git a/ietf/doc/storage_utils.py b/ietf/doc/storage_utils.py
new file mode 100644
index 0000000000..4f0516339a
--- /dev/null
+++ b/ietf/doc/storage_utils.py
@@ -0,0 +1,103 @@
+# Copyright The IETF Trust 2025, All Rights Reserved
+
+from io import BufferedReader
+from typing import Optional, Union
+import debug  # pyflakes:ignore
+
+from django.conf import settings
+from django.core.files.base import ContentFile, File
+from django.core.files.storage import storages
+
+
+# TODO-BLOBSTORE (Future, maybe after leaving 3.9) : add a return type
+def _get_storage(kind: str):
+
+ if kind in settings.MORE_STORAGE_NAMES:
+ # TODO-BLOBSTORE - add a checker that verifies configuration will only return CustomS3Storages
+ return storages[kind]
+ else:
+ debug.say(f"Got into not-implemented looking for {kind}")
+ raise NotImplementedError(f"Don't know how to store {kind}")
+
+
+def exists_in_storage(kind: str, name: str) -> bool:
+ if settings.ENABLE_BLOBSTORAGE:
+ store = _get_storage(kind)
+ return store.exists_in_storage(kind, name)
+ else:
+ return False
+
+
+def remove_from_storage(kind: str, name: str, warn_if_missing: bool = True) -> None:
+ if settings.ENABLE_BLOBSTORAGE:
+ store = _get_storage(kind)
+ store.remove_from_storage(kind, name, warn_if_missing)
+ return None
+
+
+# TODO-BLOBSTORE: Try to refactor `kind` out of the signature of the methods already on the custom store (which knows its kind)
+def store_file(
+ kind: str,
+ name: str,
+ file: Union[File, BufferedReader],
+ allow_overwrite: bool = False,
+ doc_name: Optional[str] = None,
+ doc_rev: Optional[str] = None,
+) -> None:
+ # debug.show('f"asked to store {name} into {kind}"')
+ if settings.ENABLE_BLOBSTORAGE:
+ store = _get_storage(kind)
+ store.store_file(kind, name, file, allow_overwrite, doc_name, doc_rev)
+ return None
+
+
+def store_bytes(
+    kind: str,
+    name: str,
+    content: bytes,
+    allow_overwrite: bool = False,
+    doc_name: Optional[str] = None,
+    doc_rev: Optional[str] = None,
+) -> None:
+    if settings.ENABLE_BLOBSTORAGE:
+        # Forward doc_name/doc_rev so the StoredObject record keeps its document linkage
+        store_file(kind, name, ContentFile(content), allow_overwrite, doc_name, doc_rev)
+    return None
+
+
+def store_str(
+    kind: str,
+    name: str,
+    content: str,
+    allow_overwrite: bool = False,
+    doc_name: Optional[str] = None,
+    doc_rev: Optional[str] = None,
+) -> None:
+    if settings.ENABLE_BLOBSTORAGE:
+        content_bytes = content.encode("utf-8")
+        # Forward doc_name/doc_rev so the StoredObject record keeps its document linkage
+        store_bytes(kind, name, content_bytes, allow_overwrite, doc_name, doc_rev)
+    return None
+
+
+def retrieve_bytes(kind: str, name: str) -> bytes:
+ from ietf.doc.storage_backends import maybe_log_timing
+ content = b""
+ if settings.ENABLE_BLOBSTORAGE:
+ store = _get_storage(kind)
+ with store.open(name) as f:
+ with maybe_log_timing(
+ hasattr(store, "ietf_log_blob_timing") and store.ietf_log_blob_timing,
+ "read",
+ bucket_name=store.bucket_name if hasattr(store, "bucket_name") else "",
+ name=name,
+ ):
+ content = f.read()
+ return content
+
+
+def retrieve_str(kind: str, name: str) -> str:
+ content = ""
+ if settings.ENABLE_BLOBSTORAGE:
+ content_bytes = retrieve_bytes(kind, name)
+ # TODO-BLOBSTORE: try to decode all the different ways doc.text() does
+ content = content_bytes.decode("utf-8")
+ return content
diff --git a/ietf/doc/tasks.py b/ietf/doc/tasks.py
index 6eb901e6c7..e24c58e1e7 100644
--- a/ietf/doc/tasks.py
+++ b/ietf/doc/tasks.py
@@ -84,7 +84,7 @@ def generate_idnits2_rfc_status_task():
outpath = Path(settings.DERIVED_DIR) / "idnits2-rfc-status"
blob = generate_idnits2_rfc_status()
try:
- outpath.write_text(blob, encoding="utf8")
+ outpath.write_text(blob, encoding="utf8") # TODO-BLOBSTORE
except Exception as e:
log.log(f"failed to write idnits2-rfc-status: {e}")
@@ -94,7 +94,7 @@ def generate_idnits2_rfcs_obsoleted_task():
outpath = Path(settings.DERIVED_DIR) / "idnits2-rfcs-obsoleted"
blob = generate_idnits2_rfcs_obsoleted()
try:
- outpath.write_text(blob, encoding="utf8")
+ outpath.write_text(blob, encoding="utf8") # TODO-BLOBSTORE
except Exception as e:
log.log(f"failed to write idnits2-rfcs-obsoleted: {e}")
diff --git a/ietf/doc/tests_bofreq.py b/ietf/doc/tests_bofreq.py
index 2e27efd627..6a7c9393ef 100644
--- a/ietf/doc/tests_bofreq.py
+++ b/ietf/doc/tests_bofreq.py
@@ -16,6 +16,7 @@
from django.template.loader import render_to_string
from django.utils import timezone
+from ietf.doc.storage_utils import retrieve_str
from ietf.group.factories import RoleFactory
from ietf.doc.factories import BofreqFactory, NewRevisionDocEventFactory
from ietf.doc.models import State, Document, NewRevisionDocEvent
@@ -340,6 +341,7 @@ def test_submit(self):
doc = reload_db_objects(doc)
self.assertEqual('%02d'%(int(rev)+1) ,doc.rev)
self.assertEqual(f'# {username}', doc.text())
+ self.assertEqual(f'# {username}', retrieve_str('bofreq',doc.get_base_name()))
self.assertEqual(docevent_count+1, doc.docevent_set.count())
self.assertEqual(1, len(outbox))
rev = doc.rev
@@ -379,6 +381,7 @@ def test_start_new_bofreq(self):
self.assertEqual(list(bofreq_editors(bofreq)), [nobody])
self.assertEqual(bofreq.latest_event(NewRevisionDocEvent).rev, '00')
self.assertEqual(bofreq.text_or_error(), 'some stuff')
+ self.assertEqual(retrieve_str('bofreq',bofreq.get_base_name()), 'some stuff')
self.assertEqual(len(outbox),1)
finally:
os.unlink(file.name)
diff --git a/ietf/doc/tests_charter.py b/ietf/doc/tests_charter.py
index e0207fe842..62e49559e2 100644
--- a/ietf/doc/tests_charter.py
+++ b/ietf/doc/tests_charter.py
@@ -16,6 +16,7 @@
from ietf.doc.factories import CharterFactory, NewRevisionDocEventFactory, TelechatDocEventFactory
from ietf.doc.models import ( Document, State, BallotDocEvent, BallotType, NewRevisionDocEvent,
TelechatDocEvent, WriteupDocEvent )
+from ietf.doc.storage_utils import retrieve_str
from ietf.doc.utils_charter import ( next_revision, default_review_text, default_action_text,
charter_name_for_group )
from ietf.doc.utils import close_open_ballots
@@ -519,6 +520,11 @@ def test_submit_charter(self):
ftp_charter_path = Path(settings.FTP_DIR) / "charter" / charter_path.name
self.assertTrue(ftp_charter_path.exists())
self.assertTrue(charter_path.samefile(ftp_charter_path))
+ blobstore_contents = retrieve_str("charter", charter.get_base_name())
+ self.assertEqual(
+ blobstore_contents,
+ "Windows line\nMac line\nUnix line\n" + utf_8_snippet.decode("utf-8"),
+ )
def test_submit_initial_charter(self):
diff --git a/ietf/doc/tests_conflict_review.py b/ietf/doc/tests_conflict_review.py
index d2f94922b2..791db17f5a 100644
--- a/ietf/doc/tests_conflict_review.py
+++ b/ietf/doc/tests_conflict_review.py
@@ -16,6 +16,7 @@
from ietf.doc.factories import IndividualDraftFactory, ConflictReviewFactory, RgDraftFactory
from ietf.doc.models import Document, DocEvent, NewRevisionDocEvent, BallotPositionDocEvent, TelechatDocEvent, State, DocTagName
+from ietf.doc.storage_utils import retrieve_str
from ietf.doc.utils import create_ballot_if_not_open
from ietf.doc.views_conflict_review import default_approval_text
from ietf.group.models import Person
@@ -422,6 +423,7 @@ def test_initial_submission(self):
f.close()
self.assertTrue(ftp_path.exists())
self.assertTrue( "submission-00" in doc.latest_event(NewRevisionDocEvent).desc)
+ self.assertEqual(retrieve_str("conflrev",basename), "Some initial review text\n")
def test_subsequent_submission(self):
doc = Document.objects.get(name='conflict-review-imaginary-irtf-submission')
diff --git a/ietf/doc/tests_draft.py b/ietf/doc/tests_draft.py
index 2405806682..4753c4ff0c 100644
--- a/ietf/doc/tests_draft.py
+++ b/ietf/doc/tests_draft.py
@@ -24,6 +24,7 @@
from ietf.doc.models import ( Document, DocReminder, DocEvent,
ConsensusDocEvent, LastCallDocEvent, RelatedDocument, State, TelechatDocEvent,
WriteupDocEvent, DocRelationshipName, IanaExpertDocEvent )
+from ietf.doc.storage_utils import exists_in_storage, store_str
from ietf.doc.utils import get_tags_for_stream_id, create_ballot_if_not_open
from ietf.doc.views_draft import AdoptDraftForm
from ietf.name.models import DocTagName, RoleName
@@ -577,6 +578,11 @@ def setUp(self):
def write_draft_file(self, name, size):
with (Path(settings.INTERNET_DRAFT_PATH) / name).open('w') as f:
f.write("a" * size)
+ _, ext = os.path.splitext(name)
+ if ext:
+ ext=ext[1:]
+ store_str("active-draft", f"{ext}/{name}", "a"*size, allow_overwrite=True)
+ store_str("draft", f"{ext}/{name}", "a"*size, allow_overwrite=True)
class ResurrectTests(DraftFileMixin, TestCase):
@@ -649,6 +655,7 @@ def test_resurrect(self):
# ensure file restored from archive directory
self.assertTrue(os.path.exists(os.path.join(settings.INTERNET_DRAFT_PATH, txt)))
self.assertTrue(not os.path.exists(os.path.join(settings.INTERNET_DRAFT_ARCHIVE_DIR, txt)))
+ self.assertTrue(exists_in_storage("active-draft",f"txt/{txt}"))
class ExpireIDsTests(DraftFileMixin, TestCase):
@@ -775,6 +782,7 @@ def test_expire_drafts(self):
self.assertEqual(draft.action_holders.count(), 0)
self.assertIn('Removed all action holders', draft.latest_event(type='changed_action_holders').desc)
self.assertTrue(not os.path.exists(os.path.join(settings.INTERNET_DRAFT_PATH, txt)))
+ self.assertFalse(exists_in_storage("active-draft", f"txt/{txt}"))
self.assertTrue(os.path.exists(os.path.join(settings.INTERNET_DRAFT_ARCHIVE_DIR, txt)))
draft.delete()
@@ -798,6 +806,7 @@ def test_clean_up_draft_files(self):
clean_up_draft_files()
self.assertTrue(not os.path.exists(os.path.join(settings.INTERNET_DRAFT_PATH, unknown)))
+ self.assertFalse(exists_in_storage("active-draft", f"txt/{unknown}"))
self.assertTrue(os.path.exists(os.path.join(settings.INTERNET_DRAFT_ARCHIVE_DIR, "unknown_ids", unknown)))
@@ -808,6 +817,7 @@ def test_clean_up_draft_files(self):
clean_up_draft_files()
self.assertTrue(not os.path.exists(os.path.join(settings.INTERNET_DRAFT_PATH, malformed)))
+ self.assertFalse(exists_in_storage("active-draft", f"txt/{malformed}"))
self.assertTrue(os.path.exists(os.path.join(settings.INTERNET_DRAFT_ARCHIVE_DIR, "unknown_ids", malformed)))
@@ -822,9 +832,11 @@ def test_clean_up_draft_files(self):
clean_up_draft_files()
self.assertTrue(not os.path.exists(os.path.join(settings.INTERNET_DRAFT_PATH, txt)))
+ self.assertFalse(exists_in_storage("active-draft", f"txt/{txt}"))
self.assertTrue(os.path.exists(os.path.join(settings.INTERNET_DRAFT_ARCHIVE_DIR, txt)))
self.assertTrue(not os.path.exists(os.path.join(settings.INTERNET_DRAFT_PATH, pdf)))
+ self.assertFalse(exists_in_storage("active-draft", f"pdf/{pdf}"))
self.assertTrue(os.path.exists(os.path.join(settings.INTERNET_DRAFT_ARCHIVE_DIR, pdf)))
# expire draft
@@ -843,6 +855,7 @@ def test_clean_up_draft_files(self):
clean_up_draft_files()
self.assertTrue(not os.path.exists(os.path.join(settings.INTERNET_DRAFT_PATH, txt)))
+ self.assertFalse(exists_in_storage("active-draft", f"txt/{txt}"))
self.assertTrue(os.path.exists(os.path.join(settings.INTERNET_DRAFT_ARCHIVE_DIR, txt)))
diff --git a/ietf/doc/tests_material.py b/ietf/doc/tests_material.py
index aaea8fec3d..c87341c95b 100644
--- a/ietf/doc/tests_material.py
+++ b/ietf/doc/tests_material.py
@@ -18,6 +18,7 @@
from django.utils import timezone
from ietf.doc.models import Document, State, NewRevisionDocEvent
+from ietf.doc.storage_utils import retrieve_str
from ietf.group.factories import RoleFactory
from ietf.group.models import Group
from ietf.meeting.factories import MeetingFactory, SessionFactory, SessionPresentationFactory
@@ -123,6 +124,9 @@ def test_upload_slides(self):
ftp_filepath=Path(settings.FTP_DIR) / "slides" / basename
with ftp_filepath.open() as f:
self.assertEqual(f.read(), content)
+ # This test is very sloppy with respect to the actual file content.
+ # Working with/around that for the moment.
+ self.assertEqual(retrieve_str("slides", basename), content)
# check that posting same name is prevented
test_file.seek(0)
@@ -237,4 +241,6 @@ def test_revise(self, mock_slides_manager_cls):
with io.open(os.path.join(doc.get_file_path(), doc.name + "-" + doc.rev + ".txt")) as f:
self.assertEqual(f.read(), content)
+ self.assertEqual(retrieve_str("slides", f"{doc.name}-{doc.rev}.txt"), content)
+
diff --git a/ietf/doc/tests_review.py b/ietf/doc/tests_review.py
index a956fd3287..e93bc02181 100644
--- a/ietf/doc/tests_review.py
+++ b/ietf/doc/tests_review.py
@@ -20,6 +20,7 @@
import debug # pyflakes:ignore
+from ietf.doc.storage_utils import retrieve_str
import ietf.review.mailarch
from ietf.doc.factories import ( NewRevisionDocEventFactory, IndividualDraftFactory, WgDraftFactory,
@@ -63,6 +64,10 @@ def verify_review_files_were_written(self, assignment, expected_content = "This
review_file = Path(self.review_subdir) / f"{assignment.review.name}.txt"
content = review_file.read_text()
self.assertEqual(content, expected_content)
+ self.assertEqual(
+ retrieve_str("review", review_file.name),
+ expected_content
+ )
review_ftp_file = Path(settings.FTP_DIR) / "review" / review_file.name
self.assertTrue(review_file.samefile(review_ftp_file))
diff --git a/ietf/doc/tests_statement.py b/ietf/doc/tests_statement.py
index 2071018b10..fea42b97d6 100644
--- a/ietf/doc/tests_statement.py
+++ b/ietf/doc/tests_statement.py
@@ -14,6 +14,7 @@
from ietf.doc.factories import StatementFactory, DocEventFactory
from ietf.doc.models import Document, State, NewRevisionDocEvent
+from ietf.doc.storage_utils import retrieve_str
from ietf.group.models import Group
from ietf.person.factories import PersonFactory
from ietf.utils.mail import outbox, empty_outbox
@@ -185,8 +186,16 @@ def test_submit(self):
self.assertEqual("%02d" % (int(rev) + 1), doc.rev)
if postdict["statement_submission"] == "enter":
self.assertEqual(f"# {username}", doc.text())
+ self.assertEqual(
+ retrieve_str("statement", f"{doc.name}-{doc.rev}.md"),
+ f"# {username}"
+ )
else:
self.assertEqual("not valid pdf", doc.text())
+ self.assertEqual(
+ retrieve_str("statement", f"{doc.name}-{doc.rev}.pdf"),
+ "not valid pdf"
+ )
self.assertEqual(docevent_count + 1, doc.docevent_set.count())
self.assertEqual(0, len(outbox))
rev = doc.rev
@@ -255,8 +264,16 @@ def test_start_new_statement(self):
self.assertIsNotNone(statement.history_set.last().latest_event(type="published_statement"))
if postdict["statement_submission"] == "enter":
self.assertEqual(statement.text_or_error(), "some stuff")
+ self.assertEqual(
+ retrieve_str("statement", statement.uploaded_filename),
+ "some stuff"
+ )
else:
self.assertTrue(statement.uploaded_filename.endswith("pdf"))
+ self.assertEqual(
+ retrieve_str("statement", f"{statement.name}-{statement.rev}.pdf"),
+ "not valid pdf"
+ )
self.assertEqual(len(outbox), 0)
existing_statement = StatementFactory()
diff --git a/ietf/doc/tests_status_change.py b/ietf/doc/tests_status_change.py
index bd4da4c092..cbdc1a049a 100644
--- a/ietf/doc/tests_status_change.py
+++ b/ietf/doc/tests_status_change.py
@@ -19,6 +19,7 @@
WgRfcFactory, DocEventFactory, WgDraftFactory )
from ietf.doc.models import ( Document, State, DocEvent,
BallotPositionDocEvent, NewRevisionDocEvent, TelechatDocEvent, WriteupDocEvent )
+from ietf.doc.storage_utils import retrieve_str
from ietf.doc.utils import create_ballot_if_not_open
from ietf.doc.views_status_change import default_approval_text
from ietf.group.models import Person
@@ -71,7 +72,7 @@ def test_start_review(self):
statchg_relation_row_blah="tois")
)
self.assertEqual(r.status_code, 302)
- status_change = Document.objects.get(name='status-change-imaginary-new')
+ status_change = Document.objects.get(name='status-change-imaginary-new')
self.assertEqual(status_change.get_state('statchg').slug,'adrev')
self.assertEqual(status_change.rev,'00')
self.assertEqual(status_change.ad.name,'Areað Irector')
@@ -563,6 +564,8 @@ def test_initial_submission(self):
ftp_filepath = Path(settings.FTP_DIR) / "status-changes" / basename
self.assertFalse(filepath.exists())
self.assertFalse(ftp_filepath.exists())
+ with self.assertRaises(FileNotFoundError):
+ retrieve_str("statchg",basename)
r = self.client.post(url,dict(content="Some initial review text\n",submit_response="1"))
self.assertEqual(r.status_code,302)
doc = Document.objects.get(name='status-change-imaginary-mid-review')
@@ -571,6 +574,10 @@ def test_initial_submission(self):
self.assertEqual(f.read(),"Some initial review text\n")
with ftp_filepath.open() as f:
self.assertEqual(f.read(),"Some initial review text\n")
+ self.assertEqual(
+ retrieve_str("statchg", basename),
+ "Some initial review text\n"
+ )
self.assertTrue( "mid-review-00" in doc.latest_event(NewRevisionDocEvent).desc)
def test_subsequent_submission(self):
@@ -607,7 +614,8 @@ def test_subsequent_submission(self):
self.assertContains(r, "does not appear to be a text file")
# sane post uploading a file
- test_file = StringIO("This is a new proposal.")
+ test_content = "This is a new proposal."
+ test_file = StringIO(test_content)
test_file.name = "unnamed"
r = self.client.post(url,dict(txt=test_file,submit_response="1"))
self.assertEqual(r.status_code, 302)
@@ -615,8 +623,12 @@ def test_subsequent_submission(self):
self.assertEqual(doc.rev,'01')
path = os.path.join(settings.STATUS_CHANGE_PATH, '%s-%s.txt' % (doc.name, doc.rev))
with io.open(path) as f:
- self.assertEqual(f.read(),"This is a new proposal.")
+ self.assertEqual(f.read(), test_content)
f.close()
+ self.assertEqual(
+ retrieve_str("statchg", f"{doc.name}-{doc.rev}.txt"),
+ test_content
+ )
self.assertTrue( "mid-review-01" in doc.latest_event(NewRevisionDocEvent).desc)
# verify reset text button works
diff --git a/ietf/doc/utils.py b/ietf/doc/utils.py
index 10fe9ff2d7..3ddd904c75 100644
--- a/ietf/doc/utils.py
+++ b/ietf/doc/utils.py
@@ -1510,7 +1510,7 @@ def update_or_create_draft_bibxml_file(doc, rev):
existing_bibxml = ""
if normalized_bibxml.strip() != existing_bibxml.strip():
log.log(f"Writing {ref_rev_file_path}")
- ref_rev_file_path.write_text(normalized_bibxml, encoding="utf8")
+ ref_rev_file_path.write_text(normalized_bibxml, encoding="utf8") # TODO-BLOBSTORE
def ensure_draft_bibxml_path_exists():
diff --git a/ietf/doc/views_bofreq.py b/ietf/doc/views_bofreq.py
index 3bd10287b2..71cbe30491 100644
--- a/ietf/doc/views_bofreq.py
+++ b/ietf/doc/views_bofreq.py
@@ -101,6 +101,7 @@ def submit(request, name):
content = form.cleaned_data['bofreq_content']
with io.open(bofreq.get_file_name(), 'w', encoding='utf-8') as destination:
destination.write(content)
+ bofreq.store_str(bofreq.get_base_name(), content)
email_bofreq_new_revision(request, bofreq)
return redirect('ietf.doc.views_doc.document_main', name=bofreq.name)
@@ -175,6 +176,7 @@ def new_bof_request(request):
content = form.cleaned_data['bofreq_content']
with io.open(bofreq.get_file_name(), 'w', encoding='utf-8') as destination:
destination.write(content)
+ bofreq.store_str(bofreq.get_base_name(), content)
email_bofreq_new_revision(request, bofreq)
return redirect('ietf.doc.views_doc.document_main', name=bofreq.name)
diff --git a/ietf/doc/views_charter.py b/ietf/doc/views_charter.py
index f8748d2126..e899f59227 100644
--- a/ietf/doc/views_charter.py
+++ b/ietf/doc/views_charter.py
@@ -441,9 +441,10 @@ def submit(request, name, option=None):
) # update rev
with charter_filename.open("w", encoding="utf-8") as destination:
if form.cleaned_data["txt"]:
- destination.write(form.cleaned_data["txt"])
+ content=form.cleaned_data["txt"]
else:
- destination.write(form.cleaned_data["content"])
+ content=form.cleaned_data["content"]
+ destination.write(content)
# Also provide a copy to the legacy ftp source directory, which is served by rsync
# This replaces the hardlink copy that ghostlink has made in the past
# Still using a hardlink as long as these are on the same filesystem.
@@ -454,7 +455,8 @@ def submit(request, name, option=None):
log(
"There was an error creating a hardlink at %s pointing to %s"
% (ftp_filename, charter_filename)
- )
+ )
+ charter.store_str(charter_filename.name, content)
if option in ["initcharter", "recharter"] and charter.ad == None:
diff --git a/ietf/doc/views_conflict_review.py b/ietf/doc/views_conflict_review.py
index e55661ccdf..159f1340a4 100644
--- a/ietf/doc/views_conflict_review.py
+++ b/ietf/doc/views_conflict_review.py
@@ -186,9 +186,10 @@ def save(self, review):
filepath = Path(settings.CONFLICT_REVIEW_PATH) / basename
with filepath.open('w', encoding='utf-8') as destination:
if self.cleaned_data['txt']:
- destination.write(self.cleaned_data['txt'])
+ content = self.cleaned_data['txt']
else:
- destination.write(self.cleaned_data['content'])
+ content = self.cleaned_data['content']
+ destination.write(content)
ftp_filepath = Path(settings.FTP_DIR) / "conflict-reviews" / basename
try:
os.link(filepath, ftp_filepath) # Path.hardlink_to is not available until 3.10
@@ -197,6 +198,7 @@ def save(self, review):
"There was an error creating a hardlink at %s pointing to %s: %s"
% (ftp_filepath, filepath, e)
)
+ review.store_str(basename, content)
#This is very close to submit on charter - can we get better reuse?
@role_required('Area Director','Secretariat')
diff --git a/ietf/doc/views_draft.py b/ietf/doc/views_draft.py
index 34104b2005..c80537afb3 100644
--- a/ietf/doc/views_draft.py
+++ b/ietf/doc/views_draft.py
@@ -32,6 +32,7 @@
generate_publication_request, email_adopted, email_intended_status_changed,
email_iesg_processing_document, email_ad_approved_doc,
email_iana_expert_review_state_changed )
+from ietf.doc.storage_utils import retrieve_bytes, store_bytes
from ietf.doc.utils import ( add_state_change_event, can_adopt_draft, can_unadopt_draft,
get_tags_for_stream_id, nice_consensus, update_action_holders,
update_reminder, update_telechat, make_notify_changed_event, get_initial_notify,
@@ -897,6 +898,11 @@ def restore_draft_file(request, draft):
except shutil.Error as ex:
messages.warning(request, 'There was an error restoring the Internet-Draft file: {} ({})'.format(file, ex))
log.log(" Exception %s when attempting to move %s" % (ex, file))
+ _, ext = os.path.splitext(os.path.basename(file))
+ if ext:
+ ext = ext[1:]
+ blobname = f"{ext}/{basename}.{ext}"
+ store_bytes("active-draft", blobname, retrieve_bytes("draft", blobname))
class ShepherdWriteupUploadForm(forms.Form):
diff --git a/ietf/doc/views_material.py b/ietf/doc/views_material.py
index 361bf5f1e2..6f8b8a8f12 100644
--- a/ietf/doc/views_material.py
+++ b/ietf/doc/views_material.py
@@ -167,6 +167,8 @@ def edit_material(request, name=None, acronym=None, action=None, doc_type=None):
with filepath.open('wb+') as dest:
for chunk in f.chunks():
dest.write(chunk)
+ f.seek(0)
+ doc.store_file(basename, f)
if not doc.meeting_related():
log.assertion('doc.type_id == "slides"')
ftp_filepath = Path(settings.FTP_DIR) / doc.type_id / basename
diff --git a/ietf/doc/views_review.py b/ietf/doc/views_review.py
index bb9e56742d..1f23c435fa 100644
--- a/ietf/doc/views_review.py
+++ b/ietf/doc/views_review.py
@@ -805,6 +805,7 @@ def complete_review(request, name, assignment_id=None, acronym=None):
review_path = Path(review.get_file_path()) / f"{review.name}.txt"
review_path.write_text(content)
+ review.store_str(f"{review.name}.txt", content, allow_overwrite=True) # We have a bug: review revisions don't create a new version!
review_ftp_path = Path(settings.FTP_DIR) / "review" / review_path.name
# See https://github.com/ietf-tools/datatracker/issues/6941 - when that's
# addressed, making this link should not be conditional
diff --git a/ietf/doc/views_statement.py b/ietf/doc/views_statement.py
index bf9f47ddfe..9dc8c8ad69 100644
--- a/ietf/doc/views_statement.py
+++ b/ietf/doc/views_statement.py
@@ -137,12 +137,15 @@ def submit(request, name):
mode="wb" if writing_pdf else "w"
) as destination:
if writing_pdf:
- for chunk in form.cleaned_data["statement_file"].chunks():
+ f = form.cleaned_data["statement_file"]
+ for chunk in f.chunks():
destination.write(chunk)
+ f.seek(0)
+ statement.store_file(statement.uploaded_filename, f)
else:
destination.write(markdown_content)
+ statement.store_str(statement.uploaded_filename, markdown_content)
return redirect("ietf.doc.views_doc.document_main", name=statement.name)
-
else:
if statement.uploaded_filename.endswith("pdf"):
text = CONST_PDF_REV_NOTICE
@@ -254,10 +257,14 @@ def new_statement(request):
mode="wb" if writing_pdf else "w"
) as destination:
if writing_pdf:
- for chunk in form.cleaned_data["statement_file"].chunks():
+ f = form.cleaned_data["statement_file"]
+ for chunk in f.chunks():
destination.write(chunk)
+ f.seek(0)
+ statement.store_file(statement.uploaded_filename, f)
else:
destination.write(markdown_content)
+ statement.store_str(statement.uploaded_filename, markdown_content)
return redirect("ietf.doc.views_doc.document_main", name=statement.name)
else:
diff --git a/ietf/doc/views_status_change.py b/ietf/doc/views_status_change.py
index 33b822348a..2bccc213c4 100644
--- a/ietf/doc/views_status_change.py
+++ b/ietf/doc/views_status_change.py
@@ -160,9 +160,11 @@ def save(self, doc):
filename = Path(settings.STATUS_CHANGE_PATH) / basename
with io.open(filename, 'w', encoding='utf-8') as destination:
if self.cleaned_data['txt']:
- destination.write(self.cleaned_data['txt'])
+ content = self.cleaned_data['txt']
else:
- destination.write(self.cleaned_data['content'])
+ content = self.cleaned_data['content']
+ destination.write(content)
+ doc.store_str(basename, content)
try:
ftp_filename = Path(settings.FTP_DIR) / "status-changes" / basename
os.link(filename, ftp_filename) # Path.hardlink is not available until 3.10
diff --git a/ietf/group/tasks.py b/ietf/group/tasks.py
index 8b4c994ba1..693aafb385 100644
--- a/ietf/group/tasks.py
+++ b/ietf/group/tasks.py
@@ -10,6 +10,7 @@
from django.conf import settings
from django.template.loader import render_to_string
+from ietf.doc.storage_utils import store_file
from ietf.utils import log
from .models import Group
@@ -43,6 +44,11 @@ def generate_wg_charters_files_task():
encoding="utf8",
)
+ with charters_file.open("rb") as f:
+ store_file("indexes", "1wg-charters.txt", f, allow_overwrite=True)
+ with charters_by_acronym_file.open("rb") as f:
+ store_file("indexes", "1wg-charters-by-acronym.txt", f, allow_overwrite=True)
+
charter_copy_dests = [
getattr(settings, "CHARTER_COPY_PATH", None),
getattr(settings, "CHARTER_COPY_OTHER_PATH", None),
@@ -102,3 +108,8 @@ def generate_wg_summary_files_task():
),
encoding="utf8",
)
+
+ with summary_file.open("rb") as f:
+ store_file("indexes", "1wg-summary.txt", f, allow_overwrite=True)
+ with summary_by_acronym_file.open("rb") as f:
+ store_file("indexes", "1wg-summary-by-acronym.txt", f, allow_overwrite=True)
diff --git a/ietf/group/tests_info.py b/ietf/group/tests_info.py
index 32d919c779..aaf937ee43 100644
--- a/ietf/group/tests_info.py
+++ b/ietf/group/tests_info.py
@@ -29,6 +29,7 @@
from ietf.community.utils import reset_name_contains_index_for_rule
from ietf.doc.factories import WgDraftFactory, IndividualDraftFactory, CharterFactory, BallotDocEventFactory
from ietf.doc.models import Document, DocEvent, State
+from ietf.doc.storage_utils import retrieve_str
from ietf.doc.utils_charter import charter_name_for_group
from ietf.group.admin import GroupForm as AdminGroupForm
from ietf.group.factories import (GroupFactory, RoleFactory, GroupEventFactory,
@@ -303,20 +304,26 @@ def test_generate_wg_summary_files_task(self):
generate_wg_summary_files_task()
- summary_by_area_contents = (
- Path(settings.GROUP_SUMMARY_PATH) / "1wg-summary.txt"
- ).read_text(encoding="utf8")
- self.assertIn(group.parent.name, summary_by_area_contents)
- self.assertIn(group.acronym, summary_by_area_contents)
- self.assertIn(group.name, summary_by_area_contents)
- self.assertIn(chair.address, summary_by_area_contents)
-
- summary_by_acronym_contents = (
- Path(settings.GROUP_SUMMARY_PATH) / "1wg-summary-by-acronym.txt"
- ).read_text(encoding="utf8")
- self.assertIn(group.acronym, summary_by_acronym_contents)
- self.assertIn(group.name, summary_by_acronym_contents)
- self.assertIn(chair.address, summary_by_acronym_contents)
+ for summary_by_area_contents in [
+ (
+ Path(settings.GROUP_SUMMARY_PATH) / "1wg-summary.txt"
+ ).read_text(encoding="utf8"),
+ retrieve_str("indexes", "1wg-summary.txt")
+ ]:
+ self.assertIn(group.parent.name, summary_by_area_contents)
+ self.assertIn(group.acronym, summary_by_area_contents)
+ self.assertIn(group.name, summary_by_area_contents)
+ self.assertIn(chair.address, summary_by_area_contents)
+
+ for summary_by_acronym_contents in [
+ (
+ Path(settings.GROUP_SUMMARY_PATH) / "1wg-summary-by-acronym.txt"
+ ).read_text(encoding="utf8"),
+ retrieve_str("indexes", "1wg-summary-by-acronym.txt")
+ ]:
+ self.assertIn(group.acronym, summary_by_acronym_contents)
+ self.assertIn(group.name, summary_by_acronym_contents)
+ self.assertIn(chair.address, summary_by_acronym_contents)
def test_chartering_groups(self):
group = CharterFactory(group__type_id='wg',group__parent=GroupFactory(type_id='area'),states=[('charter','intrev')]).group
diff --git a/ietf/idindex/tasks.py b/ietf/idindex/tasks.py
index 5e7e193bba..2f5f1871d7 100644
--- a/ietf/idindex/tasks.py
+++ b/ietf/idindex/tasks.py
@@ -15,6 +15,8 @@
from django.conf import settings
+from ietf.doc.storage_utils import store_file
+
from .index import all_id_txt, all_id2_txt, id_index_txt
@@ -38,6 +40,8 @@ def move_into_place(self, src_path: Path, dest_path: Path, hardlink_dirs: List[P
target = path / dest_path.name
target.unlink(missing_ok=True)
os.link(dest_path, target) # until python>=3.10
+ with dest_path.open("rb") as f:
+ store_file("indexes", dest_path.name, f, allow_overwrite=True)
def cleanup(self):
for tf_path in self.cleanup_list:
diff --git a/ietf/idindex/tests.py b/ietf/idindex/tests.py
index 44abf805f0..5cc7a7b3bb 100644
--- a/ietf/idindex/tests.py
+++ b/ietf/idindex/tests.py
@@ -15,6 +15,7 @@
from ietf.doc.factories import WgDraftFactory, RfcFactory
from ietf.doc.models import Document, RelatedDocument, State, LastCallDocEvent, NewRevisionDocEvent
+from ietf.doc.storage_utils import retrieve_str
from ietf.group.factories import GroupFactory
from ietf.name.models import DocRelationshipName
from ietf.idindex.index import all_id_txt, all_id2_txt, id_index_txt
@@ -203,5 +204,9 @@ def test_temp_file_manager(self):
self.assertFalse(path2.exists()) # left behind
# check destination contents and permissions
self.assertEqual(dest.read_text(), "yay")
+ self.assertEqual(
+ retrieve_str("indexes", "yay.txt"),
+ "yay"
+ )
self.assertEqual(dest.stat().st_mode & 0o777, 0o644)
self.assertTrue(dest.samefile(other_path / "yay.txt"))
diff --git a/ietf/liaisons/forms.py b/ietf/liaisons/forms.py
index 1d91041b25..1af29044b3 100644
--- a/ietf/liaisons/forms.py
+++ b/ietf/liaisons/forms.py
@@ -379,6 +379,8 @@ def save_attachments(self):
attach_file = io.open(os.path.join(settings.LIAISON_ATTACH_PATH, attach.name + extension), 'wb')
attach_file.write(attached_file.read())
attach_file.close()
+ attached_file.seek(0)
+ attach.store_file(attach.uploaded_filename, attached_file)
if not self.is_new:
# create modified event
diff --git a/ietf/liaisons/tests.py b/ietf/liaisons/tests.py
index a0186f6a01..1742687f14 100644
--- a/ietf/liaisons/tests.py
+++ b/ietf/liaisons/tests.py
@@ -19,6 +19,7 @@
from io import StringIO
from pyquery import PyQuery
+from ietf.doc.storage_utils import retrieve_str
from ietf.utils.test_utils import TestCase, login_testing_unauthorized
from ietf.utils.mail import outbox
@@ -414,7 +415,8 @@ def test_edit_liaison(self):
# edit
attachments_before = liaison.attachments.count()
- test_file = StringIO("hello world")
+ test_content = "hello world"
+ test_file = StringIO(test_content)
test_file.name = "unnamed"
r = self.client.post(url,
dict(from_groups=str(from_group.pk),
@@ -452,9 +454,12 @@ def test_edit_liaison(self):
self.assertEqual(attachment.title, "attachment")
with (Path(settings.LIAISON_ATTACH_PATH) / attachment.uploaded_filename).open() as f:
written_content = f.read()
+ self.assertEqual(written_content, test_content)
+ self.assertEqual(
+ retrieve_str(attachment.type_id, attachment.uploaded_filename),
+ test_content,
+ )
- test_file.seek(0)
- self.assertEqual(written_content, test_file.read())
def test_incoming_access(self):
'''Ensure only Secretariat, Liaison Managers, and Authorized Individuals
@@ -704,7 +709,8 @@ def test_add_incoming_liaison(self):
# add new
mailbox_before = len(outbox)
- test_file = StringIO("hello world")
+ test_content = "hello world"
+ test_file = StringIO(test_content)
test_file.name = "unnamed"
from_groups = [ str(g.pk) for g in Group.objects.filter(type="sdo") ]
to_group = Group.objects.get(acronym="mars")
@@ -756,6 +762,11 @@ def test_add_incoming_liaison(self):
self.assertEqual(attachment.title, "attachment")
with (Path(settings.LIAISON_ATTACH_PATH) / attachment.uploaded_filename).open() as f:
written_content = f.read()
+ self.assertEqual(written_content, test_content)
+ self.assertEqual(
+ retrieve_str(attachment.type_id, attachment.uploaded_filename),
+ test_content
+ )
test_file.seek(0)
self.assertEqual(written_content, test_file.read())
@@ -783,7 +794,8 @@ def test_add_outgoing_liaison(self):
# add new
mailbox_before = len(outbox)
- test_file = StringIO("hello world")
+ test_content = "hello world"
+ test_file = StringIO(test_content)
test_file.name = "unnamed"
from_group = Group.objects.get(acronym="mars")
to_group = Group.objects.filter(type="sdo")[0]
@@ -835,9 +847,11 @@ def test_add_outgoing_liaison(self):
self.assertEqual(attachment.title, "attachment")
with (Path(settings.LIAISON_ATTACH_PATH) / attachment.uploaded_filename).open() as f:
written_content = f.read()
-
- test_file.seek(0)
- self.assertEqual(written_content, test_file.read())
+ self.assertEqual(written_content, test_content)
+ self.assertEqual(
+ retrieve_str(attachment.type_id, attachment.uploaded_filename),
+ test_content
+ )
self.assertEqual(len(outbox), mailbox_before + 1)
self.assertTrue("Liaison Statement" in outbox[-1]["Subject"])
@@ -882,7 +896,8 @@ def test_liaison_add_attachment(self):
# get minimum edit post data
- file = StringIO('dummy file')
+ test_data = "dummy file"
+ file = StringIO(test_data)
file.name = "upload.txt"
post_data = dict(
from_groups = ','.join([ str(x.pk) for x in liaison.from_groups.all() ]),
@@ -909,6 +924,11 @@ def test_liaison_add_attachment(self):
self.assertEqual(liaison.attachments.count(),1)
event = liaison.liaisonstatementevent_set.order_by('id').last()
self.assertTrue(event.desc.startswith('Added attachment'))
+ attachment = liaison.attachments.get()
+ self.assertEqual(
+ retrieve_str(attachment.type_id, attachment.uploaded_filename),
+ test_data
+ )
def test_liaison_edit_attachment(self):
diff --git a/ietf/meeting/factories.py b/ietf/meeting/factories.py
index 69c1f0421b..eb36e9e756 100644
--- a/ietf/meeting/factories.py
+++ b/ietf/meeting/factories.py
@@ -9,6 +9,7 @@
from django.core.files.base import ContentFile
from django.db.models import Q
+from ietf.doc.storage_utils import store_str
from ietf.meeting.models import (Attended, Meeting, Session, SchedulingEvent, Schedule,
TimeSlot, SessionPresentation, FloorPlan, Room, SlideSubmission, Constraint,
MeetingHost, ProceedingsMaterial)
@@ -239,6 +240,10 @@ class Meta:
make_file = factory.PostGeneration(
lambda obj, create, extracted, **kwargs: open(obj.staged_filepath(),'a').close()
)
+
+ store_submission = factory.PostGeneration(
+ lambda obj, create, extracted, **kwargs: store_str("staging", obj.filename, "")
+ )
class ConstraintFactory(factory.django.DjangoModelFactory):
class Meta:
diff --git a/ietf/meeting/forms.py b/ietf/meeting/forms.py
index 3b66d2cd29..e1d1e90b8d 100644
--- a/ietf/meeting/forms.py
+++ b/ietf/meeting/forms.py
@@ -361,6 +361,7 @@ def save_agenda(self):
os.makedirs(directory)
with io.open(path, "w", encoding='utf-8') as file:
file.write(self.cleaned_data['agenda'])
+ doc.store_str(doc.uploaded_filename, self.cleaned_data['agenda'])
class InterimAnnounceForm(forms.ModelForm):
diff --git a/ietf/meeting/helpers.py b/ietf/meeting/helpers.py
index 7f1c85990e..39d271ae6b 100644
--- a/ietf/meeting/helpers.py
+++ b/ietf/meeting/helpers.py
@@ -649,6 +649,11 @@ def read_session_file(type, num, doc):
def read_agenda_file(num, doc):
return read_session_file('agenda', num, doc)
+# TODO-BLOBSTORE: this is _yet another_ draft derived variant created when users
+# ask for drafts from the meeting agenda page. Consider whether to refactor this
+# now to not call out to external binaries, and consider whether we need this extra
+# format at all in the draft blobstore. if so, it would probably be stored under
+# something like plainpdf/
def convert_draft_to_pdf(doc_name):
inpath = os.path.join(settings.IDSUBMIT_REPOSITORY_PATH, doc_name + ".txt")
outpath = os.path.join(settings.INTERNET_DRAFT_PDF_PATH, doc_name + ".pdf")
diff --git a/ietf/meeting/migrations/0010_alter_floorplan_image_alter_meetinghost_logo.py b/ietf/meeting/migrations/0010_alter_floorplan_image_alter_meetinghost_logo.py
new file mode 100644
index 0000000000..594a1a4048
--- /dev/null
+++ b/ietf/meeting/migrations/0010_alter_floorplan_image_alter_meetinghost_logo.py
@@ -0,0 +1,56 @@
+# Copyright The IETF Trust 2025, All Rights Reserved
+
+from django.db import migrations, models
+import ietf.meeting.models
+import ietf.utils.fields
+import ietf.utils.storage
+import ietf.utils.validators
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ("meeting", "0009_session_meetecho_recording_name"),
+ ]
+
+ operations = [
+ migrations.AlterField(
+ model_name="floorplan",
+ name="image",
+ field=models.ImageField(
+ blank=True,
+ default=None,
+ storage=ietf.utils.storage.BlobShadowFileSystemStorage(
+ kind="", location=None
+ ),
+ upload_to=ietf.meeting.models.floorplan_path,
+ ),
+ ),
+ migrations.AlterField(
+ model_name="meetinghost",
+ name="logo",
+ field=ietf.utils.fields.MissingOkImageField(
+ height_field="logo_height",
+ storage=ietf.utils.storage.BlobShadowFileSystemStorage(
+ kind="", location=None
+ ),
+ upload_to=ietf.meeting.models._host_upload_path,
+ validators=[
+ ietf.utils.validators.MaxImageSizeValidator(400, 400),
+ ietf.utils.validators.WrappedValidator(
+ ietf.utils.validators.validate_file_size, True
+ ),
+ ietf.utils.validators.WrappedValidator(
+ ietf.utils.validators.validate_file_extension,
+ [".png", ".jpg", ".jpeg"],
+ ),
+ ietf.utils.validators.WrappedValidator(
+ ietf.utils.validators.validate_mime_type,
+ ["image/jpeg", "image/png"],
+ True,
+ ),
+ ],
+ width_field="logo_width",
+ ),
+ ),
+ ]
diff --git a/ietf/meeting/models.py b/ietf/meeting/models.py
index 8c6fb97413..5284420731 100644
--- a/ietf/meeting/models.py
+++ b/ietf/meeting/models.py
@@ -39,7 +39,7 @@
from ietf.person.models import Person
from ietf.utils.decorators import memoize
from ietf.utils.history import find_history_replacements_active_at, find_history_active_at
-from ietf.utils.storage import NoLocationMigrationFileSystemStorage
+from ietf.utils.storage import BlobShadowFileSystemStorage
from ietf.utils.text import xslugify
from ietf.utils.timezone import datetime_from_date, date_today
from ietf.utils.models import ForeignKey
@@ -527,7 +527,12 @@ class FloorPlan(models.Model):
modified= models.DateTimeField(auto_now=True)
meeting = ForeignKey(Meeting)
order = models.SmallIntegerField()
- image = models.ImageField(storage=NoLocationMigrationFileSystemStorage(), upload_to=floorplan_path, blank=True, default=None)
+ image = models.ImageField(
+ storage=BlobShadowFileSystemStorage(kind="floorplan"),
+ upload_to=floorplan_path,
+ blank=True,
+ default=None,
+ )
#
class Meta:
ordering = ['-id',]
@@ -1431,8 +1436,12 @@ class MeetingHost(models.Model):
"""Meeting sponsor"""
meeting = ForeignKey(Meeting, related_name='meetinghosts')
name = models.CharField(max_length=255, blank=False)
+ # TODO-BLOBSTORE - capture these logos and look for other ImageField like model fields.
logo = MissingOkImageField(
- storage=NoLocationMigrationFileSystemStorage(location=settings.MEETINGHOST_LOGO_PATH),
+ storage=BlobShadowFileSystemStorage(
+ kind="meetinghostlogo",
+ location=settings.MEETINGHOST_LOGO_PATH,
+ ),
upload_to=_host_upload_path,
width_field='logo_width',
height_field='logo_height',
diff --git a/ietf/meeting/tests_views.py b/ietf/meeting/tests_views.py
index 581725dbc8..848c9b7723 100644
--- a/ietf/meeting/tests_views.py
+++ b/ietf/meeting/tests_views.py
@@ -38,6 +38,7 @@
import debug # pyflakes:ignore
from ietf.doc.models import Document, NewRevisionDocEvent
+from ietf.doc.storage_utils import exists_in_storage, remove_from_storage, retrieve_bytes, retrieve_str
from ietf.group.models import Group, Role, GroupFeatures
from ietf.group.utils import can_manage_group
from ietf.person.models import Person
@@ -55,6 +56,7 @@
from ietf.name.models import SessionStatusName, ImportantDateName, RoleName, ProceedingsMaterialTypeName
from ietf.utils.decorators import skip_coverage
from ietf.utils.mail import outbox, empty_outbox, get_payload_text
+from ietf.utils.test_runner import TestBlobstoreManager
from ietf.utils.test_utils import TestCase, login_testing_unauthorized, unicontent
from ietf.utils.timezone import date_today, time_now
@@ -112,7 +114,7 @@ def setUp(self):
# files will upload to the locations specified in settings.py.
# Note that this will affect any use of the storage class in
# meeting.models - i.e., FloorPlan.image and MeetingHost.logo
- self.patcher = patch('ietf.meeting.models.NoLocationMigrationFileSystemStorage.base_location',
+ self.patcher = patch('ietf.meeting.models.BlobShadowFileSystemStorage.base_location',
new_callable=PropertyMock)
mocked = self.patcher.start()
mocked.return_value = self.storage_dir
@@ -5228,6 +5230,7 @@ def test_interim_request_options(self):
def do_interim_request_single_virtual(self, emails_expected):
make_meeting_test_data()
+ TestBlobstoreManager().emptyTestBlobstores()
group = Group.objects.get(acronym='mars')
date = date_today() + datetime.timedelta(days=30)
time = time_now().replace(microsecond=0,second=0)
@@ -5278,6 +5281,12 @@ def do_interim_request_single_virtual(self, emails_expected):
doc = session.materials.first()
path = os.path.join(doc.get_file_path(),doc.filename_with_rev())
self.assertTrue(os.path.exists(path))
+ with Path(path).open() as f:
+ self.assertEqual(f.read(), agenda)
+ self.assertEqual(
+ retrieve_str("agenda",doc.uploaded_filename),
+ agenda
+ )
# check notices to secretariat and chairs
self.assertEqual(len(outbox), length_before + emails_expected)
return meeting
@@ -5299,6 +5308,7 @@ def test_interim_request_single_virtual_settings_approval_not_required(self):
def test_interim_request_single_in_person(self):
make_meeting_test_data()
+ TestBlobstoreManager().emptyTestBlobstores()
group = Group.objects.get(acronym='mars')
date = date_today() + datetime.timedelta(days=30)
time = time_now().replace(microsecond=0,second=0)
@@ -5345,6 +5355,10 @@ def test_interim_request_single_in_person(self):
timeslot = session.official_timeslotassignment().timeslot
self.assertEqual(timeslot.time,dt)
self.assertEqual(timeslot.duration,duration)
+ self.assertEqual(
+ retrieve_str("agenda",session.agenda().uploaded_filename),
+ agenda
+ )
def test_interim_request_multi_day(self):
make_meeting_test_data()
@@ -5412,6 +5426,11 @@ def test_interim_request_multi_day(self):
self.assertEqual(timeslot.time,dt2)
self.assertEqual(timeslot.duration,duration)
self.assertEqual(session.agenda_note,agenda_note)
+ for session in meeting.session_set.all():
+ self.assertEqual(
+ retrieve_str("agenda",session.agenda().uploaded_filename),
+ agenda
+ )
def test_interim_request_multi_day_non_consecutive(self):
make_meeting_test_data()
@@ -5474,6 +5493,7 @@ def test_interim_request_multi_day_cancel(self):
def test_interim_request_series(self):
make_meeting_test_data()
+ TestBlobstoreManager().emptyTestBlobstores()
meeting_count_before = Meeting.objects.filter(type='interim').count()
date = date_today() + datetime.timedelta(days=30)
if (date.month, date.day) == (12, 31):
@@ -5561,6 +5581,11 @@ def test_interim_request_series(self):
self.assertEqual(timeslot.time,dt2)
self.assertEqual(timeslot.duration,duration)
self.assertEqual(session.agenda_note,agenda_note)
+ for session in meeting.session_set.all():
+ self.assertEqual(
+ retrieve_str("agenda",session.agenda().uploaded_filename),
+ agenda
+ )
# test_interim_pending subsumed by test_appears_on_pending
@@ -6099,6 +6124,7 @@ def strfdelta(self, tdelta, fmt):
def test_interim_request_edit_agenda_updates_doc(self):
"""Updating the agenda through the request edit form should update the doc correctly"""
make_interim_test_data()
+ TestBlobstoreManager().emptyTestBlobstores()
meeting = add_event_info_to_session_qs(Session.objects.filter(meeting__type='interim', group__acronym='mars')).filter(current_status='sched').first().meeting
group = meeting.session_set.first().group
url = urlreverse('ietf.meeting.views.interim_request_edit', kwargs={'number': meeting.number})
@@ -6134,6 +6160,10 @@ def test_interim_request_edit_agenda_updates_doc(self):
self.assertNotEqual(agenda_doc.uploaded_filename, uploaded_filename_before, 'Uploaded filename should be updated')
with (Path(agenda_doc.get_file_path()) / agenda_doc.uploaded_filename).open() as f:
self.assertEqual(f.read(), 'modified agenda contents', 'New agenda contents should be saved')
+ self.assertEqual(
+ retrieve_str(agenda_doc.type_id, agenda_doc.uploaded_filename),
+ "modified agenda contents"
+ )
def test_interim_request_details_permissions(self):
make_interim_test_data()
@@ -6354,12 +6384,14 @@ def test_upload_bluesheets(self):
q = PyQuery(r.content)
self.assertIn('Upload', str(q("title")))
self.assertFalse(session.presentations.exists())
- test_file = StringIO('%PDF-1.4\n%âãÏÓ\nthis is some text for a test')
+ test_content = '%PDF-1.4\n%âãÏÓ\nthis is some text for a test'
+ test_file = StringIO(test_content)
test_file.name = "not_really.pdf"
r = self.client.post(url,dict(file=test_file))
self.assertEqual(r.status_code, 302)
bs_doc = session.presentations.filter(document__type_id='bluesheets').first().document
self.assertEqual(bs_doc.rev,'00')
+ self.assertEqual(retrieve_str("bluesheets", f"{bs_doc.name}-{bs_doc.rev}.pdf"), test_content)
r = self.client.get(url)
self.assertEqual(r.status_code, 200)
q = PyQuery(r.content)
@@ -6389,12 +6421,14 @@ def test_upload_bluesheets_interim(self):
q = PyQuery(r.content)
self.assertIn('Upload', str(q("title")))
self.assertFalse(session.presentations.exists())
- test_file = StringIO('%PDF-1.4\n%âãÏÓ\nthis is some text for a test')
+ test_content = '%PDF-1.4\n%âãÏÓ\nthis is some text for a test'
+ test_file = StringIO(test_content)
test_file.name = "not_really.pdf"
r = self.client.post(url,dict(file=test_file))
self.assertEqual(r.status_code, 302)
bs_doc = session.presentations.filter(document__type_id='bluesheets').first().document
self.assertEqual(bs_doc.rev,'00')
+ self.assertEqual(retrieve_str("bluesheets", f"{bs_doc.name}-{bs_doc.rev}.pdf"), test_content)
def test_upload_bluesheets_interim_chair_access(self):
make_meeting_test_data()
@@ -6467,27 +6501,36 @@ def test_upload_minutes_agenda(self):
text = doc.text()
self.assertIn('Some text', text)
self.assertNotIn('', text)
-
+ text = retrieve_str(doctype, f"{doc.name}-{doc.rev}.html")
+ self.assertIn('Some text', text)
+ self.assertNotIn('', text)
+
# txt upload
- test_file = BytesIO(b'This is some text for a test, with the word\nvirtual at the beginning of a line.')
+ test_bytes = b'This is some text for a test, with the word\nvirtual at the beginning of a line.'
+ test_file = BytesIO(test_bytes)
test_file.name = "some.txt"
r = self.client.post(url,dict(submission_method="upload",file=test_file,apply_to_all=False))
self.assertEqual(r.status_code, 302)
doc = session.presentations.filter(document__type_id=doctype).first().document
self.assertEqual(doc.rev,'01')
self.assertFalse(session2.presentations.filter(document__type_id=doctype))
+ retrieved_bytes = retrieve_bytes(doctype, f"{doc.name}-{doc.rev}.txt")
+ self.assertEqual(retrieved_bytes, test_bytes)
r = self.client.get(url)
self.assertEqual(r.status_code, 200)
q = PyQuery(r.content)
self.assertIn('Revise', str(q("Title")))
- test_file = BytesIO(b'this is some different text for a test')
+ test_bytes = b'this is some different text for a test'
+ test_file = BytesIO(test_bytes)
test_file.name = "also_some.txt"
r = self.client.post(url,dict(submission_method="upload",file=test_file,apply_to_all=True))
self.assertEqual(r.status_code, 302)
doc = Document.objects.get(pk=doc.pk)
self.assertEqual(doc.rev,'02')
self.assertTrue(session2.presentations.filter(document__type_id=doctype))
+ retrieved_bytes = retrieve_bytes(doctype, f"{doc.name}-{doc.rev}.txt")
+ self.assertEqual(retrieved_bytes, test_bytes)
# Test bad encoding
test_file = BytesIO('
Title
Some\x93text'.encode('latin1'))
@@ -6540,12 +6583,15 @@ def test_upload_minutes_agenda_interim(self):
q = PyQuery(r.content)
self.assertIn('Upload', str(q("title")))
self.assertFalse(session.presentations.filter(document__type_id=doctype))
- test_file = BytesIO(b'this is some text for a test')
+ test_bytes = b'this is some text for a test'
+ test_file = BytesIO(test_bytes)
test_file.name = "not_really.txt"
r = self.client.post(url,dict(submission_method="upload",file=test_file))
self.assertEqual(r.status_code, 302)
doc = session.presentations.filter(document__type_id=doctype).first().document
self.assertEqual(doc.rev,'00')
+ retrieved_bytes = retrieve_bytes(doctype, f"{doc.name}-{doc.rev}.txt")
+ self.assertEqual(retrieved_bytes, test_bytes)
# Verify that we don't have dead links
url = urlreverse('ietf.meeting.views.session_details', kwargs={'num':session.meeting.number, 'acronym': session.group.acronym})
@@ -6567,12 +6613,15 @@ def test_upload_narrativeminutes(self):
q = PyQuery(r.content)
self.assertIn('Upload', str(q("title")))
self.assertFalse(session.presentations.filter(document__type_id=doctype))
- test_file = BytesIO(b'this is some text for a test')
+ test_bytes = b'this is some text for a test'
+ test_file = BytesIO(test_bytes)
test_file.name = "not_really.txt"
r = self.client.post(url,dict(submission_method="upload",file=test_file))
self.assertEqual(r.status_code, 302)
doc = session.presentations.filter(document__type_id=doctype).first().document
self.assertEqual(doc.rev,'00')
+ retrieved_bytes = retrieve_bytes(doctype, f"{doc.name}-{doc.rev}.txt")
+ self.assertEqual(retrieved_bytes, test_bytes)
# Verify that we don't have dead links
url = urlreverse('ietf.meeting.views.session_details', kwargs={'num':session.meeting.number, 'acronym': session.group.acronym})
@@ -6597,18 +6646,22 @@ def test_enter_agenda(self):
self.assertRedirects(r, redirect_url)
doc = session.presentations.filter(document__type_id='agenda').first().document
self.assertEqual(doc.rev,'00')
+ self.assertEqual(retrieve_str("agenda",f"{doc.name}-{doc.rev}.md"), test_text)
r = self.client.get(url)
self.assertEqual(r.status_code, 200)
q = PyQuery(r.content)
self.assertIn('Revise', str(q("Title")))
- test_file = BytesIO(b'Upload after enter')
+ test_bytes = b'Upload after enter'
+ test_file = BytesIO(test_bytes)
test_file.name = "some.txt"
r = self.client.post(url,dict(submission_method="upload",file=test_file))
self.assertRedirects(r, redirect_url)
doc = Document.objects.get(pk=doc.pk)
self.assertEqual(doc.rev,'01')
+ retrieved_bytes = retrieve_bytes("agenda", f"{doc.name}-{doc.rev}.txt")
+ self.assertEqual(retrieved_bytes, test_bytes)
r = self.client.get(url)
self.assertEqual(r.status_code, 200)
@@ -6620,6 +6673,8 @@ def test_enter_agenda(self):
self.assertRedirects(r, redirect_url)
doc = Document.objects.get(pk=doc.pk)
self.assertEqual(doc.rev,'02')
+ self.assertEqual(retrieve_str("agenda",f"{doc.name}-{doc.rev}.md"), test_text)
+
@override_settings(MEETECHO_API_CONFIG="fake settings") # enough to trigger API calls
@patch("ietf.meeting.views.SlidesManager")
@@ -6635,7 +6690,8 @@ def test_upload_slides(self, mock_slides_manager_cls):
q = PyQuery(r.content)
self.assertIn('Upload', str(q("title")))
self.assertFalse(session1.presentations.filter(document__type_id='slides'))
- test_file = BytesIO(b'this is not really a slide')
+ test_bytes = b'this is not really a slide'
+ test_file = BytesIO(test_bytes)
test_file.name = 'not_really.txt'
r = self.client.post(url,dict(file=test_file,title='a test slide file',apply_to_all=True,approved=True))
self.assertEqual(r.status_code, 302)
@@ -6647,6 +6703,7 @@ def test_upload_slides(self, mock_slides_manager_cls):
self.assertEqual(mock_slides_manager_cls.call_count, 1)
self.assertEqual(mock_slides_manager_cls.call_args, call(api_config="fake settings"))
self.assertEqual(mock_slides_manager_cls.return_value.add.call_count, 2)
+ self.assertEqual(retrieve_bytes("slides", f"{sp.document.name}-{sp.document.rev}.txt"), test_bytes)
# don't care which order they were called in, just that both sessions were updated
self.assertCountEqual(
mock_slides_manager_cls.return_value.add.call_args_list,
@@ -6658,7 +6715,8 @@ def test_upload_slides(self, mock_slides_manager_cls):
mock_slides_manager_cls.reset_mock()
url = urlreverse('ietf.meeting.views.upload_session_slides',kwargs={'num':session2.meeting.number,'session_id':session2.id})
- test_file = BytesIO(b'some other thing still not slidelike')
+ test_bytes = b'some other thing still not slidelike'
+ test_file = BytesIO(test_bytes)
test_file.name = 'also_not_really.txt'
r = self.client.post(url,dict(file=test_file,title='a different slide file',apply_to_all=False,approved=True))
self.assertEqual(r.status_code, 302)
@@ -6671,6 +6729,7 @@ def test_upload_slides(self, mock_slides_manager_cls):
self.assertEqual(mock_slides_manager_cls.call_count, 1)
self.assertEqual(mock_slides_manager_cls.call_args, call(api_config="fake settings"))
self.assertEqual(mock_slides_manager_cls.return_value.add.call_count, 1)
+ self.assertEqual(retrieve_bytes("slides", f"{sp.document.name}-{sp.document.rev}.txt"), test_bytes)
self.assertEqual(
mock_slides_manager_cls.return_value.add.call_args,
call(session=session2, slides=sp.document, order=2),
@@ -6682,7 +6741,8 @@ def test_upload_slides(self, mock_slides_manager_cls):
self.assertTrue(r.status_code, 200)
q = PyQuery(r.content)
self.assertIn('Revise', str(q("title")))
- test_file = BytesIO(b'new content for the second slide deck')
+ test_bytes = b'new content for the second slide deck'
+ test_file = BytesIO(test_bytes)
test_file.name = 'doesnotmatter.txt'
r = self.client.post(url,dict(file=test_file,title='rename the presentation',apply_to_all=False, approved=True))
self.assertEqual(r.status_code, 302)
@@ -6692,6 +6752,7 @@ def test_upload_slides(self, mock_slides_manager_cls):
self.assertEqual(replacement_sp.rev,'01')
self.assertEqual(replacement_sp.document.rev,'01')
self.assertEqual(mock_slides_manager_cls.call_count, 1)
+ self.assertEqual(retrieve_bytes("slides", f"{replacement_sp.document.name}-{replacement_sp.document.rev}.txt"), test_bytes)
self.assertEqual(mock_slides_manager_cls.call_args, call(api_config="fake settings"))
self.assertEqual(mock_slides_manager_cls.return_value.revise.call_count, 1)
self.assertEqual(
@@ -6771,7 +6832,6 @@ def test_remove_sessionpresentation(self, mock_slides_manager_cls):
self.assertEqual(2, agenda.docevent_set.count())
self.assertFalse(mock_slides_manager_cls.called)
-
def test_propose_session_slides(self):
for type_id in ['ietf','interim']:
session = SessionFactory(meeting__type_id=type_id)
@@ -6798,7 +6858,8 @@ def test_propose_session_slides(self):
login_testing_unauthorized(self,newperson.user.username,upload_url)
r = self.client.get(upload_url)
self.assertEqual(r.status_code,200)
- test_file = BytesIO(b'this is not really a slide')
+ test_bytes = b'this is not really a slide'
+ test_file = BytesIO(test_bytes)
test_file.name = 'not_really.txt'
empty_outbox()
r = self.client.post(upload_url,dict(file=test_file,title='a test slide file',apply_to_all=True,approved=False))
@@ -6806,6 +6867,10 @@ def test_propose_session_slides(self):
session = Session.objects.get(pk=session.pk)
self.assertEqual(session.slidesubmission_set.count(),1)
self.assertEqual(len(outbox),1)
+ self.assertEqual(
+ retrieve_bytes("staging", session.slidesubmission_set.get().filename),
+ test_bytes
+ )
r = self.client.get(session_overview_url)
self.assertEqual(r.status_code, 200)
@@ -6825,13 +6890,20 @@ def test_propose_session_slides(self):
login_testing_unauthorized(self,chair.user.username,upload_url)
r = self.client.get(upload_url)
self.assertEqual(r.status_code,200)
- test_file = BytesIO(b'this is not really a slide either')
+ test_bytes = b'this is not really a slide either'
+ test_file = BytesIO(test_bytes)
test_file.name = 'again_not_really.txt'
empty_outbox()
r = self.client.post(upload_url,dict(file=test_file,title='a selfapproved test slide file',apply_to_all=True,approved=True))
self.assertEqual(r.status_code, 302)
self.assertEqual(len(outbox),0)
self.assertEqual(session.slidesubmission_set.count(),2)
+ sp = session.presentations.get(document__title__contains="selfapproved")
+ self.assertFalse(exists_in_storage("staging", sp.document.uploaded_filename))
+ self.assertEqual(
+ retrieve_bytes("slides", sp.document.uploaded_filename),
+ test_bytes
+ )
self.client.logout()
self.client.login(username=chair.user.username, password=chair.user.username+"+password")
@@ -6854,6 +6926,8 @@ def test_disapprove_proposed_slides(self):
self.assertEqual(r.status_code,302)
self.assertEqual(SlideSubmission.objects.filter(status__slug = 'rejected').count(), 1)
self.assertEqual(SlideSubmission.objects.filter(status__slug = 'pending').count(), 0)
+ if submission.filename is not None and submission.filename != "":
+ self.assertFalse(exists_in_storage("staging", submission.filename))
r = self.client.get(url)
self.assertEqual(r.status_code, 200)
self.assertRegex(r.content.decode(), r"These\s+slides\s+have\s+already\s+been\s+rejected")
@@ -6872,6 +6946,7 @@ def test_approve_proposed_slides(self, mock_slides_manager_cls):
r = self.client.get(url)
self.assertEqual(r.status_code,200)
empty_outbox()
+ self.assertTrue(exists_in_storage("staging", submission.filename))
r = self.client.post(url,dict(title='different title',approve='approve'))
self.assertEqual(r.status_code,302)
self.assertEqual(SlideSubmission.objects.filter(status__slug = 'pending').count(), 0)
@@ -6881,6 +6956,8 @@ def test_approve_proposed_slides(self, mock_slides_manager_cls):
self.assertIsNotNone(submission.doc)
self.assertEqual(session.presentations.count(),1)
self.assertEqual(session.presentations.first().document.title,'different title')
+ self.assertTrue(exists_in_storage("slides", submission.doc.uploaded_filename))
+ self.assertFalse(exists_in_storage("staging", submission.filename))
self.assertEqual(mock_slides_manager_cls.call_count, 1)
self.assertEqual(mock_slides_manager_cls.call_args, call(api_config="fake settings"))
self.assertEqual(mock_slides_manager_cls.return_value.add.call_count, 1)
@@ -6900,6 +6977,7 @@ def test_approve_proposed_slides(self, mock_slides_manager_cls):
@override_settings(MEETECHO_API_CONFIG="fake settings") # enough to trigger API calls
@patch("ietf.meeting.views.SlidesManager")
def test_approve_proposed_slides_multisession_apply_one(self, mock_slides_manager_cls):
+ TestBlobstoreManager().emptyTestBlobstores()
submission = SlideSubmissionFactory(session__meeting__type_id='ietf')
session1 = submission.session
session2 = SessionFactory(group=submission.session.group, meeting=submission.session.meeting)
@@ -6928,6 +7006,7 @@ def test_approve_proposed_slides_multisession_apply_one(self, mock_slides_manage
@override_settings(MEETECHO_API_CONFIG="fake settings") # enough to trigger API calls
@patch("ietf.meeting.views.SlidesManager")
def test_approve_proposed_slides_multisession_apply_all(self, mock_slides_manager_cls):
+ TestBlobstoreManager().emptyTestBlobstores()
submission = SlideSubmissionFactory(session__meeting__type_id='ietf')
session1 = submission.session
session2 = SessionFactory(group=submission.session.group, meeting=submission.session.meeting)
@@ -6972,12 +7051,15 @@ def test_submit_and_approve_multiple_versions(self, mock_slides_manager_cls):
submission = SlideSubmission.objects.get(session=session)
+ self.assertTrue(exists_in_storage("staging", submission.filename))
approve_url = urlreverse('ietf.meeting.views.approve_proposed_slides', kwargs={'slidesubmission_id':submission.pk,'num':submission.session.meeting.number})
login_testing_unauthorized(self, chair.user.username, approve_url)
r = self.client.post(approve_url,dict(title=submission.title,approve='approve'))
submission.refresh_from_db()
self.assertEqual(r.status_code,302)
self.client.logout()
+ self.assertFalse(exists_in_storage("staging", submission.filename))
+ self.assertTrue(exists_in_storage("slides", submission.doc.uploaded_filename))
self.assertEqual(mock_slides_manager_cls.call_count, 1)
self.assertEqual(mock_slides_manager_cls.call_args, call(api_config="fake settings"))
self.assertEqual(mock_slides_manager_cls.return_value.add.call_count, 1)
@@ -7003,11 +7085,16 @@ def test_submit_and_approve_multiple_versions(self, mock_slides_manager_cls):
(first_submission, second_submission) = SlideSubmission.objects.filter(session=session, status__slug = 'pending').order_by('id')
+ self.assertTrue(exists_in_storage("staging", first_submission.filename))
+ self.assertTrue(exists_in_storage("staging", second_submission.filename))
approve_url = urlreverse('ietf.meeting.views.approve_proposed_slides', kwargs={'slidesubmission_id':second_submission.pk,'num':second_submission.session.meeting.number})
login_testing_unauthorized(self, chair.user.username, approve_url)
r = self.client.post(approve_url,dict(title=submission.title,approve='approve'))
first_submission.refresh_from_db()
second_submission.refresh_from_db()
+ self.assertTrue(exists_in_storage("staging", first_submission.filename))
+ self.assertFalse(exists_in_storage("staging", second_submission.filename))
+ self.assertTrue(exists_in_storage("slides", second_submission.doc.uploaded_filename))
self.assertEqual(r.status_code,302)
self.assertEqual(mock_slides_manager_cls.call_count, 1)
self.assertEqual(mock_slides_manager_cls.call_args, call(api_config="fake settings"))
@@ -7024,6 +7111,7 @@ def test_submit_and_approve_multiple_versions(self, mock_slides_manager_cls):
self.assertEqual(r.status_code,302)
self.client.logout()
self.assertFalse(mock_slides_manager_cls.called)
+ self.assertFalse(exists_in_storage("staging", first_submission.filename))
self.assertEqual(SlideSubmission.objects.filter(status__slug = 'pending').count(),0)
self.assertEqual(SlideSubmission.objects.filter(status__slug = 'rejected').count(),1)
@@ -7114,6 +7202,10 @@ def test_imports_previewed_text(self):
minutes_path = Path(self.meeting.get_materials_path()) / 'minutes'
with (minutes_path / self.session.minutes().uploaded_filename).open() as f:
self.assertEqual(f.read(), 'original markdown text')
+ self.assertEqual(
+ retrieve_str("minutes", self.session.minutes().uploaded_filename),
+ 'original markdown text'
+ )
def test_refuses_identical_import(self):
"""Should not be able to import text identical to the current revision"""
@@ -7173,7 +7265,9 @@ def test_handles_missing_previous_revision_file(self):
# remove the file uploaded for the first rev
minutes_docs = self.session.presentations.filter(document__type='minutes')
self.assertEqual(minutes_docs.count(), 1)
- Path(minutes_docs.first().document.get_file_name()).unlink()
+ to_remove = Path(minutes_docs.first().document.get_file_name())
+ to_remove.unlink()
+ remove_from_storage("minutes", to_remove.name)
self.assertEqual(r.status_code, 302)
with requests_mock.Mocker() as mock:
diff --git a/ietf/meeting/utils.py b/ietf/meeting/utils.py
index 92bae5ac23..9603278399 100644
--- a/ietf/meeting/utils.py
+++ b/ietf/meeting/utils.py
@@ -24,6 +24,7 @@
import debug # pyflakes:ignore
from ietf.dbtemplate.models import DBTemplate
+from ietf.doc.storage_utils import store_bytes, store_str
from ietf.meeting.models import (Session, SchedulingEvent, TimeSlot,
Constraint, SchedTimeSessAssignment, SessionPresentation, Attended)
from ietf.doc.models import Document, State, NewRevisionDocEvent, StateDocEvent
@@ -772,7 +773,12 @@ def handle_upload_file(file, filename, meeting, subdir, request=None, encoding=N
# Whole file sanitization; add back what's missing from a complete
# document (sanitize will remove these).
clean = clean_html(text)
- destination.write(clean.encode("utf8"))
+ clean_bytes = clean.encode('utf8')
+ destination.write(clean_bytes)
+ # Assumes contents of subdir are always document type ids
+ # TODO-BLOBSTORE: see if we can refactor this so that the connection to the document isn't lost
+ # In the meantime, consider faking it by parsing filename (shudder).
+ store_bytes(subdir, filename.name, clean_bytes)
if request and clean != text:
messages.warning(request,
(
@@ -783,6 +789,11 @@ def handle_upload_file(file, filename, meeting, subdir, request=None, encoding=N
else:
for chunk in chunks:
destination.write(chunk)
+ file.seek(0)
+ if hasattr(file, "chunks"):
+ chunks = file.chunks()
+ # TODO-BLOBSTORE: See above question about refactoring
+ store_bytes(subdir, filename.name, b"".join(chunks))
return None
@@ -809,13 +820,15 @@ def new_doc_for_session(type_id, session):
session.presentations.create(document=doc,rev='00')
return doc
+# TODO-BLOBSTORE - consider adding doc to this signature and factoring away type_id
def write_doc_for_session(session, type_id, filename, contents):
filename = Path(filename)
path = Path(session.meeting.get_materials_path()) / type_id
path.mkdir(parents=True, exist_ok=True)
with open(path / filename, "wb") as file:
file.write(contents.encode('utf-8'))
- return
+ store_str(type_id, filename.name, contents)
+ return None
def create_recording(session, url, title=None, user=None):
'''
diff --git a/ietf/meeting/views.py b/ietf/meeting/views.py
index 1226e30d60..3fa605ed7e 100644
--- a/ietf/meeting/views.py
+++ b/ietf/meeting/views.py
@@ -52,6 +52,7 @@
from ietf.doc.fields import SearchableDocumentsField
from ietf.doc.models import Document, State, DocEvent, NewRevisionDocEvent
+from ietf.doc.storage_utils import remove_from_storage, retrieve_bytes, store_file
from ietf.group.models import Group
from ietf.group.utils import can_manage_session_materials, can_manage_some_groups, can_manage_group
from ietf.person.models import Person, User
@@ -3091,6 +3092,8 @@ def upload_session_slides(request, session_id, num, name=None):
for chunk in file.chunks():
destination.write(chunk)
destination.close()
+ file.seek(0)
+ store_file("staging", filename, file)
submission.filename = filename
submission.save()
@@ -4645,7 +4648,6 @@ def err(code, text):
save_err = save_bluesheet(request, session, file)
if save_err:
return err(400, save_err)
-
return HttpResponse("Done", status=200, content_type='text/plain')
@@ -4957,6 +4959,8 @@ def approve_proposed_slides(request, slidesubmission_id, num):
if not os.path.exists(path):
os.makedirs(path)
shutil.move(submission.staged_filepath(), os.path.join(path, target_filename))
+ doc.store_bytes(target_filename, retrieve_bytes("staging", submission.filename))
+ remove_from_storage("staging", submission.filename)
post_process(doc)
DocEvent.objects.create(type="approved_slides", doc=doc, rev=doc.rev, by=request.user.person, desc="Slides approved")
@@ -4994,11 +4998,14 @@ def approve_proposed_slides(request, slidesubmission_id, num):
# in a SlideSubmission object without a file. Handle
# this case and keep processing the 'disapprove' even if
# the filename doesn't exist.
- try:
- if submission.filename != None and submission.filename != '':
+
+ if submission.filename != None and submission.filename != '':
+ try:
os.unlink(submission.staged_filepath())
- except (FileNotFoundError, IsADirectoryError):
- pass
+ except (FileNotFoundError, IsADirectoryError):
+ pass
+ remove_from_storage("staging", submission.filename)
+
acronym = submission.session.group.acronym
submission.status = SlideSubmissionStatusName.objects.get(slug='rejected')
submission.save()
diff --git a/ietf/nomcom/models.py b/ietf/nomcom/models.py
index 2ed1124c5c..c206e467bd 100644
--- a/ietf/nomcom/models.py
+++ b/ietf/nomcom/models.py
@@ -42,6 +42,7 @@ class ReminderDates(models.Model):
class NomCom(models.Model):
+ # TODO-BLOBSTORE: migrate this to a database field instead of a FileField and update code accordingly
public_key = models.FileField(storage=NoLocationMigrationFileSystemStorage(location=settings.NOMCOM_PUBLIC_KEYS_DIR),
upload_to=upload_path_handler, blank=True, null=True)
diff --git a/ietf/person/migrations/0004_alter_person_photo_alter_person_photo_thumb.py b/ietf/person/migrations/0004_alter_person_photo_alter_person_photo_thumb.py
new file mode 100644
index 0000000000..f34382fa70
--- /dev/null
+++ b/ietf/person/migrations/0004_alter_person_photo_alter_person_photo_thumb.py
@@ -0,0 +1,38 @@
+# Copyright The IETF Trust 2025, All Rights Reserved
+
+from django.db import migrations, models
+import ietf.utils.storage
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ("person", "0003_alter_personalapikey_endpoint"),
+ ]
+
+ operations = [
+ migrations.AlterField(
+ model_name="person",
+ name="photo",
+ field=models.ImageField(
+ blank=True,
+ default=None,
+ storage=ietf.utils.storage.BlobShadowFileSystemStorage(
+ kind="", location=None
+ ),
+ upload_to="photo",
+ ),
+ ),
+ migrations.AlterField(
+ model_name="person",
+ name="photo_thumb",
+ field=models.ImageField(
+ blank=True,
+ default=None,
+ storage=ietf.utils.storage.BlobShadowFileSystemStorage(
+ kind="", location=None
+ ),
+ upload_to="photo",
+ ),
+ ),
+ ]
diff --git a/ietf/person/models.py b/ietf/person/models.py
index 85989acfc1..93364478ae 100644
--- a/ietf/person/models.py
+++ b/ietf/person/models.py
@@ -29,7 +29,7 @@
from ietf.name.models import ExtResourceName
from ietf.person.name import name_parts, initials, plain_name
from ietf.utils.mail import send_mail_preformatted
-from ietf.utils.storage import NoLocationMigrationFileSystemStorage
+from ietf.utils.storage import BlobShadowFileSystemStorage
from ietf.utils.mail import formataddr
from ietf.person.name import unidecode_name
from ietf.utils import log
@@ -60,8 +60,18 @@ class Person(models.Model):
pronouns_selectable = jsonfield.JSONCharField("Pronouns", max_length=120, blank=True, null=True, default=list )
pronouns_freetext = models.CharField(" ", max_length=30, null=True, blank=True, help_text="Optionally provide your personal pronouns. These will be displayed on your public profile page and alongside your name in Meetecho and, in future, other systems. Select any number of the checkboxes OR provide a custom string up to 30 characters.")
biography = models.TextField(blank=True, help_text="Short biography for use on leadership pages. Use plain text or reStructuredText markup.")
- photo = models.ImageField(storage=NoLocationMigrationFileSystemStorage(), upload_to=settings.PHOTOS_DIRNAME, blank=True, default=None)
- photo_thumb = models.ImageField(storage=NoLocationMigrationFileSystemStorage(), upload_to=settings.PHOTOS_DIRNAME, blank=True, default=None)
+ photo = models.ImageField(
+ storage=BlobShadowFileSystemStorage(kind="photo"),
+ upload_to=settings.PHOTOS_DIRNAME,
+ blank=True,
+ default=None,
+ )
+ photo_thumb = models.ImageField(
+ storage=BlobShadowFileSystemStorage(kind="photo"),
+ upload_to=settings.PHOTOS_DIRNAME,
+ blank=True,
+ default=None,
+ )
name_from_draft = models.CharField("Full Name (from submission)", null=True, max_length=255, editable=False, help_text="Name as found in an Internet-Draft submission.")
def __str__(self):
diff --git a/ietf/settings.py b/ietf/settings.py
index 125127ba15..faee42237c 100644
--- a/ietf/settings.py
+++ b/ietf/settings.py
@@ -183,6 +183,12 @@
# Server-side static.ietf.org URL (used in pdfized)
STATIC_IETF_ORG_INTERNAL = STATIC_IETF_ORG
+ENABLE_BLOBSTORAGE = True
+
+BLOBSTORAGE_MAX_ATTEMPTS = 1
+BLOBSTORAGE_CONNECT_TIMEOUT = 2
+BLOBSTORAGE_READ_TIMEOUT = 2
+
WSGI_APPLICATION = "ietf.wsgi.application"
AUTHENTICATION_BACKENDS = ( 'ietf.ietfauth.backends.CaseInsensitiveModelBackend', )
@@ -736,6 +742,38 @@ def skip_unreadable_post(record):
"schedule_name": r"(?P[A-Za-z0-9-:_]+)",
}
+STORAGES: dict[str, Any] = {
+ "default": {"BACKEND": "django.core.files.storage.FileSystemStorage"},
+ "staticfiles": {"BACKEND": "django.contrib.staticfiles.storage.StaticFilesStorage"},
+}
+
+# settings_local will need to configure storages for these names
+MORE_STORAGE_NAMES: list[str] = [
+ "bofreq",
+ "charter",
+ "conflrev",
+ "active-draft",
+ "draft",
+ "slides",
+ "minutes",
+ "agenda",
+ "bluesheets",
+ "procmaterials",
+ "narrativeminutes",
+ "statement",
+ "statchg",
+ "liai-att",
+ "chatlog",
+ "polls",
+ "staging",
+ "bibxml-ids",
+ "indexes",
+ "floorplan",
+ "meetinghostlogo",
+ "photo",
+ "review",
+]
+
# Override this in settings_local.py if needed
# *_PATH variables ends with a slash/ .
diff --git a/ietf/settings_test.py b/ietf/settings_test.py
index 94ca22c71b..fe77152d42 100755
--- a/ietf/settings_test.py
+++ b/ietf/settings_test.py
@@ -14,7 +14,8 @@
import shutil
import tempfile
from ietf.settings import * # pyflakes:ignore
-from ietf.settings import TEST_CODE_COVERAGE_CHECKER
+from ietf.settings import STORAGES, TEST_CODE_COVERAGE_CHECKER, MORE_STORAGE_NAMES, BLOBSTORAGE_CONNECT_TIMEOUT, BLOBSTORAGE_READ_TIMEOUT, BLOBSTORAGE_MAX_ATTEMPTS
+import botocore.config
import debug # pyflakes:ignore
debug.debug = True
@@ -105,3 +106,30 @@ def tempdir_with_cleanup(**kwargs):
'level': 'INFO',
},
}
+
+# Configure storages for the blob store - use env settings if present. See the --no-manage-blobstore test option.
+_blob_store_endpoint_url = os.environ.get("DATATRACKER_BLOB_STORE_ENDPOINT_URL", "http://blobstore:9000")
+_blob_store_access_key = os.environ.get("DATATRACKER_BLOB_STORE_ACCESS_KEY", "minio_root")
+_blob_store_secret_key = os.environ.get("DATATRACKER_BLOB_STORE_SECRET_KEY", "minio_pass")
+_blob_store_bucket_prefix = os.environ.get("DATATRACKER_BLOB_STORE_BUCKET_PREFIX", "test-")
+_blob_store_enable_profiling = (
+ os.environ.get("DATATRACKER_BLOB_STORE_ENABLE_PROFILING", "false").lower() == "true"
+)
+for storagename in MORE_STORAGE_NAMES:
+ STORAGES[storagename] = {
+ "BACKEND": "ietf.doc.storage_backends.CustomS3Storage",
+ "OPTIONS": dict(
+ endpoint_url=_blob_store_endpoint_url,
+ access_key=_blob_store_access_key,
+ secret_key=_blob_store_secret_key,
+ security_token=None,
+ client_config=botocore.config.Config(
+ signature_version="s3v4",
+ connect_timeout=BLOBSTORAGE_CONNECT_TIMEOUT,
+ read_timeout=BLOBSTORAGE_READ_TIMEOUT,
+ retries={"total_max_attempts": BLOBSTORAGE_MAX_ATTEMPTS},
+ ),
+ bucket_name=f"{_blob_store_bucket_prefix}{storagename}",
+ ietf_log_blob_timing=_blob_store_enable_profiling,
+ ),
+ }
diff --git a/ietf/submit/tests.py b/ietf/submit/tests.py
index 6a56839177..9a993480cd 100644
--- a/ietf/submit/tests.py
+++ b/ietf/submit/tests.py
@@ -31,6 +31,7 @@
ReviewFactory, WgRfcFactory)
from ietf.doc.models import ( Document, DocEvent, State,
BallotPositionDocEvent, DocumentAuthor, SubmissionDocEvent )
+from ietf.doc.storage_utils import exists_in_storage, retrieve_str, store_str
from ietf.doc.utils import create_ballot_if_not_open, can_edit_docextresources, update_action_holders
from ietf.group.factories import GroupFactory, RoleFactory
from ietf.group.models import Group
@@ -53,6 +54,7 @@
from ietf.utils import tool_version
from ietf.utils.accesstoken import generate_access_token
from ietf.utils.mail import outbox, get_payload_text
+from ietf.utils.test_runner import TestBlobstoreManager
from ietf.utils.test_utils import login_testing_unauthorized, TestCase
from ietf.utils.timezone import date_today
from ietf.utils.draft import PlaintextDraft
@@ -355,6 +357,7 @@ def verify_bibxml_ids_creation(self, draft):
def submit_new_wg(self, formats):
# submit new -> supply submitter info -> approve
+ TestBlobstoreManager().emptyTestBlobstores()
GroupFactory(type_id='wg',acronym='ames')
mars = GroupFactory(type_id='wg', acronym='mars')
RoleFactory(name_id='chair', group=mars, person__user__username='marschairman')
@@ -428,6 +431,13 @@ def submit_new_wg(self, formats):
self.assertTrue(draft.latest_event(type="added_suggested_replaces"))
self.assertTrue(not os.path.exists(os.path.join(self.staging_dir, "%s-%s.txt" % (name, rev))))
self.assertTrue(os.path.exists(os.path.join(self.repository_dir, "%s-%s.txt" % (name, rev))))
+ check_ext = ["xml", "txt", "html"] if "xml" in formats else ["txt"]
+ for ext in check_ext:
+ basename=f"{name}-{rev}.{ext}"
+ extname=f"{ext}/{basename}"
+ self.assertFalse(exists_in_storage("staging", basename))
+ self.assertTrue(exists_in_storage("active-draft", extname))
+ self.assertTrue(exists_in_storage("draft", extname))
self.assertEqual(draft.type_id, "draft")
self.assertEqual(draft.stream_id, "ietf")
self.assertTrue(draft.expires >= timezone.now() + datetime.timedelta(days=settings.INTERNET_DRAFT_DAYS_TO_EXPIRE - 1))
@@ -535,6 +545,7 @@ def test_submit_new_wg_as_author_bad_submitter(self):
def submit_new_concluded_wg_as_author(self, group_state_id='conclude'):
"""A new concluded WG submission by a logged-in author needs AD approval"""
+ TestBlobstoreManager().emptyTestBlobstores()
mars = GroupFactory(type_id='wg', acronym='mars', state_id=group_state_id)
draft = WgDraftFactory(group=mars)
setup_default_community_list_for_group(draft.group)
@@ -580,6 +591,7 @@ def test_submit_new_wg_with_extresources(self):
def submit_existing(self, formats, change_authors=True, group_type='wg', stream_type='ietf'):
# submit new revision of existing -> supply submitter info -> prev authors confirm
+ TestBlobstoreManager().emptyTestBlobstores()
def _assert_authors_are_action_holders(draft, expect=True):
for author in draft.authors():
@@ -771,6 +783,13 @@ def inspect_docevents(docevents, event_delta, event_type, be_in_desc, by_name):
self.assertTrue(os.path.exists(os.path.join(self.archive_dir, "%s-%s.txt" % (name, old_rev))))
self.assertTrue(not os.path.exists(os.path.join(self.staging_dir, "%s-%s.txt" % (name, rev))))
self.assertTrue(os.path.exists(os.path.join(self.repository_dir, "%s-%s.txt" % (name, rev))))
+ check_ext = ["xml", "txt", "html"] if "xml" in formats else ["txt"]
+ for ext in check_ext:
+ basename=f"{name}-{rev}.{ext}"
+ extname=f"{ext}/{basename}"
+ self.assertFalse(exists_in_storage("staging", basename))
+ self.assertTrue(exists_in_storage("active-draft", extname))
+ self.assertTrue(exists_in_storage("draft", extname))
self.assertEqual(draft.type_id, "draft")
if stream_type == 'ietf':
self.assertEqual(draft.stream_id, "ietf")
@@ -909,6 +928,7 @@ def test_submit_existing_iab_with_extresources(self):
def submit_new_individual(self, formats):
# submit new -> supply submitter info -> confirm
+ TestBlobstoreManager().emptyTestBlobstores()
name = "draft-authorname-testing-tests"
rev = "00"
@@ -971,7 +991,13 @@ def submit_new_individual(self, formats):
self.assertTrue(variant_path.samefile(variant_ftp_path))
variant_all_archive_path = Path(settings.INTERNET_ALL_DRAFTS_ARCHIVE_DIR) / variant_path.name
self.assertTrue(variant_path.samefile(variant_all_archive_path))
-
+ check_ext = ["xml", "txt", "html"] if "xml" in formats else ["txt"]
+ for ext in check_ext:
+ basename=f"{name}-{rev}.{ext}"
+ extname=f"{ext}/{basename}"
+ self.assertFalse(exists_in_storage("staging", basename))
+ self.assertTrue(exists_in_storage("active-draft", extname))
+ self.assertTrue(exists_in_storage("draft", extname))
def test_submit_new_individual_txt(self):
@@ -988,6 +1014,7 @@ def test_submit_new_individual_txt_xml(self):
self.submit_new_individual(["txt", "xml"])
def submit_new_draft_no_org_or_address(self, formats):
+ TestBlobstoreManager().emptyTestBlobstores()
name = 'draft-testing-no-org-or-address'
author = PersonFactory()
@@ -1078,6 +1105,7 @@ def _assert_extresource_change_event(self, doc, is_present=True):
self.assertIsNone(event, 'External resource change event was unexpectedly created')
def submit_new_draft_with_extresources(self, group):
+ TestBlobstoreManager().emptyTestBlobstores()
name = 'draft-testing-with-extresources'
status_url, author = self.do_submission(name, rev='00', group=group)
@@ -1107,6 +1135,7 @@ def test_submit_new_individual_with_extresources(self):
def submit_new_individual_logged_in(self, formats):
# submit new -> supply submitter info -> done
+ TestBlobstoreManager().emptyTestBlobstores()
name = "draft-authorname-testing-logged-in"
rev = "00"
@@ -1250,6 +1279,7 @@ def submit_existing_with_extresources(self, group_type, stream_type='ietf'):
Unlike some other tests in this module, does not confirm draft if this would be required.
"""
+ TestBlobstoreManager().emptyTestBlobstores()
orig_draft: Document = DocumentFactory( # type: ignore[annotation-unchecked]
type_id='draft',
group=GroupFactory(type_id=group_type) if group_type else None,
@@ -1290,6 +1320,7 @@ def test_submit_update_individual_with_extresources(self):
def submit_new_individual_replacing_wg(self, logged_in=False, group_state_id='active', notify_ad=False):
"""Chair of an active WG should be notified if individual draft is proposed to replace a WG draft"""
+ TestBlobstoreManager().emptyTestBlobstores()
name = "draft-authorname-testing-tests"
rev = "00"
group = None
@@ -1416,6 +1447,7 @@ def test_cancel_submission(self):
# cancel
r = self.client.post(status_url, dict(action=action))
self.assertTrue(not os.path.exists(os.path.join(self.staging_dir, "%s-%s.txt" % (name, rev))))
+ self.assertFalse(exists_in_storage("staging",f"{name}-{rev}.txt"))
def test_edit_submission_and_force_post(self):
# submit -> edit
@@ -1605,16 +1637,21 @@ def test_submit_all_file_types(self):
self.assertEqual(Submission.objects.filter(name=name).count(), 1)
self.assertTrue(os.path.exists(os.path.join(self.staging_dir, "%s-%s.txt" % (name, rev))))
+ self.assertTrue(exists_in_storage("staging",f"{name}-{rev}.txt"))
fd = io.open(os.path.join(self.staging_dir, "%s-%s.txt" % (name, rev)))
txt_contents = fd.read()
fd.close()
self.assertTrue(name in txt_contents)
self.assertTrue(os.path.exists(os.path.join(self.staging_dir, "%s-%s.xml" % (name, rev))))
+ self.assertTrue(exists_in_storage("staging",f"{name}-{rev}.txt"))
fd = io.open(os.path.join(self.staging_dir, "%s-%s.xml" % (name, rev)))
xml_contents = fd.read()
fd.close()
self.assertTrue(name in xml_contents)
self.assertTrue('' in xml_contents)
+ xml_contents = retrieve_str("staging", f"{name}-{rev}.xml")
+ self.assertTrue(name in xml_contents)
+ self.assertTrue('' in xml_contents)
def test_expire_submissions(self):
s = Submission.objects.create(name="draft-ietf-mars-foo",
@@ -1901,6 +1938,7 @@ def do_wg_approval_auth_test(self, state, chair_can_approve=False):
Assumes approval allowed by AD and secretary and, optionally, chair of WG
"""
+ TestBlobstoreManager().emptyTestBlobstores()
class _SubmissionFactory:
"""Helper class to generate fresh submissions"""
def __init__(self, author, state):
@@ -2750,6 +2788,7 @@ class AsyncSubmissionTests(BaseSubmitTestCase):
"""Tests of async submission-related tasks"""
def test_process_and_accept_uploaded_submission(self):
"""process_and_accept_uploaded_submission should properly process a submission"""
+ TestBlobstoreManager().emptyTestBlobstores()
_today = date_today()
xml, author = submission_file('draft-somebody-test-00', 'draft-somebody-test-00.xml', None, 'test_submission.xml')
xml_data = xml.read()
@@ -2765,10 +2804,13 @@ def test_process_and_accept_uploaded_submission(self):
xml_path = Path(settings.IDSUBMIT_STAGING_PATH) / 'draft-somebody-test-00.xml'
with xml_path.open('w') as f:
f.write(xml_data)
+ store_str("staging", "draft-somebody-test-00.xml", xml_data)
txt_path = xml_path.with_suffix('.txt')
self.assertFalse(txt_path.exists())
html_path = xml_path.with_suffix('.html')
self.assertFalse(html_path.exists())
+ for ext in ["txt", "html"]:
+ self.assertFalse(exists_in_storage("staging",f"draft-somebody-test-00.{ext}"))
process_and_accept_uploaded_submission(submission)
submission = Submission.objects.get(pk=submission.pk) # refresh
@@ -2784,6 +2826,8 @@ def test_process_and_accept_uploaded_submission(self):
# at least test that these were created
self.assertTrue(txt_path.exists())
self.assertTrue(html_path.exists())
+ for ext in ["txt", "html"]:
+ self.assertTrue(exists_in_storage("staging", f"draft-somebody-test-00.{ext}"))
self.assertEqual(submission.file_size, os.stat(txt_path).st_size)
self.assertIn('Completed submission validation checks', submission.submissionevent_set.last().desc)
@@ -2798,6 +2842,7 @@ def test_process_and_accept_uploaded_submission_invalid(self):
txt.close()
# submitter is not an author
+ TestBlobstoreManager().emptyTestBlobstores()
submitter = PersonFactory()
submission = SubmissionFactory(
name='draft-somebody-test',
@@ -2809,12 +2854,14 @@ def test_process_and_accept_uploaded_submission_invalid(self):
xml_path = Path(settings.IDSUBMIT_STAGING_PATH) / 'draft-somebody-test-00.xml'
with xml_path.open('w') as f:
f.write(xml_data)
+ store_str("staging", "draft-somebody-test-00.xml", xml_data)
process_and_accept_uploaded_submission(submission)
submission = Submission.objects.get(pk=submission.pk) # refresh
self.assertEqual(submission.state_id, 'cancel')
self.assertIn('not one of the document authors', submission.submissionevent_set.last().desc)
# author has no email address in XML
+ TestBlobstoreManager().emptyTestBlobstores()
submission = SubmissionFactory(
name='draft-somebody-test',
rev='00',
@@ -2825,12 +2872,14 @@ def test_process_and_accept_uploaded_submission_invalid(self):
xml_path = Path(settings.IDSUBMIT_STAGING_PATH) / 'draft-somebody-test-00.xml'
with xml_path.open('w') as f:
f.write(re.sub(r'.*', '', xml_data))
+ store_str("staging", "draft-somebody-test-00.xml", re.sub(r'.*', '', xml_data))
process_and_accept_uploaded_submission(submission)
submission = Submission.objects.get(pk=submission.pk) # refresh
self.assertEqual(submission.state_id, 'cancel')
self.assertIn('Email address not found for all authors', submission.submissionevent_set.last().desc)
# no title
+ TestBlobstoreManager().emptyTestBlobstores()
submission = SubmissionFactory(
name='draft-somebody-test',
rev='00',
@@ -2841,12 +2890,14 @@ def test_process_and_accept_uploaded_submission_invalid(self):
xml_path = Path(settings.IDSUBMIT_STAGING_PATH) / 'draft-somebody-test-00.xml'
with xml_path.open('w') as f:
f.write(re.sub(r'.*', '', xml_data))
+ store_str("staging", "draft-somebody-test-00.xml", re.sub(r'.*', '', xml_data))
process_and_accept_uploaded_submission(submission)
submission = Submission.objects.get(pk=submission.pk) # refresh
self.assertEqual(submission.state_id, 'cancel')
self.assertIn('Could not extract a valid title', submission.submissionevent_set.last().desc)
# draft name mismatch
+ TestBlobstoreManager().emptyTestBlobstores()
submission = SubmissionFactory(
name='draft-different-name',
rev='00',
@@ -2857,12 +2908,14 @@ def test_process_and_accept_uploaded_submission_invalid(self):
xml_path = Path(settings.IDSUBMIT_STAGING_PATH) / 'draft-different-name-00.xml'
with xml_path.open('w') as f:
f.write(xml_data)
+ store_str("staging", "draft-different-name-00.xml", xml_data)
process_and_accept_uploaded_submission(submission)
submission = Submission.objects.get(pk=submission.pk) # refresh
self.assertEqual(submission.state_id, 'cancel')
self.assertIn('Submission rejected: XML Internet-Draft filename', submission.submissionevent_set.last().desc)
# rev mismatch
+ TestBlobstoreManager().emptyTestBlobstores()
submission = SubmissionFactory(
name='draft-somebody-test',
rev='01',
@@ -2873,12 +2926,14 @@ def test_process_and_accept_uploaded_submission_invalid(self):
xml_path = Path(settings.IDSUBMIT_STAGING_PATH) / 'draft-somebody-test-01.xml'
with xml_path.open('w') as f:
f.write(xml_data)
+ store_str("staging", "draft-somebody-test-01.xml", xml_data)
process_and_accept_uploaded_submission(submission)
submission = Submission.objects.get(pk=submission.pk) # refresh
self.assertEqual(submission.state_id, 'cancel')
self.assertIn('Submission rejected: XML Internet-Draft revision', submission.submissionevent_set.last().desc)
# not xml
+ TestBlobstoreManager().emptyTestBlobstores()
submission = SubmissionFactory(
name='draft-somebody-test',
rev='00',
@@ -2889,12 +2944,14 @@ def test_process_and_accept_uploaded_submission_invalid(self):
txt_path = Path(settings.IDSUBMIT_STAGING_PATH) / 'draft-somebody-test-00.txt'
with txt_path.open('w') as f:
f.write(txt_data)
+ store_str("staging", "draft-somebody-test-00.txt", txt_data)
process_and_accept_uploaded_submission(submission)
submission = Submission.objects.get(pk=submission.pk) # refresh
self.assertEqual(submission.state_id, 'cancel')
self.assertIn('Only XML Internet-Draft submissions', submission.submissionevent_set.last().desc)
# wrong state
+ TestBlobstoreManager().emptyTestBlobstores()
submission = SubmissionFactory(
name='draft-somebody-test',
rev='00',
@@ -2903,8 +2960,9 @@ def test_process_and_accept_uploaded_submission_invalid(self):
state_id='uploaded',
)
xml_path = Path(settings.IDSUBMIT_STAGING_PATH) / 'draft-somebody-test-00.xml'
- with xml_path.open('w') as f:
+ with xml_path.open('w') as f: # Why is this state being written if the thing that uses it is mocked out?
f.write(xml_data)
+ store_str("staging", "draft-somebody-test-00.xml", xml_data)
with mock.patch('ietf.submit.utils.process_submission_xml') as mock_proc_xml:
process_and_accept_uploaded_submission(submission)
submission = Submission.objects.get(pk=submission.pk) # refresh
@@ -2912,6 +2970,7 @@ def test_process_and_accept_uploaded_submission_invalid(self):
self.assertEqual(submission.state_id, 'uploaded', 'State should not be changed')
# failed checker
+ TestBlobstoreManager().emptyTestBlobstores()
submission = SubmissionFactory(
name='draft-somebody-test',
rev='00',
@@ -2922,6 +2981,7 @@ def test_process_and_accept_uploaded_submission_invalid(self):
xml_path = Path(settings.IDSUBMIT_STAGING_PATH) / 'draft-somebody-test-00.xml'
with xml_path.open('w') as f:
f.write(xml_data)
+ store_str("staging", "draft-somebody-test-00.xml", xml_data)
with mock.patch(
'ietf.submit.utils.apply_checkers',
side_effect = lambda _, __: submission.checks.create(
@@ -2958,6 +3018,7 @@ def test_process_and_accept_uploaded_submission_task_ignores_invalid_id(self, mo
self.assertEqual(mock_method.call_count, 0)
def test_process_submission_xml(self):
+ TestBlobstoreManager().emptyTestBlobstores()
xml_path = Path(settings.IDSUBMIT_STAGING_PATH) / "draft-somebody-test-00.xml"
xml, _ = submission_file(
"draft-somebody-test-00",
@@ -2968,6 +3029,7 @@ def test_process_submission_xml(self):
)
xml_contents = xml.read()
xml_path.write_text(xml_contents)
+ store_str("staging", "draft-somebody-test-00.xml", xml_contents)
output = process_submission_xml("draft-somebody-test", "00")
self.assertEqual(output["filename"], "draft-somebody-test")
self.assertEqual(output["rev"], "00")
@@ -2983,23 +3045,32 @@ def test_process_submission_xml(self):
self.assertEqual(output["xml_version"], "3")
# Should behave on missing or partial elements
+ TestBlobstoreManager().emptyTestBlobstores()
xml_path.write_text(re.sub(r"", "", xml_contents)) # strip entirely
+ store_str("staging", "draft-somebody-test-00.xml", re.sub(r"", "", xml_contents))
output = process_submission_xml("draft-somebody-test", "00")
self.assertEqual(output["document_date"], None)
+ TestBlobstoreManager().emptyTestBlobstores()
xml_path.write_text(re.sub(r")", r"\1 day=\2", xml_contents)) # remove month
+ store_str("staging", "draft-somebody-test-00.xml", re.sub(r"()", r"\1 day=\2", xml_contents))
output = process_submission_xml("draft-somebody-test", "00")
self.assertEqual(output["document_date"], date_today())
+ TestBlobstoreManager().emptyTestBlobstores()
xml_path.write_text(re.sub(r"", r"", xml_contents)) # remove day
+ store_str("staging", "draft-somebody-test-00.xml", re.sub(r"", r"", xml_contents))
output = process_submission_xml("draft-somebody-test", "00")
self.assertEqual(output["document_date"], date_today())
# name mismatch
+ TestBlobstoreManager().emptyTestBlobstores()
xml, _ = submission_file(
"draft-somebody-wrong-name-00", # name that appears in the file
"draft-somebody-test-00.xml",
@@ -3008,10 +3079,13 @@ def test_process_submission_xml(self):
title="Correct Draft Title",
)
xml_path.write_text(xml.read())
+ xml.seek(0)
+ store_str("staging", "draft-somebody-test-00.xml", xml.read())
with self.assertRaisesMessage(SubmissionError, "disagrees with submission filename"):
process_submission_xml("draft-somebody-test", "00")
# rev mismatch
+ TestBlobstoreManager().emptyTestBlobstores()
xml, _ = submission_file(
"draft-somebody-test-01", # name that appears in the file
"draft-somebody-test-00.xml",
@@ -3020,10 +3094,13 @@ def test_process_submission_xml(self):
title="Correct Draft Title",
)
xml_path.write_text(xml.read())
+ xml.seek(0)
+ store_str("staging", "draft-somebody-test-00.xml", xml.read())
with self.assertRaisesMessage(SubmissionError, "disagrees with submission revision"):
process_submission_xml("draft-somebody-test", "00")
# missing title
+ TestBlobstoreManager().emptyTestBlobstores()
xml, _ = submission_file(
"draft-somebody-test-00", # name that appears in the file
"draft-somebody-test-00.xml",
@@ -3032,10 +3109,13 @@ def test_process_submission_xml(self):
title="",
)
xml_path.write_text(xml.read())
+ xml.seek(0)
+ store_str("staging", "draft-somebody-test-00.xml", xml.read())
with self.assertRaisesMessage(SubmissionError, "Could not extract a valid title"):
process_submission_xml("draft-somebody-test", "00")
def test_process_submission_text(self):
+ TestBlobstoreManager().emptyTestBlobstores()
txt_path = Path(settings.IDSUBMIT_STAGING_PATH) / "draft-somebody-test-00.txt"
txt, _ = submission_file(
"draft-somebody-test-00",
@@ -3045,6 +3125,8 @@ def test_process_submission_text(self):
title="Correct Draft Title",
)
txt_path.write_text(txt.read())
+ txt.seek(0)
+ store_str("staging", "draft-somebody-test-00.txt", txt.read())
output = process_submission_text("draft-somebody-test", "00")
self.assertEqual(output["filename"], "draft-somebody-test")
self.assertEqual(output["rev"], "00")
@@ -3060,6 +3142,7 @@ def test_process_submission_text(self):
self.assertIsNone(output["xml_version"])
# name mismatch
+ TestBlobstoreManager().emptyTestBlobstores()
txt, _ = submission_file(
"draft-somebody-wrong-name-00", # name that appears in the file
"draft-somebody-test-00.txt",
@@ -3069,11 +3152,14 @@ def test_process_submission_text(self):
)
with txt_path.open('w') as fd:
fd.write(txt.read())
+ txt.seek(0)
+ store_str("staging", "draft-somebody-test-00.txt", txt.read())
txt.close()
with self.assertRaisesMessage(SubmissionError, 'disagrees with submission filename'):
process_submission_text("draft-somebody-test", "00")
# rev mismatch
+ TestBlobstoreManager().emptyTestBlobstores()
txt, _ = submission_file(
"draft-somebody-test-01", # name that appears in the file
"draft-somebody-test-00.txt",
@@ -3083,6 +3169,8 @@ def test_process_submission_text(self):
)
with txt_path.open('w') as fd:
fd.write(txt.read())
+ txt.seek(0)
+ store_str("staging", "draft-somebody-test-00.txt", txt.read())
txt.close()
with self.assertRaisesMessage(SubmissionError, 'disagrees with submission revision'):
process_submission_text("draft-somebody-test", "00")
@@ -3221,6 +3309,7 @@ def test_find_submission_filenames(self):
path = Path(self.staging_dir)
for ext in ['txt', 'xml', 'pdf', 'md']:
(path / f'{draft.name}-{draft.rev}.{ext}').touch()
+ store_str("staging", f"{draft.name}-{draft.rev}.{ext}", "")
files = find_submission_filenames(draft)
self.assertCountEqual(
files,
@@ -3280,6 +3369,7 @@ def test_validate_submission_rev(self):
new_wg_doc = WgDraftFactory(rev='01', relations=[('replaces',old_wg_doc)])
path = Path(self.archive_dir) / f'{new_wg_doc.name}-{new_wg_doc.rev}.txt'
path.touch()
+ store_str("staging", f"{new_wg_doc.name}-{new_wg_doc.rev}.txt", "")
bad_revs = (None, '', '2', 'aa', '00', '01', '100', '002', u'öö')
for rev in bad_revs:
diff --git a/ietf/submit/utils.py b/ietf/submit/utils.py
index f19f2384c8..3e06f45c54 100644
--- a/ietf/submit/utils.py
+++ b/ietf/submit/utils.py
@@ -36,6 +36,7 @@
DocumentAuthor, AddedMessageEvent )
from ietf.doc.models import NewRevisionDocEvent
from ietf.doc.models import RelatedDocument, DocRelationshipName, DocExtResource
+from ietf.doc.storage_utils import remove_from_storage, retrieve_bytes, store_bytes, store_file, store_str
from ietf.doc.utils import (add_state_change_event, rebuild_reference_relations,
set_replaces_for_document, prettify_std_name, update_doc_extresources,
can_edit_docextresources, update_documentauthors, update_action_holders,
@@ -455,6 +456,7 @@ def post_submission(request, submission, approved_doc_desc, approved_subm_desc):
from ietf.doc.expire import move_draft_files_to_archive
move_draft_files_to_archive(draft, prev_rev)
+ submission.draft = draft
move_files_to_repository(submission)
submission.state = DraftSubmissionStateName.objects.get(slug="posted")
log.log(f"{submission.name}: moved files")
@@ -488,7 +490,6 @@ def post_submission(request, submission, approved_doc_desc, approved_subm_desc):
if new_possibly_replaces:
send_review_possibly_replaces_request(request, draft, submitter_info)
- submission.draft = draft
submission.save()
create_submission_event(request, submission, approved_subm_desc)
@@ -498,6 +499,7 @@ def post_submission(request, submission, approved_doc_desc, approved_subm_desc):
ref_rev_file_name = os.path.join(os.path.join(settings.BIBXML_BASE_PATH, 'bibxml-ids'), 'reference.I-D.%s-%s.xml' % (draft.name, draft.rev ))
with io.open(ref_rev_file_name, "w", encoding='utf-8') as f:
f.write(ref_text)
+ store_str("bibxml-ids", f"reference.I-D.{draft.name}-{draft.rev}.txt", ref_text) # TODO-BLOBSTORE verify with test — NOTE(review): blob key uses ".txt" but the file written above is ".xml"; confirm intended
log.log(f"{submission.name}: done")
@@ -666,6 +668,12 @@ def move_files_to_repository(submission):
ftp_dest = Path(settings.FTP_DIR) / "internet-drafts" / dest.name
os.link(dest, all_archive_dest)
os.link(dest, ftp_dest)
+ # Shadow what's happening to the fs in the blobstores. When the stores become
+ # authoritative, the source and dest checks will need to apply to the stores instead.
+ content_bytes = retrieve_bytes("staging", fname)
+ store_bytes("active-draft", f"{ext}/{fname}", content_bytes)
+ submission.draft.store_bytes(f"{ext}/{fname}", content_bytes)
+ remove_from_storage("staging", fname)
elif dest.exists():
log.log("Intended to move '%s' to '%s', but found source missing while destination exists.")
elif f".{ext}" in submission.file_types.split(','):
@@ -678,6 +686,7 @@ def remove_staging_files(name, rev):
exts = [f'.{ext}' for ext in settings.IDSUBMIT_FILE_TYPES]
for ext in exts:
basename.with_suffix(ext).unlink(missing_ok=True)
+ remove_from_storage("staging", basename.with_suffix(ext).name, warn_if_missing=False)
def remove_submission_files(submission):
@@ -766,6 +775,8 @@ def save_files(form):
for chunk in f.chunks():
destination.write(chunk)
log.log("saved file %s" % name)
+ f.seek(0)
+ store_file("staging", f"{form.filename}-{form.revision}.{ext}", f)
return file_name
@@ -988,6 +999,10 @@ def render_missing_formats(submission):
xml_version,
)
)
+ # When the blobstores become authoritative - the guard at the
+ # containing if statement needs to be based on the store
+ with Path(txt_path).open("rb") as f:
+ store_file("staging", f"{submission.name}-{submission.rev}.txt", f)
# --- Convert to html ---
html_path = staging_path(submission.name, submission.rev, '.html')
@@ -1010,6 +1025,8 @@ def render_missing_formats(submission):
xml_version,
)
)
+ with Path(html_path).open("rb") as f:
+ store_file("staging", f"{submission.name}-{submission.rev}.html", f)
def accept_submission(submission: Submission, request: Optional[HttpRequest] = None, autopost=False):
@@ -1361,6 +1378,7 @@ def process_and_validate_submission(submission):
except SubmissionError:
raise # pass SubmissionErrors up the stack
except Exception as err:
+ # (this is a good point to just `raise err` when diagnosing Submission test failures)
# convert other exceptions into SubmissionErrors
log.log(f'Unexpected exception while processing submission {submission.pk}.')
log.log(traceback.format_exc())
diff --git a/ietf/utils/storage.py b/ietf/utils/storage.py
index 0aa02cab86..9f41f3d50f 100644
--- a/ietf/utils/storage.py
+++ b/ietf/utils/storage.py
@@ -1,8 +1,56 @@
+# Copyright The IETF Trust 2020-2025, All Rights Reserved
+"""Django Storage classes"""
+from pathlib import Path
+
+from django.conf import settings
from django.core.files.storage import FileSystemStorage
+from ietf.doc.storage_utils import store_file
+from .log import log
+
class NoLocationMigrationFileSystemStorage(FileSystemStorage):
- def deconstruct(obj): # pylint: disable=no-self-argument
- path, args, kwargs = FileSystemStorage.deconstruct(obj)
- kwargs["location"] = None
- return (path, args, kwargs)
+ def deconstruct(self):
+ path, args, kwargs = super().deconstruct()
+ kwargs["location"] = None # don't record location in migrations
+ return path, args, kwargs
+
+
+class BlobShadowFileSystemStorage(NoLocationMigrationFileSystemStorage):
+ """FileSystemStorage that shadows writes to the blob store as well
+
+ Strips directories from the filename when naming the blob.
+ """
+
+ def __init__(
+ self,
+ *, # disallow positional arguments
+ kind: str,
+ location=None,
+ base_url=None,
+ file_permissions_mode=None,
+ directory_permissions_mode=None,
+ ):
+ self.kind = kind
+ super().__init__(
+ location, base_url, file_permissions_mode, directory_permissions_mode
+ )
+
+ def save(self, name, content, max_length=None):
+ # Write content to the filesystem - this deals with chunks, etc...
+ saved_name = super().save(name, content, max_length)
+
+ if settings.ENABLE_BLOBSTORAGE:
+ # Retrieve the content and write to the blob store
+ blob_name = Path(saved_name).name # strips path
+ try:
+ with self.open(saved_name, "rb") as f:
+ store_file(self.kind, blob_name, f, allow_overwrite=True)
+ except Exception as err:
+ log(f"Failed to shadow {saved_name} at {self.kind}:{blob_name}: {err}")
+ return saved_name # includes the path!
+
+ def deconstruct(self):
+ path, args, kwargs = super().deconstruct()
+ kwargs["kind"] = "" # don't record "kind" in migrations
+ return path, args, kwargs
diff --git a/ietf/utils/test_runner.py b/ietf/utils/test_runner.py
index 49d53e1e1d..3c89a2d01c 100644
--- a/ietf/utils/test_runner.py
+++ b/ietf/utils/test_runner.py
@@ -48,6 +48,8 @@
import subprocess
import tempfile
import copy
+import boto3
+import botocore.config
import factory.random
import urllib3
import warnings
@@ -85,6 +87,8 @@
from ietf.utils.test_smtpserver import SMTPTestServerDriver
from ietf.utils.test_utils import TestCase
+from mypy_boto3_s3.service_resource import Bucket
+
loaded_templates = set()
visited_urls = set()
@@ -722,9 +726,25 @@ def add_arguments(cls, parser):
parser.add_argument('--rerun-until-failure',
action='store_true', dest='rerun', default=False,
help='Run the indicated tests in a loop until a failure occurs. ' )
-
- def __init__(self, ignore_lower_coverage=False, skip_coverage=False, save_version_coverage=None, html_report=None, permit_mixed_migrations=None, show_logging=None, validate_html=None, validate_html_harder=None, rerun=None, **kwargs):
- #
+ parser.add_argument('--no-manage-blobstore', action='store_false', dest='manage_blobstore',
+ help='Disable creating/deleting test buckets in the blob store.'
+ 'When this argument is used, a set of buckets with "test-" prefixed to their '
+ 'names must already exist.')
+
+ def __init__(
+ self,
+ ignore_lower_coverage=False,
+ skip_coverage=False,
+ save_version_coverage=None,
+ html_report=None,
+ permit_mixed_migrations=None,
+ show_logging=None,
+ validate_html=None,
+ validate_html_harder=None,
+ rerun=None,
+ manage_blobstore=True,
+ **kwargs
+ ): #
self.ignore_lower_coverage = ignore_lower_coverage
self.check_coverage = not skip_coverage
self.save_version_coverage = save_version_coverage
@@ -752,6 +772,8 @@ def __init__(self, ignore_lower_coverage=False, skip_coverage=False, save_versio
# contains parent classes to later subclasses, the parent classes will determine the ordering, so use the most
# specific classes necessary to get the right ordering:
self.reorder_by = (PyFlakesTestCase, MyPyTest,) + self.reorder_by + (StaticLiveServerTestCase, TemplateTagTest, CoverageTest,)
+ #self.buckets = set()
+ self.blobstoremanager = TestBlobstoreManager() if manage_blobstore else None
def setup_test_environment(self, **kwargs):
global template_coverage_collection
@@ -936,6 +958,9 @@ def setup_test_environment(self, **kwargs):
print(" (extra pedantically)")
self.vnu = start_vnu_server()
+ if self.blobstoremanager is not None:
+ self.blobstoremanager.createTestBlobstores()
+
super(IetfTestRunner, self).setup_test_environment(**kwargs)
def teardown_test_environment(self, **kwargs):
@@ -966,6 +991,9 @@ def teardown_test_environment(self, **kwargs):
if self.vnu:
self.vnu.terminate()
+ if self.blobstoremanager is not None:
+ self.blobstoremanager.destroyTestBlobstores()
+
super(IetfTestRunner, self).teardown_test_environment(**kwargs)
def validate(self, testcase):
@@ -1220,3 +1248,39 @@ def tearDown(self):
for k, v in self.replaced_settings.items():
setattr(settings, k, v)
super().tearDown()
+
+class TestBlobstoreManager():
+ # N.B. buckets and blobstore are intentional Class-level attributes
+ buckets: set[Bucket] = set()
+
+ blobstore = boto3.resource("s3",
+ endpoint_url="http://blobstore:9000",
+ aws_access_key_id="minio_root",
+ aws_secret_access_key="minio_pass",
+ aws_session_token=None,
+ config = botocore.config.Config(signature_version="s3v4"),
+ #config=botocore.config.Config(signature_version=botocore.UNSIGNED),
+ verify=False
+ )
+
+ def createTestBlobstores(self):
+ for storagename in settings.MORE_STORAGE_NAMES:
+ bucketname = f"test-{storagename}"
+ try:
+ bucket = self.blobstore.create_bucket(Bucket=bucketname)
+ self.buckets.add(bucket)
+ except self.blobstore.meta.client.exceptions.BucketAlreadyOwnedByYou:
+ bucket = self.blobstore.Bucket(bucketname)
+ self.buckets.add(bucket)
+
+ def destroyTestBlobstores(self):
+ self.emptyTestBlobstores(destroy=True)
+
+ def emptyTestBlobstores(self, destroy=False):
+ # debug.show('f"Asked to empty test blobstores with destroy={destroy}"')
+ for bucket in self.buckets:
+ bucket.objects.delete()
+ if destroy:
+ bucket.delete()
+ if destroy:
+ self.buckets = set()
diff --git a/k8s/settings_local.py b/k8s/settings_local.py
index f266ffcd62..912607f466 100644
--- a/k8s/settings_local.py
+++ b/k8s/settings_local.py
@@ -6,7 +6,9 @@
import json
from ietf import __release_hash__
-from ietf.settings import * # pyflakes:ignore
+from ietf.settings import * # pyflakes:ignore
+from ietf.settings import STORAGES, MORE_STORAGE_NAMES, BLOBSTORAGE_CONNECT_TIMEOUT, BLOBSTORAGE_READ_TIMEOUT, BLOBSTORAGE_MAX_ATTEMPTS
+import botocore.config
def _multiline_to_list(s):
@@ -29,7 +31,7 @@ def _multiline_to_list(s):
if _SECRET_KEY is not None:
SECRET_KEY = _SECRET_KEY
else:
- raise RuntimeError("DATATRACKER_DJANGO_SECRET_KEY must be set")
+ raise RuntimeError("DATATRACKER_DJANGO_SECRET_KEY must be set")
_NOMCOM_APP_SECRET_B64 = os.environ.get("DATATRACKER_NOMCOM_APP_SECRET_B64", None)
if _NOMCOM_APP_SECRET_B64 is not None:
@@ -41,7 +43,7 @@ def _multiline_to_list(s):
if _IANA_SYNC_PASSWORD is not None:
IANA_SYNC_PASSWORD = _IANA_SYNC_PASSWORD
else:
- raise RuntimeError("DATATRACKER_IANA_SYNC_PASSWORD must be set")
+ raise RuntimeError("DATATRACKER_IANA_SYNC_PASSWORD must be set")
_RFC_EDITOR_SYNC_PASSWORD = os.environ.get("DATATRACKER_RFC_EDITOR_SYNC_PASSWORD", None)
if _RFC_EDITOR_SYNC_PASSWORD is not None:
@@ -59,25 +61,25 @@ def _multiline_to_list(s):
if _GITHUB_BACKUP_API_KEY is not None:
GITHUB_BACKUP_API_KEY = _GITHUB_BACKUP_API_KEY
else:
- raise RuntimeError("DATATRACKER_GITHUB_BACKUP_API_KEY must be set")
+ raise RuntimeError("DATATRACKER_GITHUB_BACKUP_API_KEY must be set")
_API_KEY_TYPE = os.environ.get("DATATRACKER_API_KEY_TYPE", None)
if _API_KEY_TYPE is not None:
API_KEY_TYPE = _API_KEY_TYPE
else:
- raise RuntimeError("DATATRACKER_API_KEY_TYPE must be set")
+ raise RuntimeError("DATATRACKER_API_KEY_TYPE must be set")
_API_PUBLIC_KEY_PEM_B64 = os.environ.get("DATATRACKER_API_PUBLIC_KEY_PEM_B64", None)
if _API_PUBLIC_KEY_PEM_B64 is not None:
API_PUBLIC_KEY_PEM = b64decode(_API_PUBLIC_KEY_PEM_B64)
else:
- raise RuntimeError("DATATRACKER_API_PUBLIC_KEY_PEM_B64 must be set")
+ raise RuntimeError("DATATRACKER_API_PUBLIC_KEY_PEM_B64 must be set")
_API_PRIVATE_KEY_PEM_B64 = os.environ.get("DATATRACKER_API_PRIVATE_KEY_PEM_B64", None)
if _API_PRIVATE_KEY_PEM_B64 is not None:
API_PRIVATE_KEY_PEM = b64decode(_API_PRIVATE_KEY_PEM_B64)
else:
- raise RuntimeError("DATATRACKER_API_PRIVATE_KEY_PEM_B64 must be set")
+ raise RuntimeError("DATATRACKER_API_PRIVATE_KEY_PEM_B64 must be set")
# Set DEBUG if DATATRACKER_DEBUG env var is the word "true"
DEBUG = os.environ.get("DATATRACKER_DEBUG", "false").lower() == "true"
@@ -102,7 +104,9 @@ def _multiline_to_list(s):
# Configure persistent connections. A setting of 0 is Django's default.
_conn_max_age = os.environ.get("DATATRACKER_DB_CONN_MAX_AGE", "0")
# A string "none" means unlimited age.
-DATABASES["default"]["CONN_MAX_AGE"] = None if _conn_max_age.lower() == "none" else int(_conn_max_age)
+DATABASES["default"]["CONN_MAX_AGE"] = (
+ None if _conn_max_age.lower() == "none" else int(_conn_max_age)
+)
# Enable connection health checks if DATATRACKER_DB_CONN_HEALTH_CHECK is the string "true"
_conn_health_checks = bool(
os.environ.get("DATATRACKER_DB_CONN_HEALTH_CHECKS", "false").lower() == "true"
@@ -114,9 +118,11 @@ def _multiline_to_list(s):
if _admins_str is not None:
ADMINS = [parseaddr(admin) for admin in _multiline_to_list(_admins_str)]
else:
- raise RuntimeError("DATATRACKER_ADMINS must be set")
+ raise RuntimeError("DATATRACKER_ADMINS must be set")
-USING_DEBUG_EMAIL_SERVER = os.environ.get("DATATRACKER_EMAIL_DEBUG", "false").lower() == "true"
+USING_DEBUG_EMAIL_SERVER = (
+ os.environ.get("DATATRACKER_EMAIL_DEBUG", "false").lower() == "true"
+)
EMAIL_HOST = os.environ.get("DATATRACKER_EMAIL_HOST", "localhost")
EMAIL_PORT = int(os.environ.get("DATATRACKER_EMAIL_PORT", "2025"))
@@ -126,7 +132,7 @@ def _multiline_to_list(s):
CELERY_BROKER_URL = "amqp://datatracker:{password}@{host}/{queue}".format(
host=os.environ.get("RABBITMQ_HOSTNAME", "dt-rabbitmq"),
password=_celery_password,
- queue=os.environ.get("RABBITMQ_QUEUE", "dt")
+ queue=os.environ.get("RABBITMQ_QUEUE", "dt"),
)
IANA_SYNC_USERNAME = "ietfsync"
@@ -140,10 +146,10 @@ def _multiline_to_list(s):
raise RuntimeError("DATATRACKER_REGISTRATION_API_KEY must be set")
STATS_REGISTRATION_ATTENDEES_JSON_URL = f"https://registration.ietf.org/{{number}}/attendees/?apikey={_registration_api_key}"
-#FIRST_CUTOFF_DAYS = 12
-#SECOND_CUTOFF_DAYS = 12
-#SUBMISSION_CUTOFF_DAYS = 26
-#SUBMISSION_CORRECTION_DAYS = 57
+# FIRST_CUTOFF_DAYS = 12
+# SECOND_CUTOFF_DAYS = 12
+# SUBMISSION_CUTOFF_DAYS = 26
+# SUBMISSION_CORRECTION_DAYS = 57
MEETING_MATERIALS_SUBMISSION_CUTOFF_DAYS = 26
MEETING_MATERIALS_SUBMISSION_CORRECTION_DAYS = 54
@@ -155,7 +161,7 @@ def _multiline_to_list(s):
if _MEETECHO_CLIENT_ID is not None and _MEETECHO_CLIENT_SECRET is not None:
MEETECHO_API_CONFIG = {
"api_base": os.environ.get(
- "DATATRACKER_MEETECHO_API_BASE",
+ "DATATRACKER_MEETECHO_API_BASE",
"https://meetings.conf.meetecho.com/api/v1/",
),
"client_id": _MEETECHO_CLIENT_ID,
@@ -173,7 +179,9 @@ def _multiline_to_list(s):
raise RuntimeError(
"Only one of DATATRACKER_APP_API_TOKENS_JSON and DATATRACKER_APP_API_TOKENS_JSON_B64 may be set"
)
- _APP_API_TOKENS_JSON = b64decode(os.environ.get("DATATRACKER_APP_API_TOKENS_JSON_B64"))
+ _APP_API_TOKENS_JSON = b64decode(
+ os.environ.get("DATATRACKER_APP_API_TOKENS_JSON_B64")
+ )
else:
_APP_API_TOKENS_JSON = os.environ.get("DATATRACKER_APP_API_TOKENS_JSON", None)
@@ -189,7 +197,9 @@ def _multiline_to_list(s):
# Leave DATATRACKER_MATOMO_SITE_ID unset to disable Matomo reporting
if "DATATRACKER_MATOMO_SITE_ID" in os.environ:
- MATOMO_DOMAIN_PATH = os.environ.get("DATATRACKER_MATOMO_DOMAIN_PATH", "analytics.ietf.org")
+ MATOMO_DOMAIN_PATH = os.environ.get(
+ "DATATRACKER_MATOMO_DOMAIN_PATH", "analytics.ietf.org"
+ )
MATOMO_SITE_ID = os.environ.get("DATATRACKER_MATOMO_SITE_ID")
MATOMO_DISABLE_COOKIES = True
@@ -197,9 +207,13 @@ def _multiline_to_list(s):
_SCOUT_KEY = os.environ.get("DATATRACKER_SCOUT_KEY", None)
if _SCOUT_KEY is not None:
if SERVER_MODE == "production":
- PROD_PRE_APPS = ["scout_apm.django", ]
+ PROD_PRE_APPS = [
+ "scout_apm.django",
+ ]
else:
- DEV_PRE_APPS = ["scout_apm.django", ]
+ DEV_PRE_APPS = [
+ "scout_apm.django",
+ ]
SCOUT_MONITOR = True
SCOUT_KEY = _SCOUT_KEY
SCOUT_NAME = os.environ.get("DATATRACKER_SCOUT_NAME", "Datatracker")
@@ -216,16 +230,17 @@ def _multiline_to_list(s):
STATIC_URL = os.environ.get("DATATRACKER_STATIC_URL", None)
if STATIC_URL is None:
from ietf import __version__
+
STATIC_URL = f"https://static.ietf.org/dt/{__version__}/"
# Set these to the same as "production" in settings.py, whether production mode or not
MEDIA_ROOT = "/a/www/www6s/lib/dt/media/"
-MEDIA_URL = "https://www.ietf.org/lib/dt/media/"
+MEDIA_URL = "https://www.ietf.org/lib/dt/media/"
PHOTOS_DIRNAME = "photo"
PHOTOS_DIR = MEDIA_ROOT + PHOTOS_DIRNAME
# Normally only set for debug, but needed until we have a real FS
-DJANGO_VITE_MANIFEST_PATH = os.path.join(BASE_DIR, 'static/dist-neue/manifest.json')
+DJANGO_VITE_MANIFEST_PATH = os.path.join(BASE_DIR, "static/dist-neue/manifest.json")
# Binaries that are different in the docker image
DE_GFM_BINARY = "/usr/local/bin/de-gfm"
@@ -235,6 +250,7 @@ def _multiline_to_list(s):
MEMCACHED_HOST = os.environ.get("DT_MEMCACHED_SERVICE_HOST", "127.0.0.1")
MEMCACHED_PORT = os.environ.get("DT_MEMCACHED_SERVICE_PORT", "11211")
from ietf import __version__
+
CACHES = {
"default": {
"BACKEND": "ietf.utils.cache.LenientMemcacheCache",
@@ -285,3 +301,46 @@ def _multiline_to_list(s):
# Console logs as JSON instead of plain when running in k8s
LOGGING["handlers"]["console"]["formatter"] = "json"
+
+# Configure storages for the blob store
+_blob_store_endpoint_url = os.environ.get("DATATRACKER_BLOB_STORE_ENDPOINT_URL")
+_blob_store_access_key = os.environ.get("DATATRACKER_BLOB_STORE_ACCESS_KEY")
+_blob_store_secret_key = os.environ.get("DATATRACKER_BLOB_STORE_SECRET_KEY")
+if None in (_blob_store_endpoint_url, _blob_store_access_key, _blob_store_secret_key):
+ raise RuntimeError(
+ "All of DATATRACKER_BLOB_STORE_ENDPOINT_URL, DATATRACKER_BLOB_STORE_ACCESS_KEY, "
+ "and DATATRACKER_BLOB_STORE_SECRET_KEY must be set"
+ )
+_blob_store_bucket_prefix = os.environ.get(
+ "DATATRACKER_BLOB_STORE_BUCKET_PREFIX", ""
+)
+_blob_store_enable_profiling = (
+ os.environ.get("DATATRACKER_BLOB_STORE_ENABLE_PROFILING", "false").lower() == "true"
+)
+_blob_store_max_attempts = (
+ os.environ.get("DATATRACKER_BLOB_STORE_MAX_ATTEMPTS", BLOBSTORAGE_MAX_ATTEMPTS)
+)
+_blob_store_connect_timeout = (
+ os.environ.get("DATATRACKER_BLOB_STORE_CONNECT_TIMEOUT", BLOBSTORAGE_CONNECT_TIMEOUT)
+)
+_blob_store_read_timeout = (
+ os.environ.get("DATATRACKER_BLOB_STORE_READ_TIMEOUT", BLOBSTORAGE_READ_TIMEOUT)
+)
+for storage_name in MORE_STORAGE_NAMES:
+ STORAGES[storage_name] = {
+ "BACKEND": "ietf.doc.storage_backends.CustomS3Storage",
+ "OPTIONS": dict(
+ endpoint_url=_blob_store_endpoint_url,
+ access_key=_blob_store_access_key,
+ secret_key=_blob_store_secret_key,
+ security_token=None,
+ client_config=botocore.config.Config(
+ signature_version="s3v4",
+ connect_timeout=_blob_store_connect_timeout,
+ read_timeout=_blob_store_read_timeout,
+ retries={"total_max_attempts": _blob_store_max_attempts},
+ ),
+ bucket_name=f"{_blob_store_bucket_prefix}{storage_name}".strip(),
+ ietf_log_blob_timing=_blob_store_enable_profiling,
+ ),
+ }
diff --git a/requirements.txt b/requirements.txt
index 073a6bfa0a..66a785e929 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -6,6 +6,9 @@ beautifulsoup4>=4.11.1 # Only used in tests
bibtexparser>=1.2.0 # Only used in tests
bleach>=6
types-bleach>=6
+boto3>=1.35,<1.36
+boto3-stubs[s3]>=1.35,<1.36
+botocore>=1.35,<1.36
celery>=5.2.6
coverage>=4.5.4,<5.0 # Coverage 5.x moves from a json database to SQLite. Moving to 5.x will require substantial rewrites in ietf.utils.test_runner and ietf.release.views
defusedxml>=0.7.1 # for TastyPie when using xml; not a declared dependency
@@ -21,6 +24,7 @@ django-markup>=1.5 # Limited use - need to reconcile against direct use of ma
django-oidc-provider==0.8.2 # 0.8.3 changes logout flow and claim return
django-referrer-policy>=1.0
django-simple-history>=3.0.0
+django-storages>=1.14.4
django-stubs>=4.2.7,<5 # The django-stubs version used determines the the mypy version indicated below
django-tastypie>=0.14.7,<0.15.0 # Version must be locked in sync with version of Django
django-vite>=2.0.2,<3
@@ -75,7 +79,7 @@ tblib>=1.7.0 # So that the django test runner provides tracebacks
tlds>=2022042700 # Used to teach bleach about which TLDs currently exist
tqdm>=4.64.0
Unidecode>=1.3.4
-urllib3>=2
+urllib3>=1.26,<2
weasyprint>=59
xml2rfc[pdf]>=3.23.0
xym>=0.6,<1.0
From be7ad9acabca649cd77374e3aea574dfdcebc7d8 Mon Sep 17 00:00:00 2001
From: Jennifer Richards
Date: Wed, 19 Feb 2025 21:42:07 -0400
Subject: [PATCH 064/460] ci: cast env vars to correct types (#8558)
---
k8s/settings_local.py | 6 +++---
1 file changed, 3 insertions(+), 3 deletions(-)
diff --git a/k8s/settings_local.py b/k8s/settings_local.py
index 912607f466..a0e197b7f5 100644
--- a/k8s/settings_local.py
+++ b/k8s/settings_local.py
@@ -317,13 +317,13 @@ def _multiline_to_list(s):
_blob_store_enable_profiling = (
os.environ.get("DATATRACKER_BLOB_STORE_ENABLE_PROFILING", "false").lower() == "true"
)
-_blob_store_max_attempts = (
+_blob_store_max_attempts = int(
os.environ.get("DATATRACKER_BLOB_STORE_MAX_ATTEMPTS", BLOBSTORAGE_MAX_ATTEMPTS)
)
-_blob_store_connect_timeout = (
+_blob_store_connect_timeout = float(
os.environ.get("DATATRACKER_BLOB_STORE_CONNECT_TIMEOUT", BLOBSTORAGE_CONNECT_TIMEOUT)
)
-_blob_store_read_timeout = (
+_blob_store_read_timeout = float(
os.environ.get("DATATRACKER_BLOB_STORE_READ_TIMEOUT", BLOBSTORAGE_READ_TIMEOUT)
)
for storage_name in MORE_STORAGE_NAMES:
From 2f8b9c3cfa157ed14d19b372c877eaed3beb6391 Mon Sep 17 00:00:00 2001
From: Jennifer Richards
Date: Thu, 20 Feb 2025 13:12:43 -0400
Subject: [PATCH 065/460] fix: ignore exceptions from blobstore ops (#8565)
* fix: ignore exceptions from blobstore ops
* fix: log repr(err) instead of just err
---
ietf/doc/storage_utils.py | 67 +++++++++++++++++++++++++++------------
ietf/utils/storage.py | 6 ++--
2 files changed, 49 insertions(+), 24 deletions(-)
diff --git a/ietf/doc/storage_utils.py b/ietf/doc/storage_utils.py
index 4f0516339a..1bc2aa293c 100644
--- a/ietf/doc/storage_utils.py
+++ b/ietf/doc/storage_utils.py
@@ -8,6 +8,8 @@
from django.core.files.base import ContentFile, File
from django.core.files.storage import storages
+from ietf.utils.log import log
+
# TODO-BLOBSTORE (Future, maybe after leaving 3.9) : add a return type
def _get_storage(kind: str):
@@ -22,16 +24,22 @@ def _get_storage(kind: str):
def exists_in_storage(kind: str, name: str) -> bool:
if settings.ENABLE_BLOBSTORAGE:
- store = _get_storage(kind)
- return store.exists_in_storage(kind, name)
+ try:
+ store = _get_storage(kind)
+ return store.exists_in_storage(kind, name)
+ except Exception as err:
+ log(f"Blobstore Error: Failed to test existence of {kind}:{name}: {repr(err)}")
else:
return False
def remove_from_storage(kind: str, name: str, warn_if_missing: bool = True) -> None:
if settings.ENABLE_BLOBSTORAGE:
- store = _get_storage(kind)
- store.remove_from_storage(kind, name, warn_if_missing)
+ try:
+ store = _get_storage(kind)
+ store.remove_from_storage(kind, name, warn_if_missing)
+ except Exception as err:
+ log(f"Blobstore Error: Failed to remove {kind}:{name}: {repr(err)}")
return None
@@ -46,8 +54,11 @@ def store_file(
) -> None:
# debug.show('f"asked to store {name} into {kind}"')
if settings.ENABLE_BLOBSTORAGE:
- store = _get_storage(kind)
- store.store_file(kind, name, file, allow_overwrite, doc_name, doc_rev)
+ try:
+ store = _get_storage(kind)
+ store.store_file(kind, name, file, allow_overwrite, doc_name, doc_rev)
+ except Exception as err:
+ log(f"Blobstore Error: Failed to store file {kind}:{name}: {repr(err)}")
return None
@@ -60,7 +71,11 @@ def store_bytes(
doc_rev: Optional[str] = None,
) -> None:
if settings.ENABLE_BLOBSTORAGE:
- store_file(kind, name, ContentFile(content), allow_overwrite)
+ try:
+ store_file(kind, name, ContentFile(content), allow_overwrite)
+ except Exception as err:
+ # n.b., not likely to get an exception here because store_file or store_bytes will catch it
+ log(f"Blobstore Error: Failed to store bytes to {kind}:{name}: {repr(err)}")
return None
@@ -73,8 +88,12 @@ def store_str(
doc_rev: Optional[str] = None,
) -> None:
if settings.ENABLE_BLOBSTORAGE:
- content_bytes = content.encode("utf-8")
- store_bytes(kind, name, content_bytes, allow_overwrite)
+ try:
+ content_bytes = content.encode("utf-8")
+ store_bytes(kind, name, content_bytes, allow_overwrite)
+ except Exception as err:
+ # n.b., not likely to get an exception here because store_file or store_bytes will catch it
+ log(f"Blobstore Error: Failed to store string to {kind}:{name}: {repr(err)}")
return None
@@ -82,22 +101,28 @@ def retrieve_bytes(kind: str, name: str) -> bytes:
from ietf.doc.storage_backends import maybe_log_timing
content = b""
if settings.ENABLE_BLOBSTORAGE:
- store = _get_storage(kind)
- with store.open(name) as f:
- with maybe_log_timing(
- hasattr(store, "ietf_log_blob_timing") and store.ietf_log_blob_timing,
- "read",
- bucket_name=store.bucket_name if hasattr(store, "bucket_name") else "",
- name=name,
- ):
- content = f.read()
+ try:
+ store = _get_storage(kind)
+ with store.open(name) as f:
+ with maybe_log_timing(
+ hasattr(store, "ietf_log_blob_timing") and store.ietf_log_blob_timing,
+ "read",
+ bucket_name=store.bucket_name if hasattr(store, "bucket_name") else "",
+ name=name,
+ ):
+ content = f.read()
+ except Exception as err:
+ log(f"Blobstore Error: Failed to read bytes from {kind}:{name}: {repr(err)}")
return content
def retrieve_str(kind: str, name: str) -> str:
content = ""
if settings.ENABLE_BLOBSTORAGE:
- content_bytes = retrieve_bytes(kind, name)
- # TODO-BLOBSTORE: try to decode all the different ways doc.text() does
- content = content_bytes.decode("utf-8")
+ try:
+ content_bytes = retrieve_bytes(kind, name)
+ # TODO-BLOBSTORE: try to decode all the different ways doc.text() does
+ content = content_bytes.decode("utf-8")
+ except Exception as err:
+ log(f"Blobstore Error: Failed to read string from {kind}:{name}: {repr(err)}")
return content
diff --git a/ietf/utils/storage.py b/ietf/utils/storage.py
index 9f41f3d50f..42fcf884aa 100644
--- a/ietf/utils/storage.py
+++ b/ietf/utils/storage.py
@@ -41,13 +41,13 @@ def save(self, name, content, max_length=None):
saved_name = super().save(name, content, max_length)
if settings.ENABLE_BLOBSTORAGE:
- # Retrieve the content and write to the blob store
- blob_name = Path(saved_name).name # strips path
try:
+ # Retrieve the content and write to the blob store
+ blob_name = Path(saved_name).name # strips path
with self.open(saved_name, "rb") as f:
store_file(self.kind, blob_name, f, allow_overwrite=True)
except Exception as err:
- log(f"Failed to shadow {saved_name} at {self.kind}:{blob_name}: {err}")
+ log(f"Blobstore Error: Failed to shadow {saved_name} at {self.kind}:{blob_name}: {repr(err)}")
return saved_name # includes the path!
def deconstruct(self):
From aeba63bb41a02bab3f4ef043d5ac8d5e38e24b80 Mon Sep 17 00:00:00 2001
From: Robert Sparks
Date: Thu, 20 Feb 2025 13:13:44 -0600
Subject: [PATCH 066/460] chore: ensure proper return type (#8566)
* chore: ensure proper return type
* test: comment out (intentionally) failing assertion
---------
Co-authored-by: Jennifer Richards
---
ietf/doc/storage_utils.py | 3 +--
ietf/doc/tests_status_change.py | 5 +++--
2 files changed, 4 insertions(+), 4 deletions(-)
diff --git a/ietf/doc/storage_utils.py b/ietf/doc/storage_utils.py
index 1bc2aa293c..012efc9071 100644
--- a/ietf/doc/storage_utils.py
+++ b/ietf/doc/storage_utils.py
@@ -29,8 +29,7 @@ def exists_in_storage(kind: str, name: str) -> bool:
return store.exists_in_storage(kind, name)
except Exception as err:
log(f"Blobstore Error: Failed to test existence of {kind}:{name}: {repr(err)}")
- else:
- return False
+ return False
def remove_from_storage(kind: str, name: str, warn_if_missing: bool = True) -> None:
diff --git a/ietf/doc/tests_status_change.py b/ietf/doc/tests_status_change.py
index cbdc1a049a..da1a4f1906 100644
--- a/ietf/doc/tests_status_change.py
+++ b/ietf/doc/tests_status_change.py
@@ -564,8 +564,9 @@ def test_initial_submission(self):
ftp_filepath = Path(settings.FTP_DIR) / "status-changes" / basename
self.assertFalse(filepath.exists())
self.assertFalse(ftp_filepath.exists())
- with self.assertRaises(FileNotFoundError):
- retrieve_str("statchg",basename)
+ # TODO-BLOBSTORE: next assert is disabled because we currently suppress all exceptions
+ # with self.assertRaises(FileNotFoundError):
+ # retrieve_str("statchg",basename)
r = self.client.post(url,dict(content="Some initial review text\n",submit_response="1"))
self.assertEqual(r.status_code,302)
doc = Document.objects.get(name='status-change-imaginary-mid-review')
From 041fa83d21ce7d8320e09a2a08f9c0b7577f6fba Mon Sep 17 00:00:00 2001
From: Jennifer Richards
Date: Thu, 20 Feb 2025 16:45:22 -0400
Subject: [PATCH 067/460] chore: handle errors in app-configure-blobstore.py
(#8567)
---
docker/scripts/app-configure-blobstore.py | 18 +++++++++++++-----
1 file changed, 13 insertions(+), 5 deletions(-)
diff --git a/docker/scripts/app-configure-blobstore.py b/docker/scripts/app-configure-blobstore.py
index 7b5ce962eb..ae87bf1afe 100755
--- a/docker/scripts/app-configure-blobstore.py
+++ b/docker/scripts/app-configure-blobstore.py
@@ -2,6 +2,8 @@
# Copyright The IETF Trust 2024, All Rights Reserved
import boto3
+import botocore.config
+import botocore.exceptions
import os
import sys
@@ -16,13 +18,19 @@ def init_blobstore():
aws_secret_access_key=os.environ.get("BLOB_STORE_SECRET_KEY", "minio_pass"),
aws_session_token=None,
config=botocore.config.Config(signature_version="s3v4"),
- verify=False,
)
for bucketname in MORE_STORAGE_NAMES:
- blobstore.create_bucket(
- Bucket=f"{os.environ.get('BLOB_STORE_BUCKET_PREFIX', '')}{bucketname}".strip()
- )
-
+ try:
+ blobstore.create_bucket(
+ Bucket=f"{os.environ.get('BLOB_STORE_BUCKET_PREFIX', '')}{bucketname}".strip()
+ )
+ except botocore.exceptions.ClientError as err:
+ if err.response["Error"]["Code"] == "BucketAlreadyExists":
+ print(f"Bucket {bucketname} already exists")
+ else:
+ print(f"Error creating {bucketname}: {err.response['Error']['Code']}")
+ else:
+ print(f"Bucket {bucketname} created")
if __name__ == "__main__":
sys.exit(init_blobstore())
From fb310e5ce209ca3ce1b5e557e8beb795d9b75a02 Mon Sep 17 00:00:00 2001
From: Jennifer Richards
Date: Fri, 21 Feb 2025 11:49:16 -0400
Subject: [PATCH 068/460] feat: useful error when submission has inconsistent
date (#8576)
* chore: handle errors in app-configure-blobstore.py
* feat: sensible error for inconsistent date
---
ietf/submit/utils.py | 9 +++++++--
ietf/utils/xmldraft.py | 15 ++++++++++++++-
2 files changed, 21 insertions(+), 3 deletions(-)
diff --git a/ietf/submit/utils.py b/ietf/submit/utils.py
index 3e06f45c54..61274c0116 100644
--- a/ietf/submit/utils.py
+++ b/ietf/submit/utils.py
@@ -58,7 +58,7 @@
from ietf.utils.mail import is_valid_email
from ietf.utils.text import parse_unicode, normalize_text
from ietf.utils.timezone import date_today
-from ietf.utils.xmldraft import XMLDraft
+from ietf.utils.xmldraft import InvalidMetadataError, XMLDraft
from ietf.person.name import unidecode_name
@@ -1201,6 +1201,11 @@ def process_submission_xml(filename, revision):
if not title:
raise SubmissionError("Could not extract a valid title from the XML")
+ try:
+ document_date = xml_draft.get_creation_date()
+ except InvalidMetadataError as err:
+ raise SubmissionError(str(err)) from err
+
return {
"filename": xml_draft.filename,
"rev": xml_draft.revision,
@@ -1210,7 +1215,7 @@ def process_submission_xml(filename, revision):
for auth in xml_draft.get_author_list()
],
"abstract": None, # not supported from XML
- "document_date": xml_draft.get_creation_date(),
+ "document_date": document_date,
"pages": None, # not supported from XML
"words": None, # not supported from XML
"first_two_pages": None, # not supported from XML
diff --git a/ietf/utils/xmldraft.py b/ietf/utils/xmldraft.py
index c39c4d0a06..73baf917d8 100644
--- a/ietf/utils/xmldraft.py
+++ b/ietf/utils/xmldraft.py
@@ -159,7 +159,16 @@ def parse_creation_date(date_elt):
day = today.day
else:
day = 15
- return datetime.date(year, month, day)
+ try:
+ creation_date = datetime.date(year, month, day)
+ except Exception:
+            raise InvalidMetadataError(
+                "The <date> element in the <front> section specified an incomplete date "
+                "that was not consistent with today's date. If you specify only a year, "
+                "it must be the four-digit current year. To use today's date, omit the "
+                "<date> tag or use <date/>."
+            )
+ return creation_date
def get_creation_date(self):
return self.parse_creation_date(self.xmlroot.find("front/date"))
@@ -269,3 +278,7 @@ def parser_msgs(self):
class InvalidXMLError(Exception):
"""File is not valid XML"""
pass
+
+
+class InvalidMetadataError(Exception):
+ """XML is well-formed but has invalid metadata"""
From 183cd995aa289632f194bff8b4b9c9b32a042200 Mon Sep 17 00:00:00 2001
From: Jennifer Richards
Date: Fri, 21 Feb 2025 14:17:56 -0400
Subject: [PATCH 069/460] fix: maintain original TZID letter case (#8577)
---
ietf/meeting/tests_views.py | 15 ++++++++++++++-
ietf/templates/meeting/agenda.ics | 2 +-
2 files changed, 15 insertions(+), 2 deletions(-)
diff --git a/ietf/meeting/tests_views.py b/ietf/meeting/tests_views.py
index 848c9b7723..519f5f7c2d 100644
--- a/ietf/meeting/tests_views.py
+++ b/ietf/meeting/tests_views.py
@@ -385,7 +385,20 @@ def test_meeting_agenda(self):
assert_ical_response_is_valid(self, r)
self.assertContains(r, "BEGIN:VTIMEZONE")
self.assertContains(r, "END:VTIMEZONE")
-
+ self.assertContains(r, meeting.time_zone, msg_prefix="time_zone should appear in its original case")
+ self.assertNotEqual(
+ meeting.time_zone,
+ meeting.time_zone.lower(),
+ "meeting needs a mixed-case tz for this test",
+ )
+ self.assertNotContains(r, meeting.time_zone.lower(), msg_prefix="time_zone should not be lower-cased")
+ self.assertNotEqual(
+ meeting.time_zone,
+ meeting.time_zone.upper(),
+ "meeting needs a mixed-case tz for this test",
+ )
+ self.assertNotContains(r, meeting.time_zone.upper(), msg_prefix="time_zone should not be upper-cased")
+
# iCal, single group
r = self.client.get(ical_url + "?show=" + session.group.parent.acronym.upper())
assert_ical_response_is_valid(self, r)
diff --git a/ietf/templates/meeting/agenda.ics b/ietf/templates/meeting/agenda.ics
index 8bc8222bbe..eb83dd479a 100644
--- a/ietf/templates/meeting/agenda.ics
+++ b/ietf/templates/meeting/agenda.ics
@@ -1,4 +1,4 @@
-{% load humanize tz %}{% autoescape off %}{% timezone schedule.meeting.tz %}{% with tzname=schedule.meeting.time_zone|lower %}{% load ietf_filters textfilters %}{% load cache %}{% cache 1800 ietf_meeting_agenda_ics schedule.meeting.number request.path request.GET %}BEGIN:VCALENDAR
+{% load humanize tz %}{% autoescape off %}{% timezone schedule.meeting.tz %}{% with tzname=schedule.meeting.time_zone %}{% load ietf_filters textfilters %}{% load cache %}{% cache 1800 ietf_meeting_agenda_ics schedule.meeting.number request.path request.GET %}BEGIN:VCALENDAR
VERSION:2.0
METHOD:PUBLISH
PRODID:-//IETF//datatracker.ietf.org ical agenda//EN
From a9a8f9ba0116aea7480e83a2dfc133196cf9280f Mon Sep 17 00:00:00 2001
From: Jennifer Richards
Date: Fri, 21 Feb 2025 20:08:12 -0400
Subject: [PATCH 070/460] chore(deps): remove pydyf pin, update weasyprint req
(#8580)
* chore(deps): remove pydyf pin, update weasyprint req
* chore(deps): drop pdf extra from xml2rfc dep
This should come back when we use xml2rfc for PDF generation
---
requirements.txt | 6 +++---
1 file changed, 3 insertions(+), 3 deletions(-)
diff --git a/requirements.txt b/requirements.txt
index 66a785e929..d8b6e0742f 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -57,7 +57,7 @@ oic>=1.3 # Used only by tests
Pillow>=9.1.0
psycopg2>=2.9.6
pyang>=2.5.3
-pydyf>0.8.0,<0.10.0 # until weasyprint adjusts for 0.10.0 and later
+pydyf>0.8.0
pyflakes>=2.4.0
pyopenssl>=22.0.0 # Used by urllib3.contrib, which is used by PyQuery but not marked as a dependency
pyquery>=1.4.3
@@ -80,6 +80,6 @@ tlds>=2022042700 # Used to teach bleach about which TLDs currently exist
tqdm>=4.64.0
Unidecode>=1.3.4
urllib3>=1.26,<2
-weasyprint>=59
-xml2rfc[pdf]>=3.23.0
+weasyprint>=64.1
+xml2rfc>=3.23.0
xym>=0.6,<1.0
From cb8ef96f364c3e76d6726f12889d534ab1b49a66 Mon Sep 17 00:00:00 2001
From: Jennifer Richards
Date: Mon, 3 Mar 2025 11:21:39 -0400
Subject: [PATCH 071/460] fix: more submission date feedback; refactor xml2rfc
log capture (#8621)
* feat: catch and report any parsing error
* refactor: error handling in a more testable way
* fix: no bare `except`
* test: exception cases for test_parse_creation_date
* fix: explicitly reject non-numeric day/year
* test: suppress xml2rfc output in test
* refactor: context manager to capture xml2rfc output
* refactor: more capture_xml2rfc_output usage
* fix: capture_xml2rfc_output exception handling
---
ietf/submit/utils.py | 162 ++++++++++++++++++++---------------------
ietf/utils/tests.py | 107 ++++++++++++++++++++++++++-
ietf/utils/xmldraft.py | 66 ++++++++++++-----
3 files changed, 232 insertions(+), 103 deletions(-)
diff --git a/ietf/submit/utils.py b/ietf/submit/utils.py
index 61274c0116..a0c7dd8511 100644
--- a/ietf/submit/utils.py
+++ b/ietf/submit/utils.py
@@ -58,7 +58,7 @@
from ietf.utils.mail import is_valid_email
from ietf.utils.text import parse_unicode, normalize_text
from ietf.utils.timezone import date_today
-from ietf.utils.xmldraft import InvalidMetadataError, XMLDraft
+from ietf.utils.xmldraft import InvalidMetadataError, XMLDraft, capture_xml2rfc_output
from ietf.person.name import unidecode_name
@@ -926,105 +926,101 @@ def render_missing_formats(submission):
If a txt file already exists, leaves it in place. Overwrites an existing html file
if there is one.
"""
- # Capture stdio/stdout from xml2rfc
- xml2rfc_stdout = io.StringIO()
- xml2rfc_stderr = io.StringIO()
- xml2rfc.log.write_out = xml2rfc_stdout
- xml2rfc.log.write_err = xml2rfc_stderr
- xml_path = staging_path(submission.name, submission.rev, '.xml')
- parser = xml2rfc.XmlRfcParser(str(xml_path), quiet=True)
- try:
- # --- Parse the xml ---
- xmltree = parser.parse(remove_comments=False)
- except Exception as err:
- raise XmlRfcError(
- "Error parsing XML",
- xml2rfc_stdout=xml2rfc_stdout.getvalue(),
- xml2rfc_stderr=xml2rfc_stderr.getvalue(),
- ) from err
- # If we have v2, run it through v2v3. Keep track of the submitted version, though.
- xmlroot = xmltree.getroot()
- xml_version = xmlroot.get('version', '2')
- if xml_version == '2':
- v2v3 = xml2rfc.V2v3XmlWriter(xmltree)
+ with capture_xml2rfc_output() as xml2rfc_logs:
+ xml_path = staging_path(submission.name, submission.rev, '.xml')
+ parser = xml2rfc.XmlRfcParser(str(xml_path), quiet=True)
try:
- xmltree.tree = v2v3.convert2to3()
+ # --- Parse the xml ---
+ xmltree = parser.parse(remove_comments=False)
except Exception as err:
raise XmlRfcError(
- "Error converting v2 XML to v3",
- xml2rfc_stdout=xml2rfc_stdout.getvalue(),
- xml2rfc_stderr=xml2rfc_stderr.getvalue(),
+ "Error parsing XML",
+ xml2rfc_stdout=xml2rfc_logs["stdout"].getvalue(),
+ xml2rfc_stderr=xml2rfc_logs["stderr"].getvalue(),
) from err
-
- # --- Prep the xml ---
- today = date_today()
- prep = xml2rfc.PrepToolWriter(xmltree, quiet=True, liberal=True, keep_pis=[xml2rfc.V3_PI_TARGET])
- prep.options.accept_prepped = True
- prep.options.date = today
- try:
- xmltree.tree = prep.prep()
- except RfcWriterError:
- raise XmlRfcError(
- f"Error during xml2rfc prep: {prep.errors}",
- xml2rfc_stdout=xml2rfc_stdout.getvalue(),
- xml2rfc_stderr=xml2rfc_stderr.getvalue(),
- )
- except Exception as err:
- raise XmlRfcError(
- "Unexpected error during xml2rfc prep",
- xml2rfc_stdout=xml2rfc_stdout.getvalue(),
- xml2rfc_stderr=xml2rfc_stderr.getvalue(),
- ) from err
-
- # --- Convert to txt ---
- txt_path = staging_path(submission.name, submission.rev, '.txt')
- if not txt_path.exists():
- writer = xml2rfc.TextWriter(xmltree, quiet=True)
- writer.options.accept_prepped = True
+ # If we have v2, run it through v2v3. Keep track of the submitted version, though.
+ xmlroot = xmltree.getroot()
+ xml_version = xmlroot.get('version', '2')
+ if xml_version == '2':
+ v2v3 = xml2rfc.V2v3XmlWriter(xmltree)
+ try:
+ xmltree.tree = v2v3.convert2to3()
+ except Exception as err:
+ raise XmlRfcError(
+ "Error converting v2 XML to v3",
+ xml2rfc_stdout=xml2rfc_logs["stdout"].getvalue(),
+ xml2rfc_stderr=xml2rfc_logs["stderr"].getvalue(),
+ ) from err
+
+ # --- Prep the xml ---
+ today = date_today()
+ prep = xml2rfc.PrepToolWriter(xmltree, quiet=True, liberal=True, keep_pis=[xml2rfc.V3_PI_TARGET])
+ prep.options.accept_prepped = True
+ prep.options.date = today
+ try:
+ xmltree.tree = prep.prep()
+ except RfcWriterError:
+ raise XmlRfcError(
+ f"Error during xml2rfc prep: {prep.errors}",
+ xml2rfc_stdout=xml2rfc_logs["stdout"].getvalue(),
+ xml2rfc_stderr=xml2rfc_logs["stderr"].getvalue(),
+ )
+ except Exception as err:
+ raise XmlRfcError(
+ "Unexpected error during xml2rfc prep",
+ xml2rfc_stdout=xml2rfc_logs["stdout"].getvalue(),
+ xml2rfc_stderr=xml2rfc_logs["stderr"].getvalue(),
+ ) from err
+
+ # --- Convert to txt ---
+ txt_path = staging_path(submission.name, submission.rev, '.txt')
+ if not txt_path.exists():
+ writer = xml2rfc.TextWriter(xmltree, quiet=True)
+ writer.options.accept_prepped = True
+ writer.options.date = today
+ try:
+ writer.write(txt_path)
+ except Exception as err:
+ raise XmlRfcError(
+ "Error generating text format from XML",
+ xml2rfc_stdout=xml2rfc_logs["stdout"].getvalue(),
+ xml2rfc_stderr=xml2rfc_logs["stderr"].getvalue(),
+ ) from err
+ log.log(
+ 'In %s: xml2rfc %s generated %s from %s (version %s)' % (
+ str(xml_path.parent),
+ xml2rfc.__version__,
+ txt_path.name,
+ xml_path.name,
+ xml_version,
+ )
+ )
+            # When the blobstores become authoritative - the guard at the
+ # containing if statement needs to be based on the store
+ with Path(txt_path).open("rb") as f:
+ store_file("staging", f"{submission.name}-{submission.rev}.txt", f)
+
+ # --- Convert to html ---
+ html_path = staging_path(submission.name, submission.rev, '.html')
+ writer = xml2rfc.HtmlWriter(xmltree, quiet=True)
writer.options.date = today
try:
- writer.write(txt_path)
+ writer.write(str(html_path))
except Exception as err:
raise XmlRfcError(
- "Error generating text format from XML",
- xml2rfc_stdout=xml2rfc_stdout.getvalue(),
- xml2rfc_stderr=xml2rfc_stderr.getvalue(),
+ "Error generating HTML format from XML",
+ xml2rfc_stdout=xml2rfc_logs["stdout"].getvalue(),
+ xml2rfc_stderr=xml2rfc_logs["stderr"].getvalue(),
) from err
log.log(
'In %s: xml2rfc %s generated %s from %s (version %s)' % (
str(xml_path.parent),
xml2rfc.__version__,
- txt_path.name,
+ html_path.name,
xml_path.name,
xml_version,
)
)
- # When the blobstores become autoritative - the guard at the
- # containing if statement needs to be based on the store
- with Path(txt_path).open("rb") as f:
- store_file("staging", f"{submission.name}-{submission.rev}.txt", f)
-
- # --- Convert to html ---
- html_path = staging_path(submission.name, submission.rev, '.html')
- writer = xml2rfc.HtmlWriter(xmltree, quiet=True)
- writer.options.date = today
- try:
- writer.write(str(html_path))
- except Exception as err:
- raise XmlRfcError(
- "Error generating HTML format from XML",
- xml2rfc_stdout=xml2rfc_stdout.getvalue(),
- xml2rfc_stderr=xml2rfc_stderr.getvalue(),
- ) from err
- log.log(
- 'In %s: xml2rfc %s generated %s from %s (version %s)' % (
- str(xml_path.parent),
- xml2rfc.__version__,
- html_path.name,
- xml_path.name,
- xml_version,
- )
- )
with Path(html_path).open("rb") as f:
store_file("staging", f"{submission.name}-{submission.rev}.html", f)
diff --git a/ietf/utils/tests.py b/ietf/utils/tests.py
index 0a1986a608..2dd861cd11 100644
--- a/ietf/utils/tests.py
+++ b/ietf/utils/tests.py
@@ -23,6 +23,8 @@
from importlib import import_module
from textwrap import dedent
from tempfile import mkdtemp
+from xml2rfc import log as xml2rfc_log
+from xml2rfc.util.date import extract_date as xml2rfc_extract_date
from django.apps import apps
from django.contrib.auth.models import User
@@ -57,7 +59,7 @@
from ietf.utils.test_utils import TestCase, unicontent
from ietf.utils.text import parse_unicode
from ietf.utils.timezone import timezone_not_near_midnight
-from ietf.utils.xmldraft import XMLDraft
+from ietf.utils.xmldraft import XMLDraft, InvalidMetadataError, capture_xml2rfc_output
class SendingMail(TestCase):
@@ -544,7 +546,7 @@ def test_get_refs_v2(self):
def test_parse_creation_date(self):
# override date_today to avoid skew when test runs around midnight
today = datetime.date.today()
- with patch("ietf.utils.xmldraft.date_today", return_value=today):
+ with capture_xml2rfc_output(), patch("ietf.utils.xmldraft.date_today", return_value=today):
# Note: using a dict as a stand-in for XML elements, which rely on the get() method
self.assertEqual(
XMLDraft.parse_creation_date({"year": "2022", "month": "11", "day": "24"}),
@@ -590,6 +592,74 @@ def test_parse_creation_date(self):
),
datetime.date(today.year, 1 if today.month != 1 else 2, 15),
)
+        # Some exception-inducing conditions
+ with self.assertRaises(
+ InvalidMetadataError,
+ msg="raise an InvalidMetadataError if a year-only date is not current",
+ ):
+ XMLDraft.parse_creation_date(
+ {
+ "year": str(today.year - 1),
+ "month": "",
+ "day": "",
+ }
+ )
+ with self.assertRaises(
+ InvalidMetadataError,
+ msg="raise an InvalidMetadataError for a non-numeric year"
+ ):
+ XMLDraft.parse_creation_date(
+ {
+ "year": "two thousand twenty-five",
+ "month": "2",
+ "day": "28",
+ }
+ )
+ with self.assertRaises(
+ InvalidMetadataError,
+ msg="raise an InvalidMetadataError for an invalid month"
+ ):
+ XMLDraft.parse_creation_date(
+ {
+ "year": "2024",
+ "month": "13",
+ "day": "28",
+ }
+ )
+ with self.assertRaises(
+ InvalidMetadataError,
+ msg="raise an InvalidMetadataError for a misspelled month"
+ ):
+ XMLDraft.parse_creation_date(
+ {
+ "year": "2024",
+ "month": "Oktobur",
+ "day": "28",
+ }
+ )
+ with self.assertRaises(
+ InvalidMetadataError,
+ msg="raise an InvalidMetadataError for an invalid day"
+ ):
+ XMLDraft.parse_creation_date(
+ {
+ "year": "2024",
+ "month": "feb",
+ "day": "31",
+ }
+ )
+ with self.assertRaises(
+ InvalidMetadataError,
+ msg="raise an InvalidMetadataError for a non-numeric day"
+ ):
+ XMLDraft.parse_creation_date(
+ {
+ "year": "2024",
+ "month": "feb",
+ "day": "twenty-four",
+ }
+ )
+
def test_parse_docname(self):
with self.assertRaises(ValueError) as cm:
@@ -671,6 +741,39 @@ def test_render_author_name(self):
"J. Q.",
)
+ def test_capture_xml2rfc_output(self):
+ """capture_xml2rfc_output reroutes and captures xml2rfc logs"""
+ orig_write_out = xml2rfc_log.write_out
+ orig_write_err = xml2rfc_log.write_err
+ with capture_xml2rfc_output() as outer_log_streams: # ensure no output
+ # such meta! very Inception!
+ with capture_xml2rfc_output() as inner_log_streams:
+ # arbitrary xml2rfc method that triggers a log, nothing special otherwise
+ xml2rfc_extract_date({"year": "fish"}, datetime.date(2025,3,1))
+ self.assertNotEqual(inner_log_streams, outer_log_streams)
+ self.assertEqual(xml2rfc_log.write_out, outer_log_streams["stdout"], "out stream should be restored")
+ self.assertEqual(xml2rfc_log.write_err, outer_log_streams["stderr"], "err stream should be restored")
+ self.assertEqual(xml2rfc_log.write_out, orig_write_out, "original out stream should be restored")
+ self.assertEqual(xml2rfc_log.write_err, orig_write_err, "original err stream should be restored")
+
+ # don't happen to get any output on stdout and not paranoid enough to force some, just test stderr
+ self.assertGreater(len(inner_log_streams["stderr"].getvalue()), 0, "want output on inner streams")
+ self.assertEqual(len(outer_log_streams["stdout"].getvalue()), 0, "no output on outer streams")
+ self.assertEqual(len(outer_log_streams["stderr"].getvalue()), 0, "no output on outer streams")
+
+ def test_capture_xml2rfc_output_exception_handling(self):
+ """capture_xml2rfc_output restores streams after an exception"""
+ orig_write_out = xml2rfc_log.write_out
+ orig_write_err = xml2rfc_log.write_err
+ with capture_xml2rfc_output() as outer_log_streams: # ensure no output
+ with self.assertRaises(RuntimeError), capture_xml2rfc_output() as inner_log_streams:
+ raise RuntimeError("nooo")
+ self.assertNotEqual(inner_log_streams, outer_log_streams)
+ self.assertEqual(xml2rfc_log.write_out, outer_log_streams["stdout"], "out stream should be restored")
+ self.assertEqual(xml2rfc_log.write_err, outer_log_streams["stderr"], "err stream should be restored")
+ self.assertEqual(xml2rfc_log.write_out, orig_write_out, "original out stream should be restored")
+ self.assertEqual(xml2rfc_log.write_err, orig_write_err, "original err stream should be restored")
+
class NameTests(TestCase):
diff --git a/ietf/utils/xmldraft.py b/ietf/utils/xmldraft.py
index 73baf917d8..3ac9a269c7 100644
--- a/ietf/utils/xmldraft.py
+++ b/ietf/utils/xmldraft.py
@@ -7,7 +7,7 @@
import debug # pyflakes: ignore
-from contextlib import ExitStack
+from contextlib import contextmanager
from lxml.etree import XMLSyntaxError
from xml2rfc.util.date import augment_date, extract_date
from ietf.utils.timezone import date_today
@@ -15,6 +15,21 @@
from .draft import Draft
+@contextmanager
+def capture_xml2rfc_output():
+ orig_write_out = xml2rfc.log.write_out
+ orig_write_err = xml2rfc.log.write_err
+ parser_out = io.StringIO()
+ parser_err = io.StringIO()
+ xml2rfc.log.write_out = parser_out
+ xml2rfc.log.write_err = parser_err
+ try:
+ yield {"stdout": parser_out, "stderr": parser_err}
+ finally:
+ xml2rfc.log.write_out = orig_write_out
+ xml2rfc.log.write_err = orig_write_err
+
+
class XMLDraft(Draft):
"""Draft from XML source
@@ -38,27 +53,18 @@ def parse_xml(filename):
Converts to xml2rfc v3 schema, then returns the root of the v3 tree and the original
xml version.
"""
- orig_write_out = xml2rfc.log.write_out
- orig_write_err = xml2rfc.log.write_err
- parser_out = io.StringIO()
- parser_err = io.StringIO()
-
- with ExitStack() as stack:
- @stack.callback
- def cleanup(): # called when context exited, even if there's an exception
- xml2rfc.log.write_out = orig_write_out
- xml2rfc.log.write_err = orig_write_err
-
- xml2rfc.log.write_out = parser_out
- xml2rfc.log.write_err = parser_err
+ with capture_xml2rfc_output() as parser_logs:
parser = xml2rfc.XmlRfcParser(filename, quiet=True)
try:
tree = parser.parse()
except XMLSyntaxError:
raise InvalidXMLError()
except Exception as e:
- raise XMLParseError(parser_out.getvalue(), parser_err.getvalue()) from e
+ raise XMLParseError(
+ parser_logs["stdout"].getvalue(),
+ parser_logs["stderr"].getvalue(),
+ ) from e
xml_version = tree.getroot().get('version', '2')
if xml_version == '2':
@@ -147,10 +153,31 @@ def get_title(self):
def parse_creation_date(date_elt):
if date_elt is None:
return None
+
today = date_today()
- # ths mimics handling of date elements in the xml2rfc text/html writers
- year, month, day = extract_date(date_elt, today)
- year, month, day = augment_date(year, month, day, today)
+
+ # Outright reject non-numeric year / day (xml2rfc's extract_date does not do this)
+ # (n.b., "year" can be non-numeric in a section per RFC 7991)
+ year = date_elt.get("year")
+ day = date_elt.get("day")
+ non_numeric_year = year and not year.isdigit()
+ non_numeric_day = day and not day.isdigit()
+ if non_numeric_day or non_numeric_year:
+ raise InvalidMetadataError(
+                "Unable to parse the <date> element in the <front> section: "
+ "year and day must be numeric values if specified."
+ )
+
+ try:
+            # this mimics handling of date elements in the xml2rfc text/html writers
+ year, month, day = extract_date(date_elt, today)
+ year, month, day = augment_date(year, month, day, today)
+ except Exception as err:
+ # Give a generic error if anything goes wrong so far...
+ raise InvalidMetadataError(
+                "Unable to parse the <date> element in the <front> section."
+ ) from err
+
if not day:
# Must choose a day for a datetime.date. Per RFC 7991 sect 2.17, we use
# today's date if it is consistent with the rest of the date. Otherwise,
@@ -159,9 +186,12 @@ def parse_creation_date(date_elt):
day = today.day
else:
day = 15
+
try:
creation_date = datetime.date(year, month, day)
except Exception:
+ # If everything went well, we should have had a valid datetime, but we didn't.
+ # The parsing _worked_ but not in a way that we can go forward with.
raise InvalidMetadataError(
                 "The <date> element in the <front> section specified an incomplete date "
"that was not consistent with today's date. If you specify only a year, "
From 232a861f8ae52e1026d59d7088f07211acd166a5 Mon Sep 17 00:00:00 2001
From: Jennifer Richards
Date: Mon, 3 Mar 2025 14:51:14 -0400
Subject: [PATCH 072/460] chore: config gunicorn secure_scheme_headers (#8632)
* chore: config gunicorn secure_scheme_headers
* chore: typo in comment
---
dev/build/gunicorn.conf.py | 6 ++++++
1 file changed, 6 insertions(+)
diff --git a/dev/build/gunicorn.conf.py b/dev/build/gunicorn.conf.py
index 6666a0d37d..032d95ee0d 100644
--- a/dev/build/gunicorn.conf.py
+++ b/dev/build/gunicorn.conf.py
@@ -1,5 +1,11 @@
# Copyright The IETF Trust 2024, All Rights Reserved
+# Configure security scheme headers for forwarded requests. Cloudflare sets X-Forwarded-Proto
+# for us. Don't trust any of the other similar headers. Only trust the header if it's coming
+# from localhost, as all legitimate traffic will reach gunicorn via co-located nginx.
+secure_scheme_headers = {"X-FORWARDED-PROTO": "https"}
+forwarded_allow_ips = "127.0.0.1, ::1" # this is the default
+
# Log as JSON on stdout (to distinguish from Django's logs on stderr)
#
# This is applied as an update to gunicorn's glogging.CONFIG_DEFAULTS.
From 554182ef8ab33947ca8d9ee904a5d5472d3c57f8 Mon Sep 17 00:00:00 2001
From: Robert Sparks
Date: Tue, 4 Mar 2025 11:42:04 -0600
Subject: [PATCH 073/460] feat: run the docker container as dev (#8606)
* feat: run the docker container as dev
* fix: $@ -> $*
Old bug, but might as well fix it now
---------
Co-authored-by: Jennifer Richards
---
dev/celery/docker-init.sh | 13 ++++++---
docker-compose.yml | 4 ++-
docker/celery.Dockerfile | 60 +++++++++++++++++++++++++++++++++++++++
3 files changed, 72 insertions(+), 5 deletions(-)
create mode 100644 docker/celery.Dockerfile
diff --git a/dev/celery/docker-init.sh b/dev/celery/docker-init.sh
index 4fd1f1294f..9940dfd7d0 100755
--- a/dev/celery/docker-init.sh
+++ b/dev/celery/docker-init.sh
@@ -49,11 +49,16 @@ if [[ -n "${CELERY_GID}" ]]; then
fi
run_as_celery_uid () {
- SU_OPTS=()
- if [[ -n "${CELERY_GROUP}" ]]; then
- SU_OPTS+=("-g" "${CELERY_GROUP}")
+ IAM=$(whoami)
+ if [ "${IAM}" = "${CELERY_USERNAME:-root}" ]; then
+ SU_OPTS=()
+ if [[ -n "${CELERY_GROUP}" ]]; then
+ SU_OPTS+=("-g" "${CELERY_GROUP}")
+ fi
+ su "${SU_OPTS[@]}" "${CELERY_USERNAME:-root}" -s /bin/sh -c "$*"
+ else
+ /bin/sh -c "$*"
fi
- su "${SU_OPTS[@]}" "${CELERY_USERNAME:-root}" -s /bin/sh -c "$@"
}
log_term_timing_msgs () {
diff --git a/docker-compose.yml b/docker-compose.yml
index 30ce8ba4d2..9910c02a99 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -67,7 +67,9 @@ services:
restart: unless-stopped
celery:
- image: ghcr.io/ietf-tools/datatracker-celery:latest
+ build:
+ context: .
+ dockerfile: docker/celery.Dockerfile
init: true
environment:
CELERY_APP: ietf
diff --git a/docker/celery.Dockerfile b/docker/celery.Dockerfile
new file mode 100644
index 0000000000..e44200398c
--- /dev/null
+++ b/docker/celery.Dockerfile
@@ -0,0 +1,60 @@
+FROM ghcr.io/ietf-tools/datatracker-celery:latest
+LABEL maintainer="IETF Tools Team "
+
+ENV DEBIAN_FRONTEND=noninteractive
+
+# Install needed packages and setup non-root user.
+ARG USERNAME=dev
+ARG USER_UID=1000
+ARG USER_GID=$USER_UID
+COPY docker/scripts/app-setup-debian.sh /tmp/library-scripts/docker-setup-debian.sh
+RUN sed -i 's/\r$//' /tmp/library-scripts/docker-setup-debian.sh && chmod +x /tmp/library-scripts/docker-setup-debian.sh
+
+# Add Postgresql Apt Repository to get 14
+RUN echo "deb http://apt.postgresql.org/pub/repos/apt $(. /etc/os-release && echo "$VERSION_CODENAME")-pgdg main" | tee /etc/apt/sources.list.d/pgdg.list
+RUN wget --quiet -O - https://www.postgresql.org/media/keys/ACCC4CF8.asc | apt-key add -
+
+RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \
+ && apt-get install -y --no-install-recommends postgresql-client-14 pgloader \
+ # Remove imagemagick due to https://security-tracker.debian.org/tracker/CVE-2019-10131
+ && apt-get purge -y imagemagick imagemagick-6-common \
+ # Install common packages, non-root user
+ # Syntax: ./docker-setup-debian.sh [install zsh flag] [username] [user UID] [user GID] [upgrade packages flag] [install Oh My Zsh! flag] [Add non-free packages]
+ && bash /tmp/library-scripts/docker-setup-debian.sh "true" "${USERNAME}" "${USER_UID}" "${USER_GID}" "false" "true" "true"
+
+# Setup default python tools in a venv via pipx to avoid conflicts
+ENV PIPX_HOME=/usr/local/py-utils \
+ PIPX_BIN_DIR=/usr/local/py-utils/bin
+ENV PATH=${PATH}:${PIPX_BIN_DIR}
+COPY docker/scripts/app-setup-python.sh /tmp/library-scripts/docker-setup-python.sh
+RUN sed -i 's/\r$//' /tmp/library-scripts/docker-setup-python.sh && chmod +x /tmp/library-scripts/docker-setup-python.sh
+RUN bash /tmp/library-scripts/docker-setup-python.sh "none" "/usr/local" "${PIPX_HOME}" "${USERNAME}"
+
+# Remove library scripts for final image
+RUN rm -rf /tmp/library-scripts
+
+# Copy the startup file
+COPY dev/celery/docker-init.sh /docker-init.sh
+RUN sed -i 's/\r$//' /docker-init.sh && \
+ chmod +x /docker-init.sh
+
+ENTRYPOINT [ "/docker-init.sh" ]
+
+# Fix user UID / GID to match host
+RUN groupmod --gid $USER_GID $USERNAME \
+ && usermod --uid $USER_UID --gid $USER_GID $USERNAME \
+ && chown -R $USER_UID:$USER_GID /home/$USERNAME \
+ || exit 0
+
+# Switch to local dev user
+USER dev:dev
+
+# Install current datatracker python dependencies
+COPY requirements.txt /tmp/pip-tmp/
+RUN pip3 --disable-pip-version-check --no-cache-dir install --user --no-warn-script-location -r /tmp/pip-tmp/requirements.txt
+RUN pip3 --disable-pip-version-check --no-cache-dir install --user --no-warn-script-location watchdog[watchmedo]
+
+RUN sudo rm -rf /tmp/pip-tmp
+
+VOLUME [ "/assets" ]
+
From cf6340443f7437ac23ff65c981741335a0911363 Mon Sep 17 00:00:00 2001
From: Nicolas Giard
Date: Sat, 8 Mar 2025 00:56:14 -0500
Subject: [PATCH 074/460] docs: Update README.md
---
README.md | 15 +++++++++++----
1 file changed, 11 insertions(+), 4 deletions(-)
diff --git a/README.md b/README.md
index 0ece0eb03b..abebb7ca02 100644
--- a/README.md
+++ b/README.md
@@ -44,6 +44,7 @@
This project is following the standard **Git Feature Workflow** development model. Learn about all the various steps of the development workflow, from creating a fork to submitting a pull request, in the [Contributing](https://github.com/ietf-tools/.github/blob/main/CONTRIBUTING.md) guide.
+> [!TIP]
> Make sure to read the [Styleguides](https://github.com/ietf-tools/.github/blob/main/CONTRIBUTING.md#styleguides) section to ensure a cohesive code format across the project.
You can submit bug reports, enhancement and new feature requests in the [discussions](https://github.com/ietf-tools/datatracker/discussions) area. Accepted tickets will be converted to issues.
@@ -52,7 +53,8 @@ You can submit bug reports, enhancement and new feature requests in the [discuss
Click the Fork button in the top-right corner of the repository to create a personal copy that you can work on.
-> Note that some GitHub Actions might be enabled by default in your fork. You should disable them by going to **Settings** > **Actions** > **General** and selecting **Disable actions** (then Save).
+> [!NOTE]
+> Some GitHub Actions might be enabled by default in your fork. You should disable them by going to **Settings** > **Actions** > **General** and selecting **Disable actions** (then Save).
#### Git Cloning Tips
@@ -104,7 +106,8 @@ Read the [Docker Dev Environment](docker/README.md) guide to get started.
Nightly database dumps of the datatracker are available as Docker images: `ghcr.io/ietf-tools/datatracker-db:latest`
-> Note that to update the database in your dev environment to the latest version, you should run the `docker/cleandb` script.
+> [!TIP]
+> In order to update the database in your dev environment to the latest version, you should run the `docker/cleandb` script.
### Blob storage for dev/test
@@ -248,6 +251,7 @@ From a datatracker container, run the command:
./ietf/manage.py test --settings=settings_test
```
+> [!TIP]
> You can limit the run to specific tests using the `--pattern` argument.
### Frontend Tests
@@ -257,11 +261,13 @@ Frontend tests are done via Playwright. There're 2 different type of tests:
- Tests that test Vue pages / components and run natively without any external dependency.
- Tests that require a running datatracker instance to test against (usually legacy views).
+> [!IMPORTANT]
> Make sure you have Node.js 16.x or later installed on your machine.
#### Run Vue Tests
-> :warning: All commands below **MUST** be run from the `./playwright` directory, unless noted otherwise.
+> [!WARNING]
+> All commands below **MUST** be run from the `./playwright` directory, unless noted otherwise.
1. Run **once** to install dependencies on your system:
```sh
@@ -294,7 +300,8 @@ Frontend tests are done via Playwright. There're 2 different type of tests:
First, you need to start a datatracker instance (dev or prod), ideally from a docker container, exposing the 8000 port.
-> :warning: All commands below **MUST** be run from the `./playwright` directory.
+> [!WARNING]
+> All commands below **MUST** be run from the `./playwright` directory.
1. Run **once** to install dependencies on your system:
```sh
From cf21c4129a3d083980297dcaa82b5fd58bf447f6 Mon Sep 17 00:00:00 2001
From: Nicolas Giard
Date: Sat, 8 Mar 2025 00:59:26 -0500
Subject: [PATCH 075/460] docs: Update README.md
---
docker/README.md | 6 +++---
1 file changed, 3 insertions(+), 3 deletions(-)
diff --git a/docker/README.md b/docker/README.md
index 14fcc38995..f2161a173f 100644
--- a/docker/README.md
+++ b/docker/README.md
@@ -4,11 +4,12 @@
1. [Set up Docker](https://docs.docker.com/get-started/) on your preferred platform. On Windows, it is highly recommended to use the [WSL 2 *(Windows Subsystem for Linux)*](https://docs.docker.com/desktop/windows/wsl/) backend.
+> [!IMPORTANT]
> See the [IETF Tools Windows Dev guide](https://github.com/ietf-tools/.github/blob/main/docs/windows-dev.md) on how to get started when using Windows.
2. On Linux, you must [install Docker Compose manually](https://docs.docker.com/compose/install/linux/#install-the-plugin-manually) and not install Docker Desktop. On Mac and Windows install Docker Desktop which already includes Docker Compose.
-2. If you have a copy of the datatracker code checked out already, simply `cd` to the top-level directory.
+3. If you have a copy of the datatracker code checked out already, simply `cd` to the top-level directory.
If not, check out a datatracker branch as usual. We'll check out `main` below, but you can use any branch:
@@ -18,7 +19,7 @@
git checkout main
```
-3. Follow the instructions for your preferred editor:
+4. Follow the instructions for your preferred editor:
- [Visual Studio Code](#using-visual-studio-code)
- [Other Editors / Generic](#using-other-editors--generic)
@@ -189,7 +190,6 @@ The content of the source files will be copied into the target `.ics` files. Mak
Because including all assets in the image would significantly increase the file size, they are not included by default. You can however fetch them by running the **Fetch assets via rsync** task in VS Code or run manually the script `docker/scripts/app-rsync-extras.sh`
-
### Linux file permissions leaking to the host system
If on the host filesystem you have permissions that look like this,
From 06158c05c77b89434197a626d7cfcf04b8b1e14a Mon Sep 17 00:00:00 2001
From: Nicolas Giard
Date: Mon, 10 Mar 2025 16:28:20 -0400
Subject: [PATCH 076/460] chore: Remove deprecated version from
docker-compose.extend.yml
---
.devcontainer/docker-compose.extend.yml | 2 --
1 file changed, 2 deletions(-)
diff --git a/.devcontainer/docker-compose.extend.yml b/.devcontainer/docker-compose.extend.yml
index 286eefb29c..a92f42bc6d 100644
--- a/.devcontainer/docker-compose.extend.yml
+++ b/.devcontainer/docker-compose.extend.yml
@@ -1,5 +1,3 @@
-version: '3.8'
-
services:
app:
environment:
From 887ec11f3916d19da04dd939a0aa2edd697f91fd Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Sat, 15 Mar 2025 00:43:17 -0400
Subject: [PATCH 077/460] chore(deps): bump ncipollo/release-action from 1.15.0
to 1.16.0 (#8594)
Bumps [ncipollo/release-action](https://github.com/ncipollo/release-action) from 1.15.0 to 1.16.0.
- [Release notes](https://github.com/ncipollo/release-action/releases)
- [Commits](https://github.com/ncipollo/release-action/compare/v1.15.0...v1.16.0)
---
updated-dependencies:
- dependency-name: ncipollo/release-action
dependency-type: direct:production
update-type: version-update:semver-minor
...
Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
---
.github/workflows/build.yml | 6 +++---
1 file changed, 3 insertions(+), 3 deletions(-)
diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index a14ea73e9b..9f621e16b7 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -97,7 +97,7 @@ jobs:
echo "IS_RELEASE=true" >> $GITHUB_ENV
- name: Create Draft Release
- uses: ncipollo/release-action@v1.15.0
+ uses: ncipollo/release-action@v1.16.0
if: ${{ github.ref_name == 'release' }}
with:
prerelease: true
@@ -316,7 +316,7 @@ jobs:
histCoveragePath: historical-coverage.json
- name: Create Release
- uses: ncipollo/release-action@v1.15.0
+ uses: ncipollo/release-action@v1.16.0
if: ${{ env.SHOULD_DEPLOY == 'true' }}
with:
allowUpdates: true
@@ -329,7 +329,7 @@ jobs:
token: ${{ secrets.GITHUB_TOKEN }}
- name: Update Baseline Coverage
- uses: ncipollo/release-action@v1.15.0
+ uses: ncipollo/release-action@v1.16.0
if: ${{ github.event.inputs.updateCoverage == 'true' || github.ref_name == 'release' }}
with:
allowUpdates: true
From e56c6cae272cb482a6d33f1f7e24c7ba8a48361d Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Sat, 15 Mar 2025 00:43:47 -0400
Subject: [PATCH 078/460] chore(deps): bump actions/download-artifact from
4.1.8 to 4.1.9 (#8628)
Bumps [actions/download-artifact](https://github.com/actions/download-artifact) from 4.1.8 to 4.1.9.
- [Release notes](https://github.com/actions/download-artifact/releases)
- [Commits](https://github.com/actions/download-artifact/compare/v4.1.8...v4.1.9)
---
updated-dependencies:
- dependency-name: actions/download-artifact
dependency-type: direct:production
update-type: version-update:semver-patch
...
Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
---
.github/workflows/build.yml | 6 +++---
1 file changed, 3 insertions(+), 3 deletions(-)
diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index 9f621e16b7..123bd5c65a 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -185,7 +185,7 @@ jobs:
- name: Download a Coverage Results
if: ${{ github.event.inputs.skiptests == 'false' || github.ref_name == 'release' }}
- uses: actions/download-artifact@v4.1.8
+ uses: actions/download-artifact@v4.1.9
with:
name: coverage
@@ -292,7 +292,7 @@ jobs:
- name: Download Coverage Results
if: ${{ github.event.inputs.skiptests == 'false' || github.ref_name == 'release' }}
- uses: actions/download-artifact@v4.1.8
+ uses: actions/download-artifact@v4.1.9
with:
name: coverage
@@ -407,7 +407,7 @@ jobs:
- uses: actions/checkout@v4
- name: Download a Release Artifact
- uses: actions/download-artifact@v4.1.8
+ uses: actions/download-artifact@v4.1.9
with:
name: release-${{ env.PKG_VERSION }}
From 9db109f692d8592c5407ea82d2c2790eb6006263 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Sat, 15 Mar 2025 00:44:18 -0400
Subject: [PATCH 079/460] chore(deps): bump appleboy/ssh-action from 1.2.0 to
1.2.2 (#8650)
Bumps [appleboy/ssh-action](https://github.com/appleboy/ssh-action) from 1.2.0 to 1.2.2.
- [Release notes](https://github.com/appleboy/ssh-action/releases)
- [Changelog](https://github.com/appleboy/ssh-action/blob/master/.goreleaser.yaml)
- [Commits](https://github.com/appleboy/ssh-action/compare/7eaf76671a0d7eec5d98ee897acda4f968735a17...2ead5e36573f08b82fbfce1504f1a4b05a647c6f)
---
updated-dependencies:
- dependency-name: appleboy/ssh-action
dependency-type: direct:production
update-type: version-update:semver-patch
...
Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
---
.github/workflows/tests-az.yml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/.github/workflows/tests-az.yml b/.github/workflows/tests-az.yml
index 6d53a121aa..8553563a19 100644
--- a/.github/workflows/tests-az.yml
+++ b/.github/workflows/tests-az.yml
@@ -38,7 +38,7 @@ jobs:
ssh-keyscan -t rsa $vminfo >> ~/.ssh/known_hosts
- name: Remote SSH into VM
- uses: appleboy/ssh-action@7eaf76671a0d7eec5d98ee897acda4f968735a17
+ uses: appleboy/ssh-action@2ead5e36573f08b82fbfce1504f1a4b05a647c6f
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
From 227b44bfa25036e7d4fea86a72405beb4e53e2ce Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Sat, 15 Mar 2025 00:45:42 -0400
Subject: [PATCH 080/460] chore(deps): bump nanoid (#8649)
Bumps the npm group with 1 update in the /dev/deploy-to-container directory: [nanoid](https://github.com/ai/nanoid).
Updates `nanoid` from 5.0.9 to 5.1.3
- [Release notes](https://github.com/ai/nanoid/releases)
- [Changelog](https://github.com/ai/nanoid/blob/main/CHANGELOG.md)
- [Commits](https://github.com/ai/nanoid/compare/5.0.9...5.1.3)
---
updated-dependencies:
- dependency-name: nanoid
dependency-type: direct:production
update-type: version-update:semver-minor
dependency-group: npm
...
Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
---
dev/deploy-to-container/package-lock.json | 15 ++++++++-------
dev/deploy-to-container/package.json | 2 +-
2 files changed, 9 insertions(+), 8 deletions(-)
diff --git a/dev/deploy-to-container/package-lock.json b/dev/deploy-to-container/package-lock.json
index 03327083b1..c4f675527e 100644
--- a/dev/deploy-to-container/package-lock.json
+++ b/dev/deploy-to-container/package-lock.json
@@ -8,7 +8,7 @@
"dependencies": {
"dockerode": "^4.0.4",
"fs-extra": "^11.3.0",
- "nanoid": "5.0.9",
+ "nanoid": "5.1.3",
"nanoid-dictionary": "5.0.0-beta.1",
"slugify": "1.6.6",
"tar": "^7.4.3",
@@ -668,15 +668,16 @@
"optional": true
},
"node_modules/nanoid": {
- "version": "5.0.9",
- "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-5.0.9.tgz",
- "integrity": "sha512-Aooyr6MXU6HpvvWXKoVoXwKMs/KyVakWwg7xQfv5/S/RIgJMy0Ifa45H9qqYy7pTCszrHzP21Uk4PZq2HpEM8Q==",
+ "version": "5.1.3",
+ "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-5.1.3.tgz",
+ "integrity": "sha512-zAbEOEr7u2CbxwoMRlz/pNSpRP0FdAU4pRaYunCdEezWohXFs+a0Xw7RfkKaezMsmSM1vttcLthJtwRnVtOfHQ==",
"funding": [
{
"type": "github",
"url": "https://github.com/sponsors/ai"
}
],
+ "license": "MIT",
"bin": {
"nanoid": "bin/nanoid.js"
},
@@ -1612,9 +1613,9 @@
"optional": true
},
"nanoid": {
- "version": "5.0.9",
- "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-5.0.9.tgz",
- "integrity": "sha512-Aooyr6MXU6HpvvWXKoVoXwKMs/KyVakWwg7xQfv5/S/RIgJMy0Ifa45H9qqYy7pTCszrHzP21Uk4PZq2HpEM8Q=="
+ "version": "5.1.3",
+ "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-5.1.3.tgz",
+ "integrity": "sha512-zAbEOEr7u2CbxwoMRlz/pNSpRP0FdAU4pRaYunCdEezWohXFs+a0Xw7RfkKaezMsmSM1vttcLthJtwRnVtOfHQ=="
},
"nanoid-dictionary": {
"version": "5.0.0-beta.1",
diff --git a/dev/deploy-to-container/package.json b/dev/deploy-to-container/package.json
index 1f54745ebf..8772c568c8 100644
--- a/dev/deploy-to-container/package.json
+++ b/dev/deploy-to-container/package.json
@@ -4,7 +4,7 @@
"dependencies": {
"dockerode": "^4.0.4",
"fs-extra": "^11.3.0",
- "nanoid": "5.0.9",
+ "nanoid": "5.1.3",
"nanoid-dictionary": "5.0.0-beta.1",
"slugify": "1.6.6",
"tar": "^7.4.3",
From 968820de34bdf6d0c1fd69b921ed490da725b941 Mon Sep 17 00:00:00 2001
From: Jennifer Richards
Date: Sat, 15 Mar 2025 14:45:04 +0700
Subject: [PATCH 081/460] feat: celery task + admin to resend Messages (#8661)
* feat: Message re-send task
* feat: admin action to queue redelivery
* feat: MessageAdmin list_filters
* feat: show sent status
* feat: better date filtering
* chore: remove send-by-date task
Adds complexity and risk - the improved Messages admin lets us do most of what it did without the opportunity for accidentally resending huge ranges
* chore: fill in empty docstring
* style: black
* fix: unused import
* feat: better logging
* chore: mypy lint
* test: test retry_send_messages_by_pk_task
* test: test retry_send_messages
---
ietf/message/admin.py | 79 +++++++++++++++++++++++++++++++---
ietf/message/tasks.py | 24 ++++++++++-
ietf/message/tests.py | 59 +++++++++++++++++++++++--
ietf/message/utils.py | 36 ++++++++++++++--
ietf/settings.py | 1 +
ietf/templates/admin/base.html | 1 +
requirements.txt | 1 +
7 files changed, 187 insertions(+), 14 deletions(-)
diff --git a/ietf/message/admin.py b/ietf/message/admin.py
index c2564c04b9..250e1eb596 100644
--- a/ietf/message/admin.py
+++ b/ietf/message/admin.py
@@ -1,32 +1,99 @@
-from django.contrib import admin
+# Copyright The IETF Trust 2012-2025, All Rights Reserved
+from django.contrib import admin, messages
+from django.db.models import QuerySet
+from rangefilter.filters import DateRangeQuickSelectListFilterBuilder
from ietf.message.models import Message, MessageAttachment, SendQueue, AnnouncementFrom
+from ietf.message.tasks import retry_send_messages_by_pk_task
+
+
+class MessageSentStatusListFilter(admin.SimpleListFilter):
+ """Filter Messages by whether or not they were sent"""
+
+ title = "status"
+ parameter_name = "status"
+
+ def lookups(self, request, model_admin):
+ return [
+ ("sent", "Sent"),
+ ("unsent", "Not sent"),
+ ]
+
+ def queryset(self, request, queryset):
+ if self.value() == "unsent":
+ return queryset.filter(sent__isnull=True)
+ elif self.value() == "sent":
+ return queryset.filter(sent__isnull=False)
+
class MessageAdmin(admin.ModelAdmin):
- list_display = ["subject", "by", "time", "groups"]
+ list_display = ["sent_status", "subject", "by", "time", "groups"]
search_fields = ["subject", "body"]
raw_id_fields = ["by", "related_groups", "related_docs"]
+ list_filter = [
+ MessageSentStatusListFilter,
+ ("time", DateRangeQuickSelectListFilterBuilder()),
+ ]
ordering = ["-time"]
+ actions = ["retry_send"]
def groups(self, instance):
return ", ".join(g.acronym for g in instance.related_groups.all())
+
+ @admin.display(description="Sent", boolean=True)
+ def sent_status(self, instance):
+ return instance.sent is not None
+
+ @admin.action(description="Send selected messages if unsent")
+ def retry_send(self, request, queryset: QuerySet[Message]):
+ try:
+ retry_send_messages_by_pk_task.delay(
+ message_pks=list(queryset.values_list("pk", flat=True)),
+ resend=False,
+ )
+ except Exception as err:
+ self.message_user(
+ request,
+ f"Error: {repr(err)}",
+ messages.ERROR,
+ )
+ else:
+ self.message_user(request, "Messages queued for delivery", messages.SUCCESS)
+
+
admin.site.register(Message, MessageAdmin)
+
class MessageAttachmentAdmin(admin.ModelAdmin):
- list_display = ['id', 'message', 'filename', 'removed',]
- raw_id_fields = ['message']
+ list_display = [
+ "id",
+ "message",
+ "filename",
+ "removed",
+ ]
+ raw_id_fields = ["message"]
+
+
admin.site.register(MessageAttachment, MessageAttachmentAdmin)
+
class SendQueueAdmin(admin.ModelAdmin):
list_display = ["time", "by", "message", "send_at", "sent_at"]
list_filter = ["time", "send_at", "sent_at"]
search_fields = ["message__body"]
raw_id_fields = ["by", "message"]
ordering = ["-time"]
+
+
admin.site.register(SendQueue, SendQueueAdmin)
+
class AnnouncementFromAdmin(admin.ModelAdmin):
- list_display = ['name', 'group', 'address', ]
-admin.site.register(AnnouncementFrom, AnnouncementFromAdmin)
+ list_display = [
+ "name",
+ "group",
+ "address",
+ ]
+admin.site.register(AnnouncementFrom, AnnouncementFromAdmin)
diff --git a/ietf/message/tasks.py b/ietf/message/tasks.py
index efd776b9d8..1fdff7bea4 100644
--- a/ietf/message/tasks.py
+++ b/ietf/message/tasks.py
@@ -5,8 +5,8 @@
from celery import shared_task
from smtplib import SMTPException
-from ietf.message.utils import send_scheduled_message_from_send_queue
-from ietf.message.models import SendQueue
+from ietf.message.utils import send_scheduled_message_from_send_queue, retry_send_messages
+from ietf.message.models import SendQueue, Message
from ietf.utils import log
from ietf.utils.mail import log_smtp_exception, send_error_email
@@ -25,3 +25,23 @@ def send_scheduled_mail_task():
except SMTPException as e:
log_smtp_exception(e)
send_error_email(e)
+
+
+@shared_task
+def retry_send_messages_by_pk_task(message_pks: list, resend=False):
+ """Task to retry sending Messages by PK
+
+ Sends Messages whose PK is included in the list.
+ Only previously unsent messages are sent unless `resend` is true.
+ """
+ log.log(
+ "retry_send_messages_by_pk_task: "
+ "retrying send of Message PKs [{}] (resend={})".format(
+ ", ".join(str(pk) for pk in message_pks),
+ resend,
+ )
+ )
+ retry_send_messages(
+ messages=Message.objects.filter(pk__in=message_pks),
+ resend=resend,
+ )
diff --git a/ietf/message/tests.py b/ietf/message/tests.py
index 7fbd29167c..a677d5477e 100644
--- a/ietf/message/tests.py
+++ b/ietf/message/tests.py
@@ -11,10 +11,10 @@
import debug # pyflakes:ignore
from ietf.group.factories import GroupFactory
-from ietf.message.factories import SendQueueFactory
+from ietf.message.factories import MessageFactory, SendQueueFactory
from ietf.message.models import Message, SendQueue
-from ietf.message.tasks import send_scheduled_mail_task
-from ietf.message.utils import send_scheduled_message_from_send_queue
+from ietf.message.tasks import send_scheduled_mail_task, retry_send_messages_by_pk_task
+from ietf.message.utils import send_scheduled_message_from_send_queue, retry_send_messages
from ietf.person.models import Person
from ietf.utils.mail import outbox, send_mail_text, send_mail_message, get_payload_text
from ietf.utils.test_utils import TestCase
@@ -133,6 +133,44 @@ def test_send_mime_announcement(self):
self.assertTrue(SendQueue.objects.get(id=q.id).sent_at)
+class UtilsTests(TestCase):
+ @mock.patch("ietf.message.utils.send_mail_message")
+ def test_retry_send_messages(self, mock_send_mail_message):
+ sent_message = MessageFactory(sent=timezone.now())
+ unsent_messages = MessageFactory.create_batch(2, sent=None)
+
+ # Send the sent message and one of the unsent messages
+ retry_send_messages(
+ Message.objects.filter(pk__in=[
+ sent_message.pk,
+ unsent_messages[0].pk,
+ ]),
+ resend=False,
+ )
+ self.assertEqual(mock_send_mail_message.call_count, 1)
+ self.assertEqual(
+ mock_send_mail_message.call_args.args[1],
+ unsent_messages[0],
+ )
+
+ mock_send_mail_message.reset_mock()
+ # Once again, send the sent message and one of the unsent messages
+ # (we can use the same one because our mock prevented it from having
+ # its status updated to sent)
+ retry_send_messages(
+ Message.objects.filter(pk__in=[
+ sent_message.pk,
+ unsent_messages[0].pk,
+ ]),
+ resend=True,
+ )
+ self.assertEqual(mock_send_mail_message.call_count, 2)
+ self.assertCountEqual(
+ [call_args.args[1] for call_args in mock_send_mail_message.call_args_list],
+ [sent_message, unsent_messages[0]],
+ )
+
+
class TaskTests(TestCase):
@mock.patch("ietf.message.tasks.log_smtp_exception")
@mock.patch("ietf.message.tasks.send_scheduled_message_from_send_queue")
@@ -150,3 +188,18 @@ def test_send_scheduled_mail_task(self, mock_send_message, mock_log_smtp_excepti
self.assertEqual(mock_send_message.call_count, 1)
self.assertEqual(mock_send_message.call_args[0], (not_yet_sent,))
self.assertTrue(mock_log_smtp_exception.called)
+
+ @mock.patch("ietf.message.tasks.retry_send_messages")
+ def test_retry_send_messages_by_pk_task(self, mock_retry_send):
+ msgs = MessageFactory.create_batch(3)
+ MessageFactory() # an extra message that won't be resent
+
+ retry_send_messages_by_pk_task([msg.pk for msg in msgs], resend=False)
+ called_with_messages = mock_retry_send.call_args.kwargs["messages"]
+ self.assertCountEqual(msgs, called_with_messages)
+ self.assertFalse(mock_retry_send.call_args.kwargs["resend"])
+
+ retry_send_messages_by_pk_task([msg.pk for msg in msgs], resend=True)
+ called_with_messages = mock_retry_send.call_args.kwargs["messages"]
+ self.assertCountEqual(msgs, called_with_messages)
+ self.assertTrue(mock_retry_send.call_args.kwargs["resend"])
diff --git a/ietf/message/utils.py b/ietf/message/utils.py
index 2601eccab8..74448ca7c9 100644
--- a/ietf/message/utils.py
+++ b/ietf/message/utils.py
@@ -1,13 +1,17 @@
# Copyright The IETF Trust 2012-2020, All Rights Reserved
# -*- coding: utf-8 -*-
+import email
+import email.utils
+import re
+import smtplib
-import re, email
-
+from django.db.models import QuerySet
from django.utils import timezone
from django.utils.encoding import force_str
-from ietf.utils.mail import send_mail_text, send_mail_mime
+from ietf.utils import log
+from ietf.utils.mail import send_mail_text, send_mail_mime, send_mail_message
from ietf.message.models import Message
first_dot_on_line_re = re.compile(r'^\.', re.MULTILINE)
@@ -58,3 +62,29 @@ def send_scheduled_message_from_send_queue(queue_item):
queue_item.message.sent = queue_item.sent_at
queue_item.message.save()
+
+
+def retry_send_messages(messages: QuerySet[Message], resend=False):
+ """Attempt delivery of Messages"""
+ if not resend:
+ # only include sent messages on explicit request
+ for already_sent in messages.filter(sent__isnull=False):
+ assert already_sent.sent is not None # appease mypy type checking
+ log.log(
+ f"retry_send_messages: skipping {already_sent.pk} "
+ f"(already sent {already_sent.sent.isoformat(timespec='milliseconds')})"
+ )
+ messages = messages.filter(sent__isnull=True)
+ for msg in messages:
+ to = ",".join(a[1] for a in email.utils.getaddresses([msg.to]))
+ try:
+ send_mail_message(None, msg)
+ log.log(
+ f'retry_send_messages: '
+ f'sent {msg.pk} {msg.frm} -> {to} "{msg.subject.strip()}"'
+ )
+ except smtplib.SMTPException as e:
+ log.log(
+ f'retry_send_messages: '
+ f'Failure {e}: {msg.pk} {msg.frm} -> {to} "{msg.subject.strip()}"'
+ )
diff --git a/ietf/settings.py b/ietf/settings.py
index faee42237c..1fe5f48229 100644
--- a/ietf/settings.py
+++ b/ietf/settings.py
@@ -465,6 +465,7 @@ def skip_unreadable_post(record):
'drf_spectacular',
'drf_standardized_errors',
'rest_framework',
+ 'rangefilter',
'simple_history',
'tastypie',
'widget_tweaks',
diff --git a/ietf/templates/admin/base.html b/ietf/templates/admin/base.html
index 9ca7377a54..d48891dfc4 100644
--- a/ietf/templates/admin/base.html
+++ b/ietf/templates/admin/base.html
@@ -20,6 +20,7 @@
--header-color: var(--bs-secondary);
--breadcrumbs-fg: var(--bs-secondary);
--breadcrumbs-link-fg: var(--link-fg);
+ .calendar caption { background-color: var(--secondary);}
}
span.text-danger { color: var(--bs-danger); }
diff --git a/requirements.txt b/requirements.txt
index d8b6e0742f..8bd906c220 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -13,6 +13,7 @@ celery>=5.2.6
coverage>=4.5.4,<5.0 # Coverage 5.x moves from a json database to SQLite. Moving to 5.x will require substantial rewrites in ietf.utils.test_runner and ietf.release.views
defusedxml>=0.7.1 # for TastyPie when using xml; not a declared dependency
Django>4.2,<5
+django-admin-rangefilter>=0.13.2
django-analytical>=3.1.0
django-bootstrap5>=21.3
django-celery-beat>=2.3.0
From 48211414dfc992e4a51f0d2de9183eab5c44dad0 Mon Sep 17 00:00:00 2001
From: rjsparks <10996692+rjsparks@users.noreply.github.com>
Date: Sat, 15 Mar 2025 07:58:06 +0000
Subject: [PATCH 082/460] ci: update base image target version to 20250315T0745
---
dev/build/Dockerfile | 2 +-
dev/build/TARGET_BASE | 2 +-
2 files changed, 2 insertions(+), 2 deletions(-)
diff --git a/dev/build/Dockerfile b/dev/build/Dockerfile
index 852ba43c80..2b02a091c5 100644
--- a/dev/build/Dockerfile
+++ b/dev/build/Dockerfile
@@ -1,4 +1,4 @@
-FROM ghcr.io/ietf-tools/datatracker-app-base:20250128T1728
+FROM ghcr.io/ietf-tools/datatracker-app-base:20250315T0745
LABEL maintainer="IETF Tools Team "
ENV DEBIAN_FRONTEND=noninteractive
diff --git a/dev/build/TARGET_BASE b/dev/build/TARGET_BASE
index fbc9426744..e6f490b168 100644
--- a/dev/build/TARGET_BASE
+++ b/dev/build/TARGET_BASE
@@ -1 +1 @@
-20250128T1728
+20250315T0745
From 200d2bd164224694c0524e734ea6e46b62f3998a Mon Sep 17 00:00:00 2001
From: Russ Housley
Date: Sat, 15 Mar 2025 04:23:35 -0400
Subject: [PATCH 083/460] fix(review_info template): display correct date for
I-D to be reviewed
---
ietf/templates/doc/review/request_info.html | 7 +++++--
1 file changed, 5 insertions(+), 2 deletions(-)
diff --git a/ietf/templates/doc/review/request_info.html b/ietf/templates/doc/review/request_info.html
index ee46916b43..9ad126d59e 100644
--- a/ietf/templates/doc/review/request_info.html
+++ b/ietf/templates/doc/review/request_info.html
@@ -96,7 +96,7 @@
{% endif %}
- {% if doc.time %}
+ {% if review_req.doc.time %}
{% endif %}
+ {% endfor %}
{% endblock %}
{% block js %}
From 73abdcc29798850a86ddeabdd65b824adcc62eb6 Mon Sep 17 00:00:00 2001
From: Rudi Matz
Date: Tue, 18 Mar 2025 22:25:10 -0400
Subject: [PATCH 091/460] feat(agenda): add preliminary date when no agenda
available (#8690)
* feat(meetings): add preliminary agenda date when no agenda available
* test: adapt test for additional field for preliminary agenda date
---------
Co-authored-by: Rudi Matz
---
client/agenda/AgendaScheduleList.vue | 1 +
ietf/meeting/tests_views.py | 5 +++--
ietf/meeting/views.py | 8 ++++++--
3 files changed, 10 insertions(+), 4 deletions(-)
diff --git a/client/agenda/AgendaScheduleList.vue b/client/agenda/AgendaScheduleList.vue
index ab0f6e0184..369025d5da 100644
--- a/client/agenda/AgendaScheduleList.vue
+++ b/client/agenda/AgendaScheduleList.vue
@@ -15,6 +15,7 @@
td(:colspan='pickerModeActive ? 6 : 5')
i.bi.bi-exclamation-triangle.me-2
span(v-if='agendaStore.searchVisible && agendaStore.searchText') No event matching your search query.
+ span(v-else-if='agendaStore.meeting.prelimAgendaDate') A preliminary agenda is expected to be released on {{ agendaStore.meeting.prelimAgendaDate }}
span(v-else) Nothing to display
tr(
v-for='item of meetingEvents'
diff --git a/ietf/meeting/tests_views.py b/ietf/meeting/tests_views.py
index 111584cc56..0f91986f77 100644
--- a/ietf/meeting/tests_views.py
+++ b/ietf/meeting/tests_views.py
@@ -233,6 +233,7 @@ def test_meeting_agenda(self):
session.save()
slot = TimeSlot.objects.get(sessionassignments__session=session,sessionassignments__schedule=meeting.schedule)
meeting.timeslot_set.filter(type_id="break").update(show_location=False)
+ meeting.importantdate_set.create(name_id='prelimagenda',date=date_today() + datetime.timedelta(days=20))
#
self.write_materials_files(meeting, session)
#
@@ -262,7 +263,8 @@ def test_meeting_agenda(self):
"updated": generated_data.get("meeting").get("updated"), # Just expect the value to exist
"timezone": meeting.time_zone,
"infoNote": meeting.agenda_info_note,
- "warningNote": meeting.agenda_warning_note
+ "warningNote": meeting.agenda_warning_note,
+ "prelimAgendaDate": (date_today() + datetime.timedelta(days=20)).isoformat()
},
"categories": generated_data.get("categories"), # Just expect the value to exist
"isCurrentMeeting": True,
@@ -9341,4 +9343,3 @@ def test_bluesheet_data(self):
{"name": attended_with_affil.person.plain_name(), "affiliation": "Somewhere"},
]
)
-
diff --git a/ietf/meeting/views.py b/ietf/meeting/views.py
index d02ae40902..6a73059d92 100644
--- a/ietf/meeting/views.py
+++ b/ietf/meeting/views.py
@@ -59,7 +59,7 @@
from ietf.ietfauth.utils import role_required, has_role, user_is_person
from ietf.mailtrigger.utils import gather_address_lists
from ietf.meeting.models import Meeting, Session, Schedule, FloorPlan, SessionPresentation, TimeSlot, SlideSubmission, Attended
-from ietf.meeting.models import SessionStatusName, SchedulingEvent, SchedTimeSessAssignment, Room, TimeSlotTypeName
+from ietf.meeting.models import ImportantDate, SessionStatusName, SchedulingEvent, SchedTimeSessAssignment, Room, TimeSlotTypeName
from ietf.meeting.forms import ( CustomDurationField, SwapDaysForm, SwapTimeslotsForm, ImportMinutesForm,
TimeSlotCreateForm, TimeSlotEditForm, SessionCancelForm, SessionEditForm )
from ietf.meeting.helpers import get_person_by_email, get_schedule_by_name
@@ -1709,6 +1709,9 @@ def generate_agenda_data(num=None, force_refresh=False):
# Get Floor Plans
floors = FloorPlan.objects.filter(meeting=meeting).order_by('order')
+ # Get Preliminary Agenda Date
+ prelimAgendaDate = ImportantDate.objects.filter(name_id="prelimagenda", meeting=meeting).first()
+
result = {
"meeting": {
"number": schedule.meeting.number,
@@ -1718,7 +1721,8 @@ def generate_agenda_data(num=None, force_refresh=False):
"updated": updated,
"timezone": meeting.time_zone,
"infoNote": schedule.meeting.agenda_info_note,
- "warningNote": schedule.meeting.agenda_warning_note
+ "warningNote": schedule.meeting.agenda_warning_note,
+ "prelimAgendaDate": prelimAgendaDate.date.isoformat() if prelimAgendaDate else ""
},
"categories": filter_organizer.get_filter_categories(),
"isCurrentMeeting": is_current_meeting,
From 3356505dc89e2a372f54c3ed1da472c3a11bc6de Mon Sep 17 00:00:00 2001
From: Matthew Holloway
Date: Wed, 19 Mar 2025 15:28:17 +1300
Subject: [PATCH 092/460] fix(agenda): Agenda 'now' fallback to next event if
there is no current event (#8693)
* fix: agenda now fallback to next event if there is no current event
* chore: agenda goto now PR feedback
* chore: simplifying 'agenda goto now' next event logic
* chore: simplifying 'agenda goto now' nextEvent var
---
client/agenda/AgendaMobileBar.vue | 8 ++++----
client/agenda/AgendaQuickAccess.vue | 8 ++++----
client/agenda/AgendaScheduleList.vue | 16 ++++++++++------
client/agenda/store.js | 22 ++++++++++++++++++++++
4 files changed, 40 insertions(+), 14 deletions(-)
diff --git a/client/agenda/AgendaMobileBar.vue b/client/agenda/AgendaMobileBar.vue
index 63611e21c2..43480bedd3 100644
--- a/client/agenda/AgendaMobileBar.vue
+++ b/client/agenda/AgendaMobileBar.vue
@@ -124,11 +124,11 @@ const downloadIcsOptions = [
function jumpToDay (dayId) {
if (dayId === 'now') {
- const lastEventId = agendaStore.findCurrentEventId()
- if (lastEventId) {
- document.getElementById(`agenda-rowid-${lastEventId}`)?.scrollIntoView(true)
+ const nowEventId = agendaStore.findNowEventId()
+ if (nowEventId) {
+ document.getElementById(`agenda-rowid-${nowEventId}`)?.scrollIntoView(true)
} else {
- message.warning('There is no event happening right now.')
+ message.warning('There is no event happening right now or in the future.')
}
} else {
document.getElementById(dayId)?.scrollIntoView(true)
diff --git a/client/agenda/AgendaQuickAccess.vue b/client/agenda/AgendaQuickAccess.vue
index b226d09c60..c9412f6663 100644
--- a/client/agenda/AgendaQuickAccess.vue
+++ b/client/agenda/AgendaQuickAccess.vue
@@ -204,12 +204,12 @@ function scrollToDay (daySlug, ev) {
}
function scrollToNow (ev) {
- const lastEventId = agendaStore.findCurrentEventId()
+ const nowEventId = agendaStore.findNowEventId()
- if (lastEventId) {
- document.getElementById(`agenda-rowid-${lastEventId}`)?.scrollIntoView(true)
+ if (nowEventId) {
+ document.getElementById(`agenda-rowid-${nowEventId}`)?.scrollIntoView(true)
} else {
- message.warning('There is no event happening right now.')
+ message.warning('There is no event happening right now or in the future.')
}
}
diff --git a/client/agenda/AgendaScheduleList.vue b/client/agenda/AgendaScheduleList.vue
index 369025d5da..5a971c25cf 100644
--- a/client/agenda/AgendaScheduleList.vue
+++ b/client/agenda/AgendaScheduleList.vue
@@ -591,10 +591,10 @@ function renderLinkLabel (opt) {
function recalculateRedLine () {
state.currentMinute = DateTime.local().minute
- const lastEventId = agendaStore.findCurrentEventId()
+ const currentEventId = agendaStore.findCurrentEventId()
- if (lastEventId) {
- state.redhandOffset = document.getElementById(`agenda-rowid-${lastEventId}`)?.offsetTop || 0
+ if (currentEventId) {
+ state.redhandOffset = document.getElementById(`agenda-rowid-${currentEventId}`)?.offsetTop || 0
} else {
state.redhandOffset = 0
}
@@ -615,9 +615,13 @@ function recalculateRedLine () {
return
}
unsubscribe() // we only need to scroll once, so unsubscribe from future updates
- if(window.location.hash === "#now") {
- const lastEventId = agendaStore.findCurrentEventId()
- document.getElementById(`agenda-rowid-${lastEventId}`)?.scrollIntoView(true)
+ if (window.location.hash === "#now") {
+ const nowEventId = agendaStore.findNowEvent()
+ if (nowEventId) {
+ document.getElementById(`agenda-rowid-${nowEventId}`)?.scrollIntoView(true)
+ } else {
+ message.warning('There is no event happening right now or in the future.')
+ }
} else if(window.location.hash.startsWith(`#${daySlugPrefix}`)) {
document.getElementById(window.location.hash.substring(1))?.scrollIntoView(true)
}
diff --git a/client/agenda/store.js b/client/agenda/store.js
index 71c1219725..359c5fbf05 100644
--- a/client/agenda/store.js
+++ b/client/agenda/store.js
@@ -230,6 +230,28 @@ export const useAgendaStore = defineStore('agenda', {
return lastEvent.id || null
},
+ findNowEventId () {
+ const currentEventId = this.findCurrentEventId()
+
+ if (currentEventId) {
+ return currentEventId
+ }
+
+ // if there isn't a current event then instead find the next event
+
+ const current = (this.nowDebugDiff ? DateTime.local().minus(this.nowDebugDiff) : DateTime.local()).setZone(this.timezone)
+
+ // -> Find next event after current time
+ let nextEventId = undefined
+ for(const sh of this.scheduleAdjusted) {
+ if (sh.adjustedStart > current) {
+ nextEventId = sh.id
+ break
+ }
+ }
+
+ return nextEventId || null
+ },
hideLoadingScreen () {
// -> Hide loading screen
const loadingRef = document.querySelector('#app-loading')
From c3a296fdb559b015f8e4d457f56fea137e10cf37 Mon Sep 17 00:00:00 2001
From: Nicolas Giard
Date: Wed, 19 Mar 2025 04:56:59 -0400
Subject: [PATCH 093/460] fix(agenda): handle calendar view events that spread
across multiple days (#8685)
---
client/agenda/AgendaScheduleCalendar.vue | 8 ++++++--
1 file changed, 6 insertions(+), 2 deletions(-)
diff --git a/client/agenda/AgendaScheduleCalendar.vue b/client/agenda/AgendaScheduleCalendar.vue
index 9b56b7f5a7..6701ddabd7 100644
--- a/client/agenda/AgendaScheduleCalendar.vue
+++ b/client/agenda/AgendaScheduleCalendar.vue
@@ -187,6 +187,7 @@ function refreshData () {
let earliestDate = DateTime.fromISO('2200-01-01')
let latestDate = DateTime.fromISO('1990-01-01')
let nowDate = DateTime.now()
+ let hasCrossDayEvents = false
calendarOptions.events = agendaStore.scheduleAdjusted.map(ev => {
// -> Determine boundaries
@@ -202,6 +203,9 @@ function refreshData () {
if (ev.adjustedEnd < latestDate) {
latestDate = ev.adjustedEnd
}
+ if (ev.adjustedStart.day !== ev.adjustedEnd.day) {
+ hasCrossDayEvents = true
+ }
// -> Build event object
return {
id: ev.id,
@@ -214,8 +218,8 @@ function refreshData () {
})
// -> Display settings
- calendarOptions.slotMinTime = `${earliestHour.toString().padStart(2, '0')}:00:00`
- calendarOptions.slotMaxTime = `${latestHour.toString().padStart(2, '0')}:00:00`
+ calendarOptions.slotMinTime = hasCrossDayEvents ? '00:00:00' : `${earliestHour.toString().padStart(2, '0')}:00:00`
+ calendarOptions.slotMaxTime = hasCrossDayEvents ? '23:59:59' : `${latestHour.toString().padStart(2, '0')}:00:00`
calendarOptions.validRange.start = earliestDate.minus({ days: 1 }).toISODate()
calendarOptions.validRange.end = latestDate.plus({ days: 1 }).toISODate()
// calendarOptions.scrollTime = `${earliestHour.toString().padStart(2, '0')}:00:00`
From fde8136df53b3b9d2666674e16e15a7edf7e0ed1 Mon Sep 17 00:00:00 2001
From: Rich Salz
Date: Wed, 19 Mar 2025 05:06:47 -0400
Subject: [PATCH 094/460] chore: Add draft name to review-completed email
(#8676)
* chore: Add draft name to review-completed email
Fixes: #7866
* fix: typo
* fix: show title and add test
---------
Co-authored-by: Robert Sparks
---
ietf/doc/tests_review.py | 5 ++++-
ietf/templates/review/completed_review.txt | 4 +++-
2 files changed, 7 insertions(+), 2 deletions(-)
diff --git a/ietf/doc/tests_review.py b/ietf/doc/tests_review.py
index e93bc02181..13ddbc22ba 100644
--- a/ietf/doc/tests_review.py
+++ b/ietf/doc/tests_review.py
@@ -902,7 +902,10 @@ def test_complete_review_enter_content(self):
self.assertEqual(len(outbox), 1)
self.assertIn(assignment.review_request.team.list_email, outbox[0]["To"])
- self.assertIn("This is a review", get_payload_text(outbox[0]))
+ payload = get_payload_text(outbox[0])
+ self.assertIn("This is a review", payload)
+ self.assertIn(f"Document: {assignment.review_request.doc.name}", payload)
+ self.assertIn(f"Title: {assignment.review_request.doc.title}", payload)
self.assertIn(settings.MAILING_LIST_ARCHIVE_URL, assignment.review.external_url)
diff --git a/ietf/templates/review/completed_review.txt b/ietf/templates/review/completed_review.txt
index bdbe321ca1..7d10d8bf13 100644
--- a/ietf/templates/review/completed_review.txt
+++ b/ietf/templates/review/completed_review.txt
@@ -1,7 +1,9 @@
{% load ietf_filters %}{% autoescape off %}{% filter maybewordwrap:80 %}{% if assignment.state_id == "part-completed" %}
Review is partially done. Another assignment may be needed to complete it.
-{% endif %}Reviewer: {{ assignment.reviewer.person }}
+{% endif %}Document: {{ assignment.review_request.doc.name }}
+Title: {{ assignment.review_request.doc.title }}
+Reviewer: {{ assignment.reviewer.person }}
Review result: {{ assignment.result.name }}
{{ content }}
From 2d974ed09b8e9ff93805fb06cfa65ab6f814e261 Mon Sep 17 00:00:00 2001
From: Matthew Holloway
Date: Fri, 21 Mar 2025 13:45:27 +1300
Subject: [PATCH 095/460] feat(agenda): Agenda meeting materials in overflow
menu (#8698)
* feat: agenda meeting materials in overflow menu
* chore: fixing agenda meeting materials tests
* chore: fix agenda meeting material tests
* chore: fixing agenda meeting materials test
* fix: changing to -mat suffix rather than -meeting-materials
* fix: agenda meeting materials lnk -> btn
* fix: click handler on agenda meeting materials
---
client/agenda/AgendaScheduleList.vue | 60 ++++++++++++++++++++-----
playwright/tests/meeting/agenda.spec.js | 8 ++--
2 files changed, 52 insertions(+), 16 deletions(-)
diff --git a/client/agenda/AgendaScheduleList.vue b/client/agenda/AgendaScheduleList.vue
index 5a971c25cf..1e50df5fb4 100644
--- a/client/agenda/AgendaScheduleList.vue
+++ b/client/agenda/AgendaScheduleList.vue
@@ -121,20 +121,12 @@
:options='item.links'
key-field='id'
:render-icon='renderLinkIcon'
- :render-label='renderLinkLabel'
+ :render-label='renderLink'
)
n-button(size='tiny')
i.bi.bi-three-dots
.agenda-table-cell-links-buttons(v-else-if='item.links && item.links.length > 0')
- template(v-if='item.flags.agenda')
- n-popover
- template(#trigger)
- i.bi.bi-collection(
- :id='`btn-lnk-` + item.key + `-mat`'
- @click='showMaterials(item.key)'
- )
- span Show meeting materials
- template(v-else-if='item.type === `regular`')
+ template(v-if='!item.flags.agenda && item.type === `regular`')
n-popover
template(#trigger)
i.no-meeting-materials
@@ -143,7 +135,16 @@
span No meeting materials yet.
n-popover(v-for='lnk of item.links', :key='lnk.id')
template(#trigger)
+ button(
+ v-if="lnk.click"
+ type="button"
+ :id='`btn-` + lnk.id'
+ @click='lnk.click'
+ :aria-label='lnk.label'
+ :class='`border-0 bg-transparent text-` + lnk.color'
+ ): i.bi(:class='`bi-` + lnk.icon')
a(
+ v-else
:id='`btn-` + lnk.id'
:href='lnk.href'
:aria-label='lnk.label'
@@ -281,13 +282,28 @@ const meetingEvents = computed(() => {
}
acc.lastTypeName = typeName
- // -> Populate event links
+ //
+ /**
+ * -> Populate event menu items
+ *
+ * links is an array of either,
+ * 1. { href: "...", click: undefined, ...sharedProps }
+ * 2. { click: () => {...}, href: undefined, ...sharedProps }
+ */
const links = []
const typesWithLinks = ['regular', 'plenary', 'other']
const purposesWithoutLinks = ['admin', 'closed_meeting', 'officehours', 'social']
if (item.flags.showAgenda || (typesWithLinks.includes(item.type) && !purposesWithoutLinks.includes(item.purpose))) {
if (item.flags.agenda) {
// -> Meeting Materials
+ links.push({
+ id: `btn-${item.id}-mat`,
+ label: 'Show meeting materials',
+ icon: 'collection',
+ href: undefined,
+ click: () => showMaterials(item.id),
+ color: 'black'
+ })
links.push({
id: `lnk-${item.id}-tar`,
label: 'Download meeting materials as .tar archive',
@@ -585,7 +601,11 @@ function renderLinkIcon (opt) {
return h('i', { class: `bi bi-${opt.icon} text-${opt.color}` })
}
-function renderLinkLabel (opt) {
+function renderLink (opt) {
+ if (opt.click) {
+ return h('button', { type: 'button', class: 'overflow-button', onClick: opt.click }, opt.label)
+ }
+
return h('a', { href: opt.href, target: '_blank' }, opt.label)
}
@@ -1564,6 +1584,22 @@ onBeforeUnmount(() => {
}
}
+.overflow-button {
+ font-size: inherit;
+ padding: 0;
+ border: 0;
+ background: transparent;
+
+ &:before {
+ content: "";
+ position: absolute;
+ left: 0;
+ right: 0;
+ top: 0;
+ bottom: 0;
+ }
+}
+
@keyframes fadeInAnim {
0% {
opacity: 0;
diff --git a/playwright/tests/meeting/agenda.spec.js b/playwright/tests/meeting/agenda.spec.js
index e6c6e17031..b7e3df5718 100644
--- a/playwright/tests/meeting/agenda.spec.js
+++ b/playwright/tests/meeting/agenda.spec.js
@@ -275,7 +275,7 @@ test.describe('past - desktop', () => {
const eventButtons = row.locator('.agenda-table-cell-links > .agenda-table-cell-links-buttons')
if (event.flags.agenda) {
// Show meeting materials button
- await expect(eventButtons.locator('i.bi.bi-collection')).toBeVisible()
+ await expect(eventButtons.locator(`#btn-btn-${event.id}-mat`)).toBeVisible()
// ZIP materials button
await expect(eventButtons.locator(`#btn-lnk-${event.id}-tar`)).toHaveAttribute('href', `/meeting/${meetingData.meeting.number}/agenda/${event.acronym}-drafts.tgz`)
await expect(eventButtons.locator(`#btn-lnk-${event.id}-tar > i.bi`)).toBeVisible()
@@ -425,7 +425,7 @@ test.describe('past - desktop', () => {
})
})
// Open dialog
- await page.locator(`#agenda-rowid-${event.id} #btn-lnk-${event.id}-mat`).click()
+ await page.locator(`#agenda-rowid-${event.id} #btn-btn-${event.id}-mat`).click()
await expect(page.locator('.agenda-eventdetails')).toBeVisible()
// Header
await expect(page.locator('.agenda-eventdetails .n-card-header__main > .detail-header > .bi')).toBeVisible()
@@ -507,7 +507,7 @@ test.describe('past - desktop', () => {
})
})
// Open dialog
- await page.locator(`#btn-lnk-${event.id}-mat`).click()
+ await page.locator(`#btn-btn-${event.id}-mat`).click()
await expect(page.locator('.agenda-eventdetails')).toBeVisible()
// Slides Tab
await page.locator('.agenda-eventdetails .detail-nav > a').nth(1).click()
@@ -1158,7 +1158,7 @@ test.describe('future - desktop', () => {
if (event.flags.showAgenda || (['regular', 'plenary', 'other'].includes(event.type) && !['admin', 'closed_meeting', 'officehours', 'social'].includes(event.purpose))) {
if (event.flags.agenda) {
// Show meeting materials button
- await expect(eventButtons.locator('i.bi.bi-collection')).toBeVisible()
+ await expect(eventButtons.locator(`#btn-btn-${event.id}-mat`)).toBeVisible()
// ZIP materials button
await expect(eventButtons.locator(`#btn-lnk-${event.id}-tar`)).toHaveAttribute('href', `/meeting/${meetingData.meeting.number}/agenda/${event.acronym}-drafts.tgz`)
await expect(eventButtons.locator(`#btn-lnk-${event.id}-tar > i.bi`)).toBeVisible()
From 27ea6234a8b0b74062c0a57e849e316087c849f3 Mon Sep 17 00:00:00 2001
From: Nicolas Giard
Date: Fri, 21 Mar 2025 04:43:33 -0400
Subject: [PATCH 096/460] feat(agenda): keep video client + onsite tool buttons
for plenary for rest of day (#8706)
---
client/agenda/AgendaScheduleList.vue | 17 ++++
playwright/helpers/common.js | 24 ++++++
playwright/helpers/meeting.js | 3 +
playwright/tests/meeting/agenda.spec.js | 102 ++++++++++++++++++++----
4 files changed, 129 insertions(+), 17 deletions(-)
diff --git a/client/agenda/AgendaScheduleList.vue b/client/agenda/AgendaScheduleList.vue
index 1e50df5fb4..86c91bed85 100644
--- a/client/agenda/AgendaScheduleList.vue
+++ b/client/agenda/AgendaScheduleList.vue
@@ -464,6 +464,23 @@ const meetingEvents = computed(() => {
color: 'purple'
})
}
+ // -> Keep showing video client / on-site tool for Plenary until end of day, in case it goes over the planned time range
+ if (item.type === 'plenary' && item.adjustedEnd.day === current.day) {
+ links.push({
+ id: `lnk-${item.id}-video`,
+ label: 'Full Client with Video',
+ icon: 'camera-video',
+ href: item.links.videoStream,
+ color: 'purple'
+ })
+ links.push({
+ id: `lnk-${item.id}-onsitetool`,
+ label: 'Onsite tool',
+ icon: 'telephone-outbound',
+ href: item.links.onsiteTool,
+ color: 'teal'
+ })
+ }
}
}
}
diff --git a/playwright/helpers/common.js b/playwright/helpers/common.js
index 5ba39ba022..c4dd7e2640 100644
--- a/playwright/helpers/common.js
+++ b/playwright/helpers/common.js
@@ -13,5 +13,29 @@ module.exports = {
return rect.top < bottom && rect.top > 0 - rect.height
})
+ },
+ /**
+ * Override page DateTime with a new value
+ *
+ * @param {Object} page Page object
+ * @param {Object} dateTimeOverride New DateTime object
+ */
+ overridePageDateTime: async (page, dateTimeOverride) => {
+ await page.addInitScript(`{
+ // Extend Date constructor to default to fixed time
+ Date = class extends Date {
+ constructor(...args) {
+ if (args.length === 0) {
+ super(${dateTimeOverride.toMillis()});
+ } else {
+ super(...args);
+ }
+ }
+ }
+ // Override Date.now() to start from fixed time
+ const __DateNowOffset = ${dateTimeOverride.toMillis()} - Date.now();
+ const __DateNow = Date.now;
+ Date.now = () => __DateNow() + __DateNowOffset;
+ }`)
}
}
diff --git a/playwright/helpers/meeting.js b/playwright/helpers/meeting.js
index 9722ffc68b..634ca2e8c6 100644
--- a/playwright/helpers/meeting.js
+++ b/playwright/helpers/meeting.js
@@ -609,6 +609,9 @@ module.exports = {
startDateTime: curDay.set({ hour: 17, minute: 30 }),
duration: '2h',
type: 'plenary',
+ showAgenda: true,
+ hasAgenda: true,
+ hasRecordings: true,
...findAreaGroup('ietf-plenary', categories[2])
}, floors))
}
diff --git a/playwright/tests/meeting/agenda.spec.js b/playwright/tests/meeting/agenda.spec.js
index b7e3df5718..412a3fe9b8 100644
--- a/playwright/tests/meeting/agenda.spec.js
+++ b/playwright/tests/meeting/agenda.spec.js
@@ -1213,7 +1213,7 @@ test.describe('future - desktop', () => {
await expect(eventButtons.locator(`#btn-lnk-${event.id}-remotecallin`)).toHaveAttribute('href', remoteCallInUrl)
await expect(eventButtons.locator(`#btn-lnk-${event.id}-remotecallin > i.bi`)).toBeVisible()
}
- // calendar
+ // Calendar
if (event.links.calendar) {
await expect(eventButtons.locator(`#btn-lnk-${event.id}-calendar`)).toHaveAttribute('href', event.links.calendar)
await expect(eventButtons.locator(`#btn-lnk-${event.id}-calendar > i.bi`)).toBeVisible()
@@ -1278,22 +1278,7 @@ test.describe('live - desktop', () => {
})
// Override Date in page to fixed time
- await page.addInitScript(`{
- // Extend Date constructor to default to fixed time
- Date = class extends Date {
- constructor(...args) {
- if (args.length === 0) {
- super(${currentTime.toMillis()});
- } else {
- super(...args);
- }
- }
- }
- // Override Date.now() to start from fixed time
- const __DateNowOffset = ${currentTime.toMillis()} - Date.now();
- const __DateNow = Date.now;
- Date.now = () => __DateNow() + __DateNowOffset;
- }`)
+ await commonHelper.overridePageDateTime(page, currentTime)
// Visit agenda page and await Meeting Data API call to complete
await Promise.all([
@@ -1348,6 +1333,89 @@ test.describe('live - desktop', () => {
})
})
+// ====================================================================
+// AGENDA (live meeting) | DESKTOP viewport | Plenary Extended Time Buttons
+// ====================================================================
+
+test.describe('live - desktop - plenary extended time buttons', () => {
+ let meetingData
+ let plenarySessionId
+
+ test.beforeAll(async () => {
+ // Generate meeting data
+ meetingData = meetingHelper.generateAgendaResponse({ dateMode: 'current' })
+ plenarySessionId = meetingData.schedule.find(s => s.type === 'plenary').id
+ })
+
+ test.beforeEach(async ({ page }) => {
+ // Intercept Meeting Data API
+ await page.route(`**/api/meeting/${meetingData.meeting.number}/agenda-data`, route => {
+ route.fulfill({
+ status: 200,
+ contentType: 'application/json',
+ body: JSON.stringify(meetingData)
+ })
+ })
+
+ await page.setViewportSize({
+ width: viewports.desktop[0],
+ height: viewports.desktop[1]
+ })
+ })
+
+ // -> BUTTONS PRESENT AFTER EVENT, SAME DAY
+
+ test('same day - after event', async ({ page }) => {
+ // Override Date in page to fixed time
+ const currentTime = DateTime.fromISO('2022-02-01T13:45:15', { zone: 'Asia/Tokyo' }).plus({ days: 1 }).set({ hour: 20, minute: 30 })
+ await commonHelper.overridePageDateTime(page, currentTime)
+
+ // Visit agenda page and await Meeting Data API call to complete
+ await Promise.all([
+ page.waitForResponse(`**/api/meeting/${meetingData.meeting.number}/agenda-data`),
+ page.goto(`/meeting/${meetingData.meeting.number}/agenda`)
+ ])
+
+ // Wait for page to be ready
+ await page.locator('.agenda h1').waitFor({ state: 'visible' })
+ await setTimeout(500)
+
+ // Check for plenary event
+ await expect(page.locator('.agenda .agenda-table-display-event.agenda-table-type-plenary')).toBeVisible()
+ await page.locator('.agenda .agenda-table-display-event.agenda-table-type-plenary').scrollIntoViewIfNeeded()
+
+ // Check for full video client + on-site tool
+ await expect(page.locator(`.agenda .agenda-table-display-event.agenda-table-type-plenary .agenda-table-cell-links-buttons a#btn-lnk-${plenarySessionId}-video`)).toBeVisible()
+ await expect(page.locator(`.agenda .agenda-table-display-event.agenda-table-type-plenary .agenda-table-cell-links-buttons a#btn-lnk-${plenarySessionId}-onsitetool`)).toBeVisible()
+ })
+
+ // -> BUTTONS NO LONGER PRESENT AFTER EVENT, NEXT DAY
+
+ test('next day - after event', async ({ page }) => {
+ // Override Date in page to fixed time
+ const currentTime = DateTime.fromISO('2022-02-01T13:45:15', { zone: 'Asia/Tokyo' }).plus({ days: 2 }).set({ hour: 2, minute: 30 })
+ await commonHelper.overridePageDateTime(page, currentTime)
+
+ // Visit agenda page and await Meeting Data API call to complete
+ await Promise.all([
+ page.waitForResponse(`**/api/meeting/${meetingData.meeting.number}/agenda-data`),
+ page.goto(`/meeting/${meetingData.meeting.number}/agenda`)
+ ])
+
+ // Wait for page to be ready
+ await page.locator('.agenda h1').waitFor({ state: 'visible' })
+ await setTimeout(500)
+
+ // Check for plenary event
+ await expect(page.locator('.agenda .agenda-table-display-event.agenda-table-type-plenary')).toBeVisible()
+ await page.locator('.agenda .agenda-table-display-event.agenda-table-type-plenary').scrollIntoViewIfNeeded()
+
+ // Check for full video client + on-site tool
+ await expect(page.locator(`.agenda .agenda-table-display-event.agenda-table-type-plenary .agenda-table-cell-links-buttons a#btn-lnk-${plenarySessionId}-video`)).not.toBeVisible()
+ await expect(page.locator(`.agenda .agenda-table-display-event.agenda-table-type-plenary .agenda-table-cell-links-buttons a#btn-lnk-${plenarySessionId}-onsitetool`)).not.toBeVisible()
+ })
+})
+
// ====================================================================
// AGENDA (past meeting) | SMALL DESKTOP/TABLET/MOBILE viewports
// ====================================================================
From ed2059a5813130cbf31ce77c0978a35751addc6c Mon Sep 17 00:00:00 2001
From: Jennifer Richards
Date: Fri, 21 Mar 2025 15:45:26 +0700
Subject: [PATCH 097/460] fix: add blank=true to SlideSubmission.doc (#8688)
Allows the admin interface to save an instance with a null doc reference, which the database schema (null=True) already permitted.
---
.../0011_alter_slidesubmission_doc.py | 26 +++++++++++++++++++
ietf/meeting/models.py | 2 +-
2 files changed, 27 insertions(+), 1 deletion(-)
create mode 100644 ietf/meeting/migrations/0011_alter_slidesubmission_doc.py
diff --git a/ietf/meeting/migrations/0011_alter_slidesubmission_doc.py b/ietf/meeting/migrations/0011_alter_slidesubmission_doc.py
new file mode 100644
index 0000000000..b9cbc58e99
--- /dev/null
+++ b/ietf/meeting/migrations/0011_alter_slidesubmission_doc.py
@@ -0,0 +1,26 @@
+# Generated by Django 4.2.19 on 2025-03-17 09:37
+
+from django.db import migrations
+import django.db.models.deletion
+import ietf.utils.models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ("doc", "0025_storedobject_storedobject_unique_name_per_store"),
+ ("meeting", "0010_alter_floorplan_image_alter_meetinghost_logo"),
+ ]
+
+ operations = [
+ migrations.AlterField(
+ model_name="slidesubmission",
+ name="doc",
+ field=ietf.utils.models.ForeignKey(
+ blank=True,
+ null=True,
+ on_delete=django.db.models.deletion.SET_NULL,
+ to="doc.document",
+ ),
+ ),
+ ]
diff --git a/ietf/meeting/models.py b/ietf/meeting/models.py
index 5284420731..84d151c310 100644
--- a/ietf/meeting/models.py
+++ b/ietf/meeting/models.py
@@ -1385,7 +1385,7 @@ class SlideSubmission(models.Model):
apply_to_all = models.BooleanField(default=False)
submitter = ForeignKey(Person)
status = ForeignKey(SlideSubmissionStatusName, null=True, default='pending', on_delete=models.SET_NULL)
- doc = ForeignKey(Document, null=True, on_delete=models.SET_NULL)
+ doc = ForeignKey(Document, blank=True, null=True, on_delete=models.SET_NULL)
def staged_filepath(self):
return os.path.join(settings.SLIDE_STAGING_PATH , self.filename)
From 37b4448b3d390b102b7e14890880bcfd1e965bcc Mon Sep 17 00:00:00 2001
From: Benson Muite
Date: Fri, 21 Mar 2025 11:52:18 +0300
Subject: [PATCH 098/460] fix: Clarify legacy description in RFC banner (#8424)
(#8670)
---
ietf/templates/doc/disclaimer.html | 10 ++++++++--
1 file changed, 8 insertions(+), 2 deletions(-)
diff --git a/ietf/templates/doc/disclaimer.html b/ietf/templates/doc/disclaimer.html
index 31ce6c397e..db4c42ed68 100644
--- a/ietf/templates/doc/disclaimer.html
+++ b/ietf/templates/doc/disclaimer.html
@@ -3,12 +3,18 @@
{% load ietf_filters %}
{% origin %}
{% if doc.type_id == "rfc" %}
- {% if doc.stream.slug != "ietf" and doc.std_level.slug|default:"unk" not in "bcp,ds,ps,std"|split:"," %}
+ {% if doc.stream.slug != "ietf" and doc.stream.desc != "Legacy" and doc.std_level.slug|default:"unk" not in "bcp,ds,ps,std"|split:"," %}
This RFC was published on the {{ doc.stream.desc }} stream.
This RFC is not endorsed by the IETF and has no formal standing in the
IETF standards process.
+ {% elif doc.stream.slug != "ietf" and doc.stream.desc == "Legacy" and doc.std_level.slug|default:"unk" not in "bcp,ds,ps,std"|split:"," %}
+
+ This RFC is labeled as "{{ doc.stream.desc }}"; it was published before a formal source was recorded.
+ This RFC is not endorsed by the IETF and has no formal standing in the
+ IETF standards process.
+
{% endif %}
{% elif doc|is_in_stream %}
{% if doc.stream.slug != "ietf" and doc.std_level.slug|default:"unk" not in "bcp,ds,ps,std"|split:"," %}
@@ -25,4 +31,4 @@
This I-D is not endorsed by the IETF and has no formal standing in the
IETF standards process.
-{% endif %}
\ No newline at end of file
+{% endif %}
From 8fec17282f5dc7ef28355990e2f74bff2a309002 Mon Sep 17 00:00:00 2001
From: Nicolas Giard
Date: Fri, 21 Mar 2025 19:30:40 -0400
Subject: [PATCH 099/460] ci: Update build-base-app.yml
---
.github/workflows/build-base-app.yml | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/.github/workflows/build-base-app.yml b/.github/workflows/build-base-app.yml
index c8f66a22b7..5f0a0d11b8 100644
--- a/.github/workflows/build-base-app.yml
+++ b/.github/workflows/build-base-app.yml
@@ -51,7 +51,7 @@ jobs:
push: true
tags: |
ghcr.io/ietf-tools/datatracker-app-base:${{ env.IMGVERSION }}
- ghcr.io/ietf-tools/datatracker-app-base:latest
+ ${{ github.ref == 'refs/heads/main' && 'ghcr.io/ietf-tools/datatracker-app-base:latest' || '' }}
- name: Update version references
run: |
@@ -61,6 +61,6 @@ jobs:
- name: Commit CHANGELOG.md
uses: stefanzweifel/git-auto-commit-action@v5
with:
- branch: main
+ branch: ${{ github.ref_name }}
commit_message: 'ci: update base image target version to ${{ env.IMGVERSION }}'
file_pattern: dev/build/Dockerfile dev/build/TARGET_BASE
From 6da36da3fde2721d3885001e4a4174c85579db8e Mon Sep 17 00:00:00 2001
From: Jennifer Richards
Date: Thu, 27 Mar 2025 00:29:48 +0700
Subject: [PATCH 100/460] chore: bump rabbitmq to 3.13 (#8603)
* ci: param for rabbitmq_version
* ci: drop latest tagging from datatracker-mq
* chore: bump rabbitmq to 3.13
* Revert "ci: drop latest tagging from datatracker-mq"
This reverts commit 58cea207c7300727f9cfc19a57859ab16c3b2c01.
---
.github/workflows/build-mq-broker.yml | 15 +++++++++------
k8s/rabbitmq.yaml | 2 +-
2 files changed, 10 insertions(+), 7 deletions(-)
diff --git a/.github/workflows/build-mq-broker.yml b/.github/workflows/build-mq-broker.yml
index 8c6f1e6ae1..85c27c23cc 100644
--- a/.github/workflows/build-mq-broker.yml
+++ b/.github/workflows/build-mq-broker.yml
@@ -8,10 +8,13 @@ on:
- 'dev/mq/**'
- '.github/workflows/build-mq-broker.yml'
- workflow_dispatch:
-
-env:
- RABBITMQ_VERSION: 3.12-alpine
+ workflow_dispatch:
+ inputs:
+ rabbitmq_version:
+ description: 'RabbitMQ Version'
+ default: '3.13-alpine'
+ required: true
+ type: string
jobs:
publish:
@@ -45,7 +48,7 @@ jobs:
file: dev/mq/Dockerfile
platforms: linux/amd64,linux/arm64
push: true
- build-args: RABBITMQ_VERSION=${{ env.RABBITMQ_VERSION }}
+ build-args: RABBITMQ_VERSION=${{ inputs.rabbitmq_version }}
tags: |
- ghcr.io/ietf-tools/datatracker-mq:${{ env.RABBITMQ_VERSION }}
+ ghcr.io/ietf-tools/datatracker-mq:${{ inputs.rabbitmq_version }}
ghcr.io/ietf-tools/datatracker-mq:latest
diff --git a/k8s/rabbitmq.yaml b/k8s/rabbitmq.yaml
index 3cab7ff565..0c8f0705b5 100644
--- a/k8s/rabbitmq.yaml
+++ b/k8s/rabbitmq.yaml
@@ -29,7 +29,7 @@ spec:
# -----------------------------------------------------
# RabbitMQ Container
# -----------------------------------------------------
- - image: "ghcr.io/ietf-tools/datatracker-mq:3.12-alpine"
+ - image: "ghcr.io/ietf-tools/datatracker-mq:3.13-alpine"
imagePullPolicy: Always
name: rabbitmq
ports:
From 9dd25b9aee2ccb565d74d55d90c67bf8388c910e Mon Sep 17 00:00:00 2001
From: Matthew Holloway
Date: Fri, 28 Mar 2025 04:07:53 +1300
Subject: [PATCH 101/460] chore: vite sourcemap (#8719)
---
vite.config.js | 3 ++-
1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/vite.config.js b/vite.config.js
index 41a2cb02e0..bde2b9ed57 100644
--- a/vite.config.js
+++ b/vite.config.js
@@ -16,7 +16,8 @@ export default defineConfig(({ command, mode }) => {
main: 'client/main.js',
embedded: 'client/embedded.js'
}
- }
+ },
+ sourcemap: true
},
cacheDir: '.vite',
plugins: [
From 7e9a46af63da0e45f3ea0e4ccfda69875b282190 Mon Sep 17 00:00:00 2001
From: Robert Sparks
Date: Thu, 27 Mar 2025 13:59:24 -0500
Subject: [PATCH 102/460] feat: move base containers to bookworm (#8710)
---
docker/base.Dockerfile | 17 ++++++++++-------
docker/scripts/app-setup-debian.sh | 16 +++-------------
ietf/submit/tests.py | 4 ++--
3 files changed, 15 insertions(+), 22 deletions(-)
diff --git a/docker/base.Dockerfile b/docker/base.Dockerfile
index e2465f33c2..f364456c7a 100644
--- a/docker/base.Dockerfile
+++ b/docker/base.Dockerfile
@@ -1,4 +1,4 @@
-FROM python:3.9-bullseye
+FROM python:3.9-bookworm
LABEL maintainer="IETF Tools Team "
ENV DEBIAN_FRONTEND=noninteractive
@@ -14,6 +14,9 @@ RUN apt-get install -y --no-install-recommends ca-certificates curl gnupg \
&& mkdir -p /etc/apt/keyrings\
&& curl -fsSL https://deb.nodesource.com/gpgkey/nodesource-repo.gpg.key | gpg --dearmor -o /etc/apt/keyrings/nodesource.gpg
RUN echo "deb [signed-by=/etc/apt/keyrings/nodesource.gpg] https://deb.nodesource.com/node_$NODE_MAJOR.x nodistro main" | tee /etc/apt/sources.list.d/nodesource.list
+RUN echo "Package: nodejs" >> /etc/apt/preferences.d/preferences && \
+ echo "Pin: origin deb.nodesource.com" >> /etc/apt/preferences.d/preferences && \
+ echo "Pin-Priority: 1001" >> /etc/apt/preferences.d/preferences
# Add Docker Source
RUN curl -fsSL https://download.docker.com/linux/debian/gpg | gpg --dearmor -o /usr/share/keyrings/docker-archive-keyring.gpg
@@ -56,12 +59,13 @@ RUN apt-get update --fix-missing && apt-get install -qy --no-install-recommends
libmagic-dev \
libmariadb-dev \
libmemcached-tools \
+ libyang2-tools \
locales \
make \
mariadb-client \
memcached \
nano \
- netcat \
+ netcat-traditional \
nodejs \
pgloader \
pigz \
@@ -77,7 +81,6 @@ RUN apt-get update --fix-missing && apt-get install -qy --no-install-recommends
wget \
xauth \
xvfb \
- yang-tools \
zsh
# Install kramdown-rfc2629 (ruby)
@@ -106,11 +109,11 @@ RUN apt-get autoremove -y && apt-get clean -y && rm -rf /var/lib/apt/lists/* /va
ENV DBUS_SESSION_BUS_ADDRESS=/dev/null
# avoid million NPM install messages
-ENV npm_config_loglevel warn
+ENV npm_config_loglevel=warn
# allow installing when the main user is root
-ENV npm_config_unsafe_perm true
+ENV npm_config_unsafe_perm=true
# disable NPM funding messages
-ENV npm_config_fund false
+ENV npm_config_fund=false
# Set locale to en_US.UTF-8
RUN echo "LC_ALL=en_US.UTF-8" >> /etc/environment && \
@@ -119,7 +122,7 @@ RUN echo "LC_ALL=en_US.UTF-8" >> /etc/environment && \
dpkg-reconfigure locales && \
locale-gen en_US.UTF-8 && \
update-locale LC_ALL en_US.UTF-8
-ENV LC_ALL en_US.UTF-8
+ENV LC_ALL=en_US.UTF-8
# Install idnits
ADD https://raw.githubusercontent.com/ietf-tools/idnits-mirror/main/idnits /usr/local/bin/
diff --git a/docker/scripts/app-setup-debian.sh b/docker/scripts/app-setup-debian.sh
index ddfc351995..ea9cc3fb87 100644
--- a/docker/scripts/app-setup-debian.sh
+++ b/docker/scripts/app-setup-debian.sh
@@ -10,7 +10,6 @@
# Syntax: ./common-debian.sh [install zsh flag] [username] [user UID] [user GID] [upgrade packages flag] [install Oh My Zsh! flag] [Add non-free packages]
set -e
-
INSTALL_ZSH=${1:-"true"}
USERNAME=${2:-"automatic"}
USER_UID=${3:-"automatic"}
@@ -116,18 +115,9 @@ if [ "${PACKAGES_ALREADY_INSTALLED}" != "true" ]; then
# Needed for adding manpages-posix and manpages-posix-dev which are non-free packages in Debian
if [ "${ADD_NON_FREE_PACKAGES}" = "true" ]; then
# Bring in variables from /etc/os-release like VERSION_CODENAME
- . /etc/os-release
- sed -i -E "s/deb http:\/\/(deb|httpredir)\.debian\.org\/debian ${VERSION_CODENAME} main/deb http:\/\/\1\.debian\.org\/debian ${VERSION_CODENAME} main contrib non-free/" /etc/apt/sources.list
- sed -i -E "s/deb-src http:\/\/(deb|httredir)\.debian\.org\/debian ${VERSION_CODENAME} main/deb http:\/\/\1\.debian\.org\/debian ${VERSION_CODENAME} main contrib non-free/" /etc/apt/sources.list
- sed -i -E "s/deb http:\/\/(deb|httpredir)\.debian\.org\/debian ${VERSION_CODENAME}-updates main/deb http:\/\/\1\.debian\.org\/debian ${VERSION_CODENAME}-updates main contrib non-free/" /etc/apt/sources.list
- sed -i -E "s/deb-src http:\/\/(deb|httpredir)\.debian\.org\/debian ${VERSION_CODENAME}-updates main/deb http:\/\/\1\.debian\.org\/debian ${VERSION_CODENAME}-updates main contrib non-free/" /etc/apt/sources.list
- sed -i "s/deb http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}\/updates main/deb http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}\/updates main contrib non-free/" /etc/apt/sources.list
- sed -i "s/deb-src http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}\/updates main/deb http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}\/updates main contrib non-free/" /etc/apt/sources.list
- sed -i "s/deb http:\/\/deb\.debian\.org\/debian ${VERSION_CODENAME}-backports main/deb http:\/\/deb\.debian\.org\/debian ${VERSION_CODENAME}-backports main contrib non-free/" /etc/apt/sources.list
- sed -i "s/deb-src http:\/\/deb\.debian\.org\/debian ${VERSION_CODENAME}-backports main/deb http:\/\/deb\.debian\.org\/debian ${VERSION_CODENAME}-backports main contrib non-free/" /etc/apt/sources.list
- # Handle bullseye location for security https://www.debian.org/releases/bullseye/amd64/release-notes/ch-information.en.html
- sed -i "s/deb http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}-security main/deb http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}-security main contrib non-free/" /etc/apt/sources.list
- sed -i "s/deb-src http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}-security main/deb http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}-security main contrib non-free/" /etc/apt/sources.list
+ . /etc/os-release
+ sed -i -E "s/Components: main/Components: main contrib non-free/" /etc/apt/sources.list.d/debian.sources
+
echo "Running apt-get update..."
apt-get update
package_list="${package_list} manpages-posix manpages-posix-dev"
diff --git a/ietf/submit/tests.py b/ietf/submit/tests.py
index 9a993480cd..6b1c998e76 100644
--- a/ietf/submit/tests.py
+++ b/ietf/submit/tests.py
@@ -1888,7 +1888,7 @@ def test_submit_invalid_yang(self):
r = self.client.get(status_url)
q = PyQuery(r.content)
#
- self.assertContains(r, 'The yang validation returned 1 error')
+ self.assertContains(r, 'The yang validation returned 3 errors')
#
m = q('#yang-validation-message').text()
for command in ['xym', 'pyang', 'yanglint']:
@@ -1898,7 +1898,7 @@ def test_submit_invalid_yang(self):
self.assertIn("draft-yang-testing-invalid-00.txt", m)
self.assertIn("error: syntax error: illegal keyword: ;", m)
if settings.SUBMIT_YANGLINT_COMMAND and os.path.exists(settings.YANGLINT_BINARY):
- self.assertIn("No validation errors", m)
+ self.assertIn('libyang err : Parsing module "ietf-yang-metadata" failed.', m)
def submit_conflicting_submissiondocevent_rev(self, new_rev='01', existing_rev='01'):
"""Test submitting a rev when an equal or later SubmissionDocEvent rev exists
From 431c475060778ccaaf611e0de7a9bcc0c3b93c42 Mon Sep 17 00:00:00 2001
From: rjsparks <10996692+rjsparks@users.noreply.github.com>
Date: Thu, 27 Mar 2025 19:12:31 +0000
Subject: [PATCH 103/460] ci: update base image target version to 20250327T1859
---
dev/build/Dockerfile | 2 +-
dev/build/TARGET_BASE | 2 +-
2 files changed, 2 insertions(+), 2 deletions(-)
diff --git a/dev/build/Dockerfile b/dev/build/Dockerfile
index 2b02a091c5..cc55c92881 100644
--- a/dev/build/Dockerfile
+++ b/dev/build/Dockerfile
@@ -1,4 +1,4 @@
-FROM ghcr.io/ietf-tools/datatracker-app-base:20250315T0745
+FROM ghcr.io/ietf-tools/datatracker-app-base:20250327T1859
LABEL maintainer="IETF Tools Team "
ENV DEBIAN_FRONTEND=noninteractive
diff --git a/dev/build/TARGET_BASE b/dev/build/TARGET_BASE
index e6f490b168..50e8bfd839 100644
--- a/dev/build/TARGET_BASE
+++ b/dev/build/TARGET_BASE
@@ -1 +1 @@
-20250315T0745
+20250327T1859
From 752bc2103146b534e1bb6188c7d54d11f07ba885 Mon Sep 17 00:00:00 2001
From: Rudi Matz
Date: Thu, 27 Mar 2025 18:19:30 -0400
Subject: [PATCH 104/460] feat: highlight unexpected state in AD dashboard
(#8738)
* feat: highlight unexpected state in AD dashboard
* test: add tests for is_unexpected_wg_state
* test: improve tests using WgDraftFactory
---
ietf/doc/templatetags/ietf_filters.py | 13 +++++++++++++
ietf/doc/templatetags/tests_ietf_filters.py | 16 ++++++++++++++++
ietf/templates/doc/search/status_columns.html | 4 ++++
3 files changed, 33 insertions(+)
diff --git a/ietf/doc/templatetags/ietf_filters.py b/ietf/doc/templatetags/ietf_filters.py
index 86507eeaaa..d4adf96a27 100644
--- a/ietf/doc/templatetags/ietf_filters.py
+++ b/ietf/doc/templatetags/ietf_filters.py
@@ -480,6 +480,19 @@ def state(doc, slug):
slug = "%s-stream-%s" % (doc.type_id, doc.stream_id)
return doc.get_state(slug)
+
+@register.filter
+def is_unexpected_wg_state(doc):
+ """Returns a flag indicating whether the document has an unexpected wg state."""
+ if not doc.type_id == "draft":
+ return False
+
+ draft_iesg_state = doc.get_state("draft-iesg")
+ draft_stream_state = doc.get_state("draft-stream-ietf")
+
+ return draft_iesg_state.slug != "idexists" and draft_stream_state is not None and draft_stream_state.slug != "sub-pub"
+
+
@register.filter
def statehelp(state):
"Output help icon with tooltip for state."
diff --git a/ietf/doc/templatetags/tests_ietf_filters.py b/ietf/doc/templatetags/tests_ietf_filters.py
index f018b7d9b3..b5130849ea 100644
--- a/ietf/doc/templatetags/tests_ietf_filters.py
+++ b/ietf/doc/templatetags/tests_ietf_filters.py
@@ -14,12 +14,14 @@
ConflictReviewFactory,
BofreqFactory,
StatementFactory,
+ RfcFactory,
)
from ietf.doc.models import DocEvent
from ietf.doc.templatetags.ietf_filters import (
urlize_ietf_docs,
is_valid_url,
is_in_stream,
+ is_unexpected_wg_state,
)
from ietf.person.models import Person
from ietf.utils.test_utils import TestCase
@@ -174,3 +176,17 @@ def test_urlize_ietf_docs(self):
for input, output in cases:
# debug.show("(input, urlize_ietf_docs(input), output)")
self.assertEqual(urlize_ietf_docs(input), output)
+
+ def test_is_unexpected_wg_state(self):
+ """
+ Test that the unexpected_wg_state function works correctly
+ """
+ # test documents with expected wg states
+ self.assertFalse(is_unexpected_wg_state(RfcFactory()))
+ self.assertFalse(is_unexpected_wg_state(WgDraftFactory (states=[('draft-stream-ietf', 'sub-pub')])))
+ self.assertFalse(is_unexpected_wg_state(WgDraftFactory (states=[('draft-iesg', 'idexists')])))
+ self.assertFalse(is_unexpected_wg_state(WgDraftFactory (states=[('draft-stream-ietf', 'wg-cand'), ('draft-iesg','idexists')])))
+
+ # test documents with unexpected wg states due to invalid combination of states
+ self.assertTrue(is_unexpected_wg_state(WgDraftFactory (states=[('draft-stream-ietf', 'wg-cand'), ('draft-iesg','lc-req')])))
+ self.assertTrue(is_unexpected_wg_state(WgDraftFactory (states=[('draft-stream-ietf', 'chair-w'), ('draft-iesg','pub-req')])))
diff --git a/ietf/templates/doc/search/status_columns.html b/ietf/templates/doc/search/status_columns.html
index 15f284fd12..5ba41bb9c4 100644
--- a/ietf/templates/doc/search/status_columns.html
+++ b/ietf/templates/doc/search/status_columns.html
@@ -78,6 +78,10 @@
{% person_link action_holder.person title=action_holder.role_for_doc %}{% if action_holder|action_holder_badge %} {{ action_holder|action_holder_badge }}{% endif %}{% if not forloop.last %},{% endif %}
{% endfor %}
{% endif %}
+ {% if doc|is_unexpected_wg_state %}
+
+ Unexpected WG state
+ {% endif %}
{% else %}
{# RFC #}
{{ doc.std_level|safe }} RFC
From aa2a3217a7006c05292650bb0d75b0243d2cc67b Mon Sep 17 00:00:00 2001
From: Nicolas Giard
Date: Fri, 28 Mar 2025 10:59:56 -0400
Subject: [PATCH 105/460] fix(agenda): strikethrough cancelled and rescheduled
events (#8694)
* fix(agenda): strikethrough cancelled and rescheduled events
* fix(agenda): rescheduled + canceled badges width on mobile
* fix(agenda): use em for canceled / rescheduled badge on mobile
* fix: deleting badge width/display:block CSS
---------
Co-authored-by: Matthew Holloway
---
client/agenda/AgendaDetailsModal.vue | 1 -
client/agenda/AgendaScheduleCalendar.vue | 1 -
client/agenda/AgendaScheduleList.vue | 12 ++++++++++--
3 files changed, 10 insertions(+), 4 deletions(-)
diff --git a/client/agenda/AgendaDetailsModal.vue b/client/agenda/AgendaDetailsModal.vue
index 037e32f9cb..2582bf2159 100644
--- a/client/agenda/AgendaDetailsModal.vue
+++ b/client/agenda/AgendaDetailsModal.vue
@@ -327,7 +327,6 @@ async function fetchSessionMaterials () {
border-radius: 5px;
.badge {
- width: 30px;
font-size: .7em;
background-color: $yellow-200;
border-bottom: 1px solid $yellow-500;
diff --git a/client/agenda/AgendaScheduleCalendar.vue b/client/agenda/AgendaScheduleCalendar.vue
index 6701ddabd7..9863296341 100644
--- a/client/agenda/AgendaScheduleCalendar.vue
+++ b/client/agenda/AgendaScheduleCalendar.vue
@@ -330,7 +330,6 @@ function close () {
}
.badge {
- width: 30px;
font-size: .7em;
border: 1px solid #CCC;
text-transform: uppercase;
diff --git a/client/agenda/AgendaScheduleList.vue b/client/agenda/AgendaScheduleList.vue
index 86c91bed85..d38c7842eb 100644
--- a/client/agenda/AgendaScheduleList.vue
+++ b/client/agenda/AgendaScheduleList.vue
@@ -1097,8 +1097,6 @@ onBeforeUnmount(() => {
}
@media screen and (max-width: $bs5-break-md) {
- width: 30px;
- display: block;
margin: 2px 0 0 0;
}
}
@@ -1321,6 +1319,11 @@ onBeforeUnmount(() => {
&.agenda-table-cell-room {
border-right: 1px solid darken($red-100, 5%) !important;
+ text-decoration: line-through;
+ }
+
+ &.agenda-table-cell-name > a, &.agenda-table-cell-name > span {
+ text-decoration: line-through;
}
&:last-child {
@@ -1348,6 +1351,11 @@ onBeforeUnmount(() => {
&.agenda-table-cell-room {
border-right: 1px solid darken($orange-100, 5%) !important;
+ text-decoration: line-through;
+ }
+
+ &.agenda-table-cell-name > a, &.agenda-table-cell-name > span {
+ text-decoration: line-through;
}
&:last-child {
From 527db359e3836a587ba67432b814004b75635022 Mon Sep 17 00:00:00 2001
From: Nicolas Giard
Date: Tue, 1 Apr 2025 09:03:59 -0400
Subject: [PATCH 106/460] chore: fix line ending for
.github/ISSUE_TEMPLATE/config.yml
---
.github/ISSUE_TEMPLATE/config.yml | 16 ++++++++--------
1 file changed, 8 insertions(+), 8 deletions(-)
diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml
index 5e27af9fed..320614b17e 100644
--- a/.github/ISSUE_TEMPLATE/config.yml
+++ b/.github/ISSUE_TEMPLATE/config.yml
@@ -1,8 +1,8 @@
-blank_issues_enabled: false
-contact_links:
- - name: Help and questions
- url: https://github.com/ietf-tools/datatracker/discussions/categories/help-questions
- about: Need help? Have a question on setting up the project or its usage?
- - name: Discuss new ideas
- url: https://github.com/ietf-tools/datatracker/discussions/categories/ideas
- about: Submit ideas for new features or improvements to be discussed.
+blank_issues_enabled: false
+contact_links:
+ - name: Help and questions
+ url: https://github.com/ietf-tools/datatracker/discussions/categories/help-questions
+ about: Need help? Have a question on setting up the project or its usage?
+ - name: Discuss new ideas
+ url: https://github.com/ietf-tools/datatracker/discussions/categories/ideas
+ about: Submit ideas for new features or improvements to be discussed.
From 9957cf190a4a61369f97e0224e0bb2ae6ed9c8be Mon Sep 17 00:00:00 2001
From: Rudi Matz
Date: Tue, 1 Apr 2025 14:35:04 -0400
Subject: [PATCH 107/460] feat: block iesg ballot for docs from outside streams
(#8761)
* feat: block iesg ballot for docs from outside streams
* test: correct failing irsg test
* feat: improve/simplify tests and condition
* test: filter out pyflakes tests related to globals
---
ietf/doc/tests_ballot.py | 29 +++++++++++++++++++++++------
ietf/doc/tests_irsg_ballot.py | 2 +-
ietf/doc/views_ballot.py | 5 ++++-
ietf/utils/test_runner.py | 9 ++++++++-
4 files changed, 36 insertions(+), 9 deletions(-)
diff --git a/ietf/doc/tests_ballot.py b/ietf/doc/tests_ballot.py
index c7362b58e2..ec23f3d491 100644
--- a/ietf/doc/tests_ballot.py
+++ b/ietf/doc/tests_ballot.py
@@ -17,7 +17,7 @@
from ietf.doc.models import (Document, State, DocEvent,
BallotPositionDocEvent, LastCallDocEvent, WriteupDocEvent, TelechatDocEvent)
from ietf.doc.factories import (DocumentFactory, IndividualDraftFactory, IndividualRfcFactory, WgDraftFactory,
- BallotPositionDocEventFactory, BallotDocEventFactory, IRSGBallotDocEventFactory)
+ BallotPositionDocEventFactory, BallotDocEventFactory, IRSGBallotDocEventFactory, RgDraftFactory)
from ietf.doc.templatetags.ietf_filters import can_defer
from ietf.doc.utils import create_ballot_if_not_open
from ietf.doc.views_ballot import parse_ballot_edit_return_point
@@ -360,7 +360,7 @@ def test_request_last_call(self):
self.assertTrue('aread@' in outbox[-1]['Cc'])
def test_edit_ballot_writeup(self):
- draft = IndividualDraftFactory(states=[('draft','active'),('draft-iesg','iesg-eva')])
+ draft = IndividualDraftFactory(states=[('draft','active'),('draft-iesg','iesg-eva')], stream_id='ietf')
url = urlreverse('ietf.doc.views_ballot.ballot_writeupnotes', kwargs=dict(name=draft.name))
login_testing_unauthorized(self, "secretary", url)
@@ -390,8 +390,25 @@ def test_edit_ballot_writeup(self):
self.assertTrue("This is a simple test" in d.latest_event(WriteupDocEvent, type="changed_ballot_writeup_text").text)
self.assertTrue('iesg-eva' == d.get_state_slug('draft-iesg'))
+ def test_edit_ballot_writeup_unauthorized_stream(self):
+ # Test that accessing a document from unauthorized (irtf) stream returns a 404 error
+ draft = RgDraftFactory()
+ url = urlreverse('ietf.doc.views_ballot.ballot_writeupnotes', kwargs=dict(name=draft.name))
+ login_testing_unauthorized(self, "ad", url)
+
+ r = self.client.get(url)
+ self.assertEqual(r.status_code, 404)
+
+ def test_edit_ballot_writeup_invalid_name(self):
+ # Test that accessing a non-existent document returns a 404 error
+ url = urlreverse('ietf.doc.views_ballot.ballot_writeupnotes', kwargs=dict(name="invalid_name"))
+ login_testing_unauthorized(self, "ad", url)
+
+ r = self.client.get(url)
+ self.assertEqual(r.status_code, 404)
+
def test_edit_ballot_writeup_already_approved(self):
- draft = IndividualDraftFactory(states=[('draft','active'),('draft-iesg','approved')])
+ draft = IndividualDraftFactory(states=[('draft','active'),('draft-iesg','approved')], stream_id='ietf')
url = urlreverse('ietf.doc.views_ballot.ballot_writeupnotes', kwargs=dict(name=draft.name))
login_testing_unauthorized(self, "secretary", url)
@@ -465,7 +482,7 @@ def test_edit_ballot_rfceditornote(self):
def test_issue_ballot(self):
ad = Person.objects.get(user__username="ad")
for case in ('none','past','future'):
- draft = IndividualDraftFactory(ad=ad)
+ draft = IndividualDraftFactory(ad=ad, stream_id='ietf')
if case in ('past','future'):
LastCallDocEvent.objects.create(
by=Person.objects.get(name='(System)'),
@@ -504,7 +521,7 @@ def test_issue_ballot(self):
def test_issue_ballot_auto_state_change(self):
ad = Person.objects.get(user__username="ad")
- draft = IndividualDraftFactory(ad=ad, states=[('draft','active'),('draft-iesg','writeupw')])
+ draft = IndividualDraftFactory(ad=ad, states=[('draft','active'),('draft-iesg','writeupw')], stream_id='ietf')
url = urlreverse('ietf.doc.views_ballot.ballot_writeupnotes', kwargs=dict(name=draft.name))
login_testing_unauthorized(self, "secretary", url)
@@ -528,7 +545,7 @@ def test_issue_ballot_auto_state_change(self):
def test_issue_ballot_warn_if_early(self):
ad = Person.objects.get(user__username="ad")
- draft = IndividualDraftFactory(ad=ad, states=[('draft','active'),('draft-iesg','lc')])
+ draft = IndividualDraftFactory(ad=ad, states=[('draft','active'),('draft-iesg','lc')], stream_id='ietf')
url = urlreverse('ietf.doc.views_ballot.ballot_writeupnotes', kwargs=dict(name=draft.name))
login_testing_unauthorized(self, "secretary", url)
diff --git a/ietf/doc/tests_irsg_ballot.py b/ietf/doc/tests_irsg_ballot.py
index 92752e48c4..aa62d8aaf9 100644
--- a/ietf/doc/tests_irsg_ballot.py
+++ b/ietf/doc/tests_irsg_ballot.py
@@ -288,7 +288,7 @@ def test_edit_ballot_position_permissions(self):
def test_iesg_ballot_no_irsg_actions(self):
ad = Person.objects.get(user__username="ad")
- wg_draft = IndividualDraftFactory(ad=ad)
+ wg_draft = IndividualDraftFactory(ad=ad, stream_id='ietf')
irsgmember = get_active_irsg()[0]
url = urlreverse('ietf.doc.views_ballot.ballot_writeupnotes', kwargs=dict(name=wg_draft.name))
diff --git a/ietf/doc/views_ballot.py b/ietf/doc/views_ballot.py
index 4ff40d2268..9e2a417933 100644
--- a/ietf/doc/views_ballot.py
+++ b/ietf/doc/views_ballot.py
@@ -611,6 +611,10 @@ def clean_ballot_writeup(self):
def ballot_writeupnotes(request, name):
"""Editing of ballot write-up and notes"""
doc = get_object_or_404(Document, name=name)
+
+ if doc.stream_id is None or doc.stream_id != 'ietf':
+ raise Http404("The requested operation is not allowed for this document.")
+
prev_state = doc.get_state("draft-iesg")
login = request.user.person
@@ -1335,4 +1339,3 @@ def parse_ballot_edit_return_point(path, doc_name, ballot_id):
"ietf.iesg.views.past_documents",
}
return validate_return_to_path(path, get_default_path, allowed_path_handlers)
-
diff --git a/ietf/utils/test_runner.py b/ietf/utils/test_runner.py
index 3c89a2d01c..6b6f282c49 100644
--- a/ietf/utils/test_runner.py
+++ b/ietf/utils/test_runner.py
@@ -263,7 +263,14 @@ def pyflakes_test(self):
path = os.path.join(settings.BASE_DIR)
warnings = []
warnings = pyflakes.checkPaths([path], verbosity=0)
- self.assertEqual([], [str(w) for w in warnings])
+
+ # Filter out warnings about unused global variables
+ filtered_warnings = [
+ w for w in warnings
+ if not re.search(r"`global \w+` is unused: name is never assigned in scope", str(w))
+ ]
+
+ self.assertEqual([], [str(w) for w in filtered_warnings])
class MyPyTest(TestCase):
From 48f97c68406a158a4d4bf5da58e9168d12a5f92e Mon Sep 17 00:00:00 2001
From: Jennifer Richards
Date: Tue, 1 Apr 2025 16:12:33 -0300
Subject: [PATCH 108/460] chore: fix/refactor code near unneeded global
declarations (#8765)
* refactor: unused vars in draft.py
* refactor: unwrap/simplify random_faker() prep
* chore: types/globals in test_runner.py
* chore: drop unneeded globals
---
ietf/doc/templatetags/wg_menu.py | 2 --
ietf/person/factories.py | 26 ++++++++++++++------------
ietf/utils/draft.py | 8 +-------
ietf/utils/patch.py | 3 +--
ietf/utils/test_runner.py | 31 ++++++++++++++-----------------
5 files changed, 30 insertions(+), 40 deletions(-)
diff --git a/ietf/doc/templatetags/wg_menu.py b/ietf/doc/templatetags/wg_menu.py
index 76bf7eb4d0..3e8d209448 100644
--- a/ietf/doc/templatetags/wg_menu.py
+++ b/ietf/doc/templatetags/wg_menu.py
@@ -62,8 +62,6 @@
@register.simple_tag
def wg_menu(flavor):
- global parents
-
for p in parents:
p.short_name = parent_short_names.get(p.acronym) or p.name
if p.short_name.endswith(" Area"):
diff --git a/ietf/person/factories.py b/ietf/person/factories.py
index 45de554766..98756f26c8 100644
--- a/ietf/person/factories.py
+++ b/ietf/person/factories.py
@@ -26,20 +26,22 @@
fake = faker.Factory.create()
-def setup():
- global acceptable_fakers
- # The transliteration of some Arabic and Devanagari names introduces
- # non-alphabetic characters that don't work with the draft author
- # extraction code, and also don't seem to match the way people with Arabic
- # names romanize Arabic names. Exclude those locales from name generation
- # in order to avoid test failures.
- locales = set( [ l for l in faker.config.AVAILABLE_LOCALES if not (l.startswith('ar_') or l.startswith('sg_') or l=='fr_QC') ] )
- acceptable_fakers = [faker.Faker(locale) for locale in locales]
-setup()
+# The transliteration of some Arabic and Devanagari names introduces
+# non-alphabetic characters that don't work with the draft author
+# extraction code, and also don't seem to match the way people with Arabic
+# names romanize Arabic names. Exclude those locales from name generation
+# in order to avoid test failures.
+_acceptable_fakers = [
+ faker.Faker(locale)
+ for locale in set(faker.config.AVAILABLE_LOCALES)
+ if not (locale.startswith('ar_') or locale.startswith('sg_') or locale == 'fr_QC')
+]
+
def random_faker():
- global acceptable_fakers
- return random.sample(acceptable_fakers, 1)[0]
+ """Helper to get a random faker acceptable for User names"""
+ return random.sample(_acceptable_fakers, 1)[0]
+
class UserFactory(factory.django.DjangoModelFactory):
class Meta:
diff --git a/ietf/utils/draft.py b/ietf/utils/draft.py
index 50add5abba..53d3d40811 100755
--- a/ietf/utils/draft.py
+++ b/ietf/utils/draft.py
@@ -65,7 +65,6 @@
opt_debug = False
opt_timestamp = False
opt_trace = False
-opt_authorinfo = False
opt_attributes = False
# Don't forget to add the option variable to the globals list in _main below
@@ -1332,8 +1331,6 @@ def getmeta(fn):
# ----------------------------------------------------------------------
def _output(docname, fields, outfile=sys.stdout):
- global company_domain
-
if opt_attributes:
def outputkey(key, fields):
field = fields[key]
@@ -1373,9 +1370,8 @@ def _printmeta(fn, outfile=sys.stdout):
# Main
# ----------------------------------------------------------------------
-company_domain = {} # type: Dict[str, str]
def _main(outfile=sys.stdout):
- global opt_debug, opt_timestamp, opt_trace, opt_authorinfo, files, company_domain, opt_attributes
+ global opt_debug, opt_timestamp, opt_trace, files, opt_attributes
# set default values, if any
# ----------------------------------------------------------------------
# Option processing
@@ -1423,8 +1419,6 @@ def _main(outfile=sys.stdout):
elif opt in ["-T", "--trace"]: # Emit trace information while working
opt_trace = True
- company_domain = {}
-
if not files:
files = [ "-" ]
diff --git a/ietf/utils/patch.py b/ietf/utils/patch.py
index 9de2270ebb..fd3e4a165d 100644
--- a/ietf/utils/patch.py
+++ b/ietf/utils/patch.py
@@ -87,8 +87,7 @@ def createLock(self):
debugmode = False
def setdebug():
- global debugmode, streamhandler
-
+ global debugmode
debugmode = True
loglevel = logging.DEBUG
logformat = "%(levelname)8s %(message)s"
diff --git a/ietf/utils/test_runner.py b/ietf/utils/test_runner.py
index 6b6f282c49..a77377ffb5 100644
--- a/ietf/utils/test_runner.py
+++ b/ietf/utils/test_runner.py
@@ -53,9 +53,10 @@
import factory.random
import urllib3
import warnings
-from urllib.parse import urlencode
from fnmatch import fnmatch
+from typing import Callable, Optional
+from urllib.parse import urlencode
from coverage.report import Reporter
from coverage.results import Numbers
@@ -90,11 +91,11 @@
from mypy_boto3_s3.service_resource import Bucket
-loaded_templates = set()
-visited_urls = set()
-test_database_name = None
-old_destroy = None
-old_create = None
+loaded_templates: set[str] = set()
+visited_urls: set[str] = set()
+test_database_name: Optional[str] = None
+old_destroy: Optional[Callable] = None
+old_create: Optional[Callable] = None
template_coverage_collection = None
code_coverage_collection = None
@@ -230,10 +231,12 @@ def load_and_run_fixtures(verbosity):
fn()
def safe_create_test_db(self, verbosity, *args, **kwargs):
- global test_database_name, old_create
+ if old_create is None:
+ raise RuntimeError("old_create has not been set, cannot proceed")
keepdb = kwargs.get('keepdb', False)
if not keepdb:
print(" Creating test database...")
+ global test_database_name
test_database_name = old_create(self, 0, *args, **kwargs)
if settings.GLOBAL_TEST_FIXTURES:
@@ -243,8 +246,9 @@ def safe_create_test_db(self, verbosity, *args, **kwargs):
return test_database_name
def safe_destroy_test_db(*args, **kwargs):
+ if old_destroy is None:
+ raise RuntimeError("old_destroy has not been set, cannot proceed")
sys.stdout.write('\n')
- global test_database_name, old_destroy
keepdb = kwargs.get('keepdb', False)
if not keepdb:
if settings.DATABASES["default"]["NAME"] != test_database_name:
@@ -358,15 +362,13 @@ class TemplateCoverageLoader(BaseLoader):
is_usable = True
def get_template(self, template_name, skip=None):
- global template_coverage_collection, loaded_templates
- if template_coverage_collection == True:
+ if template_coverage_collection:
loaded_templates.add(str(template_name))
raise TemplateDoesNotExist(template_name)
def record_urls_middleware(get_response):
def record_urls(request):
- global url_coverage_collection, visited_urls
- if url_coverage_collection == True:
+ if url_coverage_collection:
visited_urls.add(request.path)
return get_response(request)
return record_urls
@@ -532,7 +534,6 @@ def report_test_result(self, test):
( test, test_coverage*100, latest_coverage_version, master_coverage*100, ))
def template_coverage_test(self):
- global loaded_templates
if self.runner.check_coverage:
apps = [ app.split('.')[-1] for app in self.runner.test_apps ]
all = get_template_paths(apps)
@@ -760,7 +761,6 @@ def __init__(
self.show_logging = show_logging
self.rerun = rerun
self.test_labels = None
- global validation_settings
validation_settings["validate_html"] = self if validate_html else None
validation_settings["validate_html_harder"] = self if validate_html and validate_html_harder else None
validation_settings["show_logging"] = show_logging
@@ -783,9 +783,6 @@ def __init__(
self.blobstoremanager = TestBlobstoreManager() if manage_blobstore else None
def setup_test_environment(self, **kwargs):
- global template_coverage_collection
- global url_coverage_collection
-
ietf.utils.mail.test_mode = True
ietf.utils.mail.SMTP_ADDR['ip4'] = '127.0.0.1'
ietf.utils.mail.SMTP_ADDR['port'] = 2025
From 48bee8a58381e5caa6cf5d6b56bd58eff614a918 Mon Sep 17 00:00:00 2001
From: Jennifer Richards
Date: Tue, 1 Apr 2025 17:19:30 -0300
Subject: [PATCH 109/460] chore: update libreoffice install for bookworm
(#8768)
---
dev/build/Dockerfile | 5 ++---
1 file changed, 2 insertions(+), 3 deletions(-)
diff --git a/dev/build/Dockerfile b/dev/build/Dockerfile
index cc55c92881..c25298d652 100644
--- a/dev/build/Dockerfile
+++ b/dev/build/Dockerfile
@@ -10,9 +10,8 @@ RUN groupadd -g 1000 datatracker && \
RUN apt-get purge -y imagemagick imagemagick-6-common
# Install libreoffice (needed via PPT2PDF_COMMAND)
-RUN echo "deb http://deb.debian.org/debian bullseye-backports main" > /etc/apt/sources.list.d/bullseye-backports.list && \
- apt-get update && \
- apt-get -qyt bullseye-backports install libreoffice-nogui
+RUN apt-get update && \
+ apt-get -qy install libreoffice-nogui
COPY . .
COPY ./dev/build/start.sh ./start.sh
From 15ef59133a0e8b62158836b697d9413dc395228f Mon Sep 17 00:00:00 2001
From: Matthew Holloway
Date: Thu, 3 Apr 2025 05:10:22 +1300
Subject: [PATCH 110/460] chore: libreoffice flags for resilience (#8769)
---
ietf/settings.py | 7 ++++++-
1 file changed, 6 insertions(+), 1 deletion(-)
diff --git a/ietf/settings.py b/ietf/settings.py
index 1fe5f48229..33a2f976d9 100644
--- a/ietf/settings.py
+++ b/ietf/settings.py
@@ -1103,7 +1103,12 @@ def skip_unreadable_post(record):
DATATRACKER_MAX_UPLOAD_SIZE = 40960000
PPT2PDF_COMMAND = [
- "/usr/bin/soffice", "--headless", "--convert-to", "pdf:writer_globaldocument_pdf_Export", "--outdir"
+ "/usr/bin/soffice",
+ "--headless", # no GUI
+ "--safe-mode", # use a new libreoffice profile every time (ensures no reliance on accumulated profile config)
+ "--norestore", # don't attempt to restore files after a previous crash (ensures that one crash won't block future conversions until UI intervention)
+ "--convert-to", "pdf:writer_globaldocument_pdf_Export",
+ "--outdir"
]
STATS_REGISTRATION_ATTENDEES_JSON_URL = 'https://registration.ietf.org/{number}/attendees/'
From afa79dc55b16463d56b531bb1b6b06fbbeb12ace Mon Sep 17 00:00:00 2001
From: Jim Fenton
Date: Wed, 2 Apr 2025 09:10:38 -0700
Subject: [PATCH 111/460] feat: Warn if uploading minutes before session end
(#8700)
* Warn if uploading minutes before session end
* Remove extraneous btn-primary for session future
Co-authored-by: Robert Sparks
* fix: guard against unscheduled sessions
* fix: test addition of warning
* fix: another guard against unscheduled sessions
* feat: test future warning on session details panel
---------
Co-authored-by: Robert Sparks
---
ietf/meeting/tests_views.py | 286 ++++++++++--------
ietf/meeting/views.py | 6 +
.../meeting/session_details_panel.html | 2 +-
.../meeting/upload_session_minutes.html | 5 +
4 files changed, 168 insertions(+), 131 deletions(-)
diff --git a/ietf/meeting/tests_views.py b/ietf/meeting/tests_views.py
index 0f91986f77..a93a26b981 100644
--- a/ietf/meeting/tests_views.py
+++ b/ietf/meeting/tests_views.py
@@ -6541,110 +6541,130 @@ def test_upload_bluesheets_interim_chair_access(self):
self.assertIn('Upload', str(q("title")))
- def test_upload_minutes_agenda(self):
- for doctype in ('minutes','agenda'):
- session = SessionFactory(meeting__type_id='ietf')
- if doctype == 'minutes':
- url = urlreverse('ietf.meeting.views.upload_session_minutes',kwargs={'num':session.meeting.number,'session_id':session.id})
- else:
- url = urlreverse('ietf.meeting.views.upload_session_agenda',kwargs={'num':session.meeting.number,'session_id':session.id})
- self.client.logout()
- login_testing_unauthorized(self,"secretary",url)
- r = self.client.get(url)
- self.assertEqual(r.status_code, 200)
- q = PyQuery(r.content)
- self.assertIn('Upload', str(q("Title")))
- self.assertFalse(session.presentations.exists())
- self.assertFalse(q('form input[type="checkbox"]'))
-
- session2 = SessionFactory(meeting=session.meeting,group=session.group)
- r = self.client.get(url)
- self.assertEqual(r.status_code, 200)
- q = PyQuery(r.content)
- self.assertTrue(q('form input[type="checkbox"]'))
-
- # test not submitting a file
- r = self.client.post(url, dict(submission_method="upload"))
- self.assertEqual(r.status_code, 200)
- q = PyQuery(r.content)
- self.assertTrue(q("form .is-invalid"))
-
- test_file = BytesIO(b'this is some text for a test')
- test_file.name = "not_really.json"
- r = self.client.post(url,dict(submission_method="upload",file=test_file))
- self.assertEqual(r.status_code, 200)
- q = PyQuery(r.content)
- self.assertTrue(q('form .is-invalid'))
-
- test_file = BytesIO(b'this is some text for a test'*1510000)
- test_file.name = "not_really.pdf"
- r = self.client.post(url,dict(submission_method="upload",file=test_file))
- self.assertEqual(r.status_code, 200)
- q = PyQuery(r.content)
- self.assertTrue(q('form .is-invalid'))
-
- test_file = BytesIO(b'')
- test_file.name = "not_really.html"
- r = self.client.post(url,dict(submission_method="upload",file=test_file))
- self.assertEqual(r.status_code, 200)
- q = PyQuery(r.content)
- self.assertTrue(q('form .is-invalid'))
-
- # Test html sanitization
- test_file = BytesIO(b'Title
Title
Some text')
- test_file.name = "some.html"
- r = self.client.post(url,dict(submission_method="upload",file=test_file))
- self.assertEqual(r.status_code, 302)
- doc = session.presentations.filter(document__type_id=doctype).first().document
- self.assertEqual(doc.rev,'00')
- text = doc.text()
- self.assertIn('Some text', text)
- self.assertNotIn('', text)
- text = retrieve_str(doctype, f"{doc.name}-{doc.rev}.html")
- self.assertIn('Some text', text)
- self.assertNotIn('', text)
-
- # txt upload
- test_bytes = b'This is some text for a test, with the word\nvirtual at the beginning of a line.'
- test_file = BytesIO(test_bytes)
- test_file.name = "some.txt"
- r = self.client.post(url,dict(submission_method="upload",file=test_file,apply_to_all=False))
- self.assertEqual(r.status_code, 302)
- doc = session.presentations.filter(document__type_id=doctype).first().document
- self.assertEqual(doc.rev,'01')
- self.assertFalse(session2.presentations.filter(document__type_id=doctype))
- retrieved_bytes = retrieve_bytes(doctype, f"{doc.name}-{doc.rev}.txt")
- self.assertEqual(retrieved_bytes, test_bytes)
-
+ def test_label_future_sessions(self):
+ self.client.login(username='secretary', password='secretary+password')
+ for future in (True, False):
+ mtg_date = date_today()+datetime.timedelta(days=180 if future else -180)
+ session = SessionFactory(meeting__type_id='ietf', meeting__date=mtg_date)
+ # Verify future warning shows on the session details panel
+ url = urlreverse('ietf.meeting.views.session_details', kwargs={'num':session.meeting.number, 'acronym': session.group.acronym})
r = self.client.get(url)
- self.assertEqual(r.status_code, 200)
- q = PyQuery(r.content)
- self.assertIn('Revise', str(q("Title")))
- test_bytes = b'this is some different text for a test'
- test_file = BytesIO(test_bytes)
- test_file.name = "also_some.txt"
- r = self.client.post(url,dict(submission_method="upload",file=test_file,apply_to_all=True))
- self.assertEqual(r.status_code, 302)
- doc = Document.objects.get(pk=doc.pk)
- self.assertEqual(doc.rev,'02')
- self.assertTrue(session2.presentations.filter(document__type_id=doctype))
- retrieved_bytes = retrieve_bytes(doctype, f"{doc.name}-{doc.rev}.txt")
- self.assertEqual(retrieved_bytes, test_bytes)
-
- # Test bad encoding
- test_file = BytesIO('
Title
Some\x93text'.encode('latin1'))
- test_file.name = "some.html"
- r = self.client.post(url,dict(submission_method="upload",file=test_file))
- self.assertContains(r, 'Could not identify the file encoding')
- doc = Document.objects.get(pk=doc.pk)
- self.assertEqual(doc.rev,'02')
+ self.assertTrue(r.status_code==200)
+ if future:
+ self.assertContains(r, "Session has not ended yet")
+ else:
+ self.assertNotContains(r, "Session has not ended yet")
- # Verify that we don't have dead links
- url = urlreverse('ietf.meeting.views.session_details', kwargs={'num':session.meeting.number, 'acronym': session.group.acronym})
- top = '/meeting/%s/' % session.meeting.number
- self.requests_mock.get(f'{session.notes_url()}/download', text='markdown notes')
- self.requests_mock.get(f'{session.notes_url()}/info', text=json.dumps({'title': 'title', 'updatetime': '2021-12-01T17:11:00z'}))
- self.crawl_materials(url=url, top=top)
+ def test_upload_minutes_agenda(self):
+ for doctype in ('minutes','agenda'):
+ for future in (True, False):
+ mtg_date = date_today()+datetime.timedelta(days=180 if future else -180)
+ session = SessionFactory(meeting__type_id='ietf', meeting__date=mtg_date)
+ if doctype == 'minutes':
+ url = urlreverse('ietf.meeting.views.upload_session_minutes',kwargs={'num':session.meeting.number,'session_id':session.id})
+ else:
+ url = urlreverse('ietf.meeting.views.upload_session_agenda',kwargs={'num':session.meeting.number,'session_id':session.id})
+ self.client.logout()
+ login_testing_unauthorized(self,"secretary",url)
+ r = self.client.get(url)
+ self.assertEqual(r.status_code, 200)
+ q = PyQuery(r.content)
+ self.assertIn('Upload', str(q("Title")))
+ self.assertFalse(session.presentations.exists())
+ self.assertFalse(q('form input[type="checkbox"]'))
+ if future and doctype == "minutes":
+ self.assertContains(r, "Session has not ended yet")
+ else:
+ self.assertNotContains(r, "Session has not ended yet")
+
+ session2 = SessionFactory(meeting=session.meeting,group=session.group)
+ r = self.client.get(url)
+ self.assertEqual(r.status_code, 200)
+ q = PyQuery(r.content)
+ self.assertTrue(q('form input[type="checkbox"]'))
+
+ # test not submitting a file
+ r = self.client.post(url, dict(submission_method="upload"))
+ self.assertEqual(r.status_code, 200)
+ q = PyQuery(r.content)
+ self.assertTrue(q("form .is-invalid"))
+
+ test_file = BytesIO(b'this is some text for a test')
+ test_file.name = "not_really.json"
+ r = self.client.post(url,dict(submission_method="upload",file=test_file))
+ self.assertEqual(r.status_code, 200)
+ q = PyQuery(r.content)
+ self.assertTrue(q('form .is-invalid'))
+
+ test_file = BytesIO(b'this is some text for a test'*1510000)
+ test_file.name = "not_really.pdf"
+ r = self.client.post(url,dict(submission_method="upload",file=test_file))
+ self.assertEqual(r.status_code, 200)
+ q = PyQuery(r.content)
+ self.assertTrue(q('form .is-invalid'))
+
+ test_file = BytesIO(b'')
+ test_file.name = "not_really.html"
+ r = self.client.post(url,dict(submission_method="upload",file=test_file))
+ self.assertEqual(r.status_code, 200)
+ q = PyQuery(r.content)
+ self.assertTrue(q('form .is-invalid'))
+
+ # Test html sanitization
+ test_file = BytesIO(b'Title
Title
Some text')
+ test_file.name = "some.html"
+ r = self.client.post(url,dict(submission_method="upload",file=test_file))
+ self.assertEqual(r.status_code, 302)
+ doc = session.presentations.filter(document__type_id=doctype).first().document
+ self.assertEqual(doc.rev,'00')
+ text = doc.text()
+ self.assertIn('Some text', text)
+ self.assertNotIn('', text)
+ text = retrieve_str(doctype, f"{doc.name}-{doc.rev}.html")
+ self.assertIn('Some text', text)
+ self.assertNotIn('', text)
+
+ # txt upload
+ test_bytes = b'This is some text for a test, with the word\nvirtual at the beginning of a line.'
+ test_file = BytesIO(test_bytes)
+ test_file.name = "some.txt"
+ r = self.client.post(url,dict(submission_method="upload",file=test_file,apply_to_all=False))
+ self.assertEqual(r.status_code, 302)
+ doc = session.presentations.filter(document__type_id=doctype).first().document
+ self.assertEqual(doc.rev,'01')
+ self.assertFalse(session2.presentations.filter(document__type_id=doctype))
+ retrieved_bytes = retrieve_bytes(doctype, f"{doc.name}-{doc.rev}.txt")
+ self.assertEqual(retrieved_bytes, test_bytes)
+
+ r = self.client.get(url)
+ self.assertEqual(r.status_code, 200)
+ q = PyQuery(r.content)
+ self.assertIn('Revise', str(q("Title")))
+ test_bytes = b'this is some different text for a test'
+ test_file = BytesIO(test_bytes)
+ test_file.name = "also_some.txt"
+ r = self.client.post(url,dict(submission_method="upload",file=test_file,apply_to_all=True))
+ self.assertEqual(r.status_code, 302)
+ doc = Document.objects.get(pk=doc.pk)
+ self.assertEqual(doc.rev,'02')
+ self.assertTrue(session2.presentations.filter(document__type_id=doctype))
+ retrieved_bytes = retrieve_bytes(doctype, f"{doc.name}-{doc.rev}.txt")
+ self.assertEqual(retrieved_bytes, test_bytes)
+
+ # Test bad encoding
+ test_file = BytesIO('
Title
Some\x93text'.encode('latin1'))
+ test_file.name = "some.html"
+ r = self.client.post(url,dict(submission_method="upload",file=test_file))
+ self.assertContains(r, 'Could not identify the file encoding')
+ doc = Document.objects.get(pk=doc.pk)
+ self.assertEqual(doc.rev,'02')
+
+ # Verify that we don't have dead links
+ url = urlreverse('ietf.meeting.views.session_details', kwargs={'num':session.meeting.number, 'acronym': session.group.acronym})
+ top = '/meeting/%s/' % session.meeting.number
+ self.requests_mock.get(f'{session.notes_url()}/download', text='markdown notes')
+ self.requests_mock.get(f'{session.notes_url()}/info', text=json.dumps({'title': 'title', 'updatetime': '2021-12-01T17:11:00z'}))
+ self.crawl_materials(url=url, top=top)
def test_upload_minutes_agenda_unscheduled(self):
for doctype in ('minutes','agenda'):
@@ -6661,6 +6681,7 @@ def test_upload_minutes_agenda_unscheduled(self):
self.assertIn('Upload', str(q("Title")))
self.assertFalse(session.presentations.exists())
self.assertFalse(q('form input[type="checkbox"]'))
+ self.assertNotContains(r, "Session has not ended yet")
test_file = BytesIO(b'this is some text for a test')
test_file.name = "not_really.txt"
@@ -6669,35 +6690,40 @@ def test_upload_minutes_agenda_unscheduled(self):
@override_settings(MEETING_MATERIALS_SERVE_LOCALLY=True)
def test_upload_minutes_agenda_interim(self):
- session=SessionFactory(meeting__type_id='interim')
for doctype in ('minutes','agenda'):
- if doctype=='minutes':
- url = urlreverse('ietf.meeting.views.upload_session_minutes',kwargs={'num':session.meeting.number,'session_id':session.id})
- else:
- url = urlreverse('ietf.meeting.views.upload_session_agenda',kwargs={'num':session.meeting.number,'session_id':session.id})
- self.client.logout()
- login_testing_unauthorized(self,"secretary",url)
- r = self.client.get(url)
- self.assertEqual(r.status_code, 200)
- q = PyQuery(r.content)
- self.assertIn('Upload', str(q("title")))
- self.assertFalse(session.presentations.filter(document__type_id=doctype))
- test_bytes = b'this is some text for a test'
- test_file = BytesIO(test_bytes)
- test_file.name = "not_really.txt"
- r = self.client.post(url,dict(submission_method="upload",file=test_file))
- self.assertEqual(r.status_code, 302)
- doc = session.presentations.filter(document__type_id=doctype).first().document
- self.assertEqual(doc.rev,'00')
- retrieved_bytes = retrieve_bytes(doctype, f"{doc.name}-{doc.rev}.txt")
- self.assertEqual(retrieved_bytes, test_bytes)
-
- # Verify that we don't have dead links
- url = urlreverse('ietf.meeting.views.session_details', kwargs={'num':session.meeting.number, 'acronym': session.group.acronym})
- top = '/meeting/%s/' % session.meeting.number
- self.requests_mock.get(f'{session.notes_url()}/download', text='markdown notes')
- self.requests_mock.get(f'{session.notes_url()}/info', text=json.dumps({'title': 'title', 'updatetime': '2021-12-01T17:11:00z'}))
- self.crawl_materials(url=url, top=top)
+ for future in (True, False):
+ session=SessionFactory(meeting__type_id='interim', meeting__date = date_today()+datetime.timedelta(days=180 if future else -180))
+ if doctype=='minutes':
+ url = urlreverse('ietf.meeting.views.upload_session_minutes',kwargs={'num':session.meeting.number,'session_id':session.id})
+ else:
+ url = urlreverse('ietf.meeting.views.upload_session_agenda',kwargs={'num':session.meeting.number,'session_id':session.id})
+ self.client.logout()
+ login_testing_unauthorized(self,"secretary",url)
+ r = self.client.get(url)
+ self.assertEqual(r.status_code, 200)
+ q = PyQuery(r.content)
+ self.assertIn('Upload', str(q("title")))
+ self.assertFalse(session.presentations.filter(document__type_id=doctype))
+ if future and doctype == "minutes":
+ self.assertContains(r, "Session has not ended yet")
+ else:
+ self.assertNotContains(r, "Session has not ended yet")
+ test_bytes = b'this is some text for a test'
+ test_file = BytesIO(test_bytes)
+ test_file.name = "not_really.txt"
+ r = self.client.post(url,dict(submission_method="upload",file=test_file))
+ self.assertEqual(r.status_code, 302)
+ doc = session.presentations.filter(document__type_id=doctype).first().document
+ self.assertEqual(doc.rev,'00')
+ retrieved_bytes = retrieve_bytes(doctype, f"{doc.name}-{doc.rev}.txt")
+ self.assertEqual(retrieved_bytes, test_bytes)
+
+ # Verify that we don't have dead links
+ url = urlreverse('ietf.meeting.views.session_details', kwargs={'num':session.meeting.number, 'acronym': session.group.acronym})
+ top = '/meeting/%s/' % session.meeting.number
+ self.requests_mock.get(f'{session.notes_url()}/download', text='markdown notes')
+ self.requests_mock.get(f'{session.notes_url()}/info', text=json.dumps({'title': 'title', 'updatetime': '2021-12-01T17:11:00z'}))
+ self.crawl_materials(url=url, top=top)
@override_settings(MEETING_MATERIALS_SERVE_LOCALLY=True)
def test_upload_narrativeminutes(self):
diff --git a/ietf/meeting/views.py b/ietf/meeting/views.py
index 6a73059d92..722bf829e1 100644
--- a/ietf/meeting/views.py
+++ b/ietf/meeting/views.py
@@ -2522,6 +2522,8 @@ def session_details(request, num, acronym):
else:
pending_suggestions = SlideSubmission.objects.none()
+ tsa = session.official_timeslotassignment()
+ future = tsa is not None and timezone.now() < tsa.timeslot.end_time()
return render(request, "meeting/session_details.html",
{ 'scheduled_sessions':scheduled_sessions ,
'unscheduled_sessions':unscheduled_sessions ,
@@ -2532,6 +2534,7 @@ def session_details(request, num, acronym):
'can_manage_materials' : can_manage,
'can_view_request': can_view_request,
'thisweek': datetime_today()-datetime.timedelta(days=7),
+ 'future': future,
})
class SessionDraftsForm(forms.Form):
@@ -2823,11 +2826,14 @@ def upload_session_minutes(request, session_id, num):
else:
form = UploadMinutesForm(show_apply_to_all_checkbox)
+ tsa = session.official_timeslotassignment()
+ future = tsa is not None and timezone.now() < tsa.timeslot.end_time()
return render(request, "meeting/upload_session_minutes.html",
{'session': session,
'session_number': session_number,
'minutes_sp' : minutes_sp,
'form': form,
+ 'future': future,
})
@role_required("Secretariat")
diff --git a/ietf/templates/meeting/session_details_panel.html b/ietf/templates/meeting/session_details_panel.html
index 87d9e3d672..7c52ac0b4a 100644
--- a/ietf/templates/meeting/session_details_panel.html
+++ b/ietf/templates/meeting/session_details_panel.html
@@ -109,7 +109,7 @@