From a9a8f9ba0116aea7480e83a2dfc133196cf9280f Mon Sep 17 00:00:00 2001 From: Jennifer Richards Date: Fri, 21 Feb 2025 20:08:12 -0400 Subject: [PATCH 01/44] chore(deps): remove pydyf pin, update weasyprint req (#8580) * chore(deps): remove pydyf pin, update weasyprint req * chore(deps): drop pdf extra from xml2rfc dep This should come back when we use xml2rfc for PDF generation --- requirements.txt | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements.txt b/requirements.txt index 66a785e929..d8b6e0742f 100644 --- a/requirements.txt +++ b/requirements.txt @@ -57,7 +57,7 @@ oic>=1.3 # Used only by tests Pillow>=9.1.0 psycopg2>=2.9.6 pyang>=2.5.3 -pydyf>0.8.0,<0.10.0 # until weasyprint adjusts for 0.10.0 and later +pydyf>0.8.0 pyflakes>=2.4.0 pyopenssl>=22.0.0 # Used by urllib3.contrib, which is used by PyQuery but not marked as a dependency pyquery>=1.4.3 @@ -80,6 +80,6 @@ tlds>=2022042700 # Used to teach bleach about which TLDs currently exist tqdm>=4.64.0 Unidecode>=1.3.4 urllib3>=1.26,<2 -weasyprint>=59 -xml2rfc[pdf]>=3.23.0 +weasyprint>=64.1 +xml2rfc>=3.23.0 xym>=0.6,<1.0 From cb8ef96f364c3e76d6726f12889d534ab1b49a66 Mon Sep 17 00:00:00 2001 From: Jennifer Richards Date: Mon, 3 Mar 2025 11:21:39 -0400 Subject: [PATCH 02/44] fix: more submission date feedback; refactor xml2rfc log capture (#8621) * feat: catch and report any parsing error * refactor: error handling in a more testable way * fix: no bare `except` * test: exception cases for test_parse_creation_date * fix: explicitly reject non-numeric day/year * test: suppress xml2rfc output in test * refactor: context manager to capture xml2rfc output * refactor: more capture_xml2rfc_output usage * fix: capture_xml2rfc_output exception handling --- ietf/submit/utils.py | 162 ++++++++++++++++++++--------------------- ietf/utils/tests.py | 107 ++++++++++++++++++++++++++- ietf/utils/xmldraft.py | 66 ++++++++++++----- 3 files changed, 232 insertions(+), 103 deletions(-) diff --git 
a/ietf/submit/utils.py b/ietf/submit/utils.py index 61274c0116..a0c7dd8511 100644 --- a/ietf/submit/utils.py +++ b/ietf/submit/utils.py @@ -58,7 +58,7 @@ from ietf.utils.mail import is_valid_email from ietf.utils.text import parse_unicode, normalize_text from ietf.utils.timezone import date_today -from ietf.utils.xmldraft import InvalidMetadataError, XMLDraft +from ietf.utils.xmldraft import InvalidMetadataError, XMLDraft, capture_xml2rfc_output from ietf.person.name import unidecode_name @@ -926,105 +926,101 @@ def render_missing_formats(submission): If a txt file already exists, leaves it in place. Overwrites an existing html file if there is one. """ - # Capture stdio/stdout from xml2rfc - xml2rfc_stdout = io.StringIO() - xml2rfc_stderr = io.StringIO() - xml2rfc.log.write_out = xml2rfc_stdout - xml2rfc.log.write_err = xml2rfc_stderr - xml_path = staging_path(submission.name, submission.rev, '.xml') - parser = xml2rfc.XmlRfcParser(str(xml_path), quiet=True) - try: - # --- Parse the xml --- - xmltree = parser.parse(remove_comments=False) - except Exception as err: - raise XmlRfcError( - "Error parsing XML", - xml2rfc_stdout=xml2rfc_stdout.getvalue(), - xml2rfc_stderr=xml2rfc_stderr.getvalue(), - ) from err - # If we have v2, run it through v2v3. Keep track of the submitted version, though. 
- xmlroot = xmltree.getroot() - xml_version = xmlroot.get('version', '2') - if xml_version == '2': - v2v3 = xml2rfc.V2v3XmlWriter(xmltree) + with capture_xml2rfc_output() as xml2rfc_logs: + xml_path = staging_path(submission.name, submission.rev, '.xml') + parser = xml2rfc.XmlRfcParser(str(xml_path), quiet=True) try: - xmltree.tree = v2v3.convert2to3() + # --- Parse the xml --- + xmltree = parser.parse(remove_comments=False) except Exception as err: raise XmlRfcError( - "Error converting v2 XML to v3", - xml2rfc_stdout=xml2rfc_stdout.getvalue(), - xml2rfc_stderr=xml2rfc_stderr.getvalue(), + "Error parsing XML", + xml2rfc_stdout=xml2rfc_logs["stdout"].getvalue(), + xml2rfc_stderr=xml2rfc_logs["stderr"].getvalue(), ) from err - - # --- Prep the xml --- - today = date_today() - prep = xml2rfc.PrepToolWriter(xmltree, quiet=True, liberal=True, keep_pis=[xml2rfc.V3_PI_TARGET]) - prep.options.accept_prepped = True - prep.options.date = today - try: - xmltree.tree = prep.prep() - except RfcWriterError: - raise XmlRfcError( - f"Error during xml2rfc prep: {prep.errors}", - xml2rfc_stdout=xml2rfc_stdout.getvalue(), - xml2rfc_stderr=xml2rfc_stderr.getvalue(), - ) - except Exception as err: - raise XmlRfcError( - "Unexpected error during xml2rfc prep", - xml2rfc_stdout=xml2rfc_stdout.getvalue(), - xml2rfc_stderr=xml2rfc_stderr.getvalue(), - ) from err - - # --- Convert to txt --- - txt_path = staging_path(submission.name, submission.rev, '.txt') - if not txt_path.exists(): - writer = xml2rfc.TextWriter(xmltree, quiet=True) - writer.options.accept_prepped = True + # If we have v2, run it through v2v3. Keep track of the submitted version, though. 
+ xmlroot = xmltree.getroot() + xml_version = xmlroot.get('version', '2') + if xml_version == '2': + v2v3 = xml2rfc.V2v3XmlWriter(xmltree) + try: + xmltree.tree = v2v3.convert2to3() + except Exception as err: + raise XmlRfcError( + "Error converting v2 XML to v3", + xml2rfc_stdout=xml2rfc_logs["stdout"].getvalue(), + xml2rfc_stderr=xml2rfc_logs["stderr"].getvalue(), + ) from err + + # --- Prep the xml --- + today = date_today() + prep = xml2rfc.PrepToolWriter(xmltree, quiet=True, liberal=True, keep_pis=[xml2rfc.V3_PI_TARGET]) + prep.options.accept_prepped = True + prep.options.date = today + try: + xmltree.tree = prep.prep() + except RfcWriterError: + raise XmlRfcError( + f"Error during xml2rfc prep: {prep.errors}", + xml2rfc_stdout=xml2rfc_logs["stdout"].getvalue(), + xml2rfc_stderr=xml2rfc_logs["stderr"].getvalue(), + ) + except Exception as err: + raise XmlRfcError( + "Unexpected error during xml2rfc prep", + xml2rfc_stdout=xml2rfc_logs["stdout"].getvalue(), + xml2rfc_stderr=xml2rfc_logs["stderr"].getvalue(), + ) from err + + # --- Convert to txt --- + txt_path = staging_path(submission.name, submission.rev, '.txt') + if not txt_path.exists(): + writer = xml2rfc.TextWriter(xmltree, quiet=True) + writer.options.accept_prepped = True + writer.options.date = today + try: + writer.write(txt_path) + except Exception as err: + raise XmlRfcError( + "Error generating text format from XML", + xml2rfc_stdout=xml2rfc_logs["stdout"].getvalue(), + xml2rfc_stderr=xml2rfc_logs["stderr"].getvalue(), + ) from err + log.log( + 'In %s: xml2rfc %s generated %s from %s (version %s)' % ( + str(xml_path.parent), + xml2rfc.__version__, + txt_path.name, + xml_path.name, + xml_version, + ) + ) + # When the blobstores become autoritative - the guard at the + # containing if statement needs to be based on the store + with Path(txt_path).open("rb") as f: + store_file("staging", f"{submission.name}-{submission.rev}.txt", f) + + # --- Convert to html --- + html_path = 
staging_path(submission.name, submission.rev, '.html') + writer = xml2rfc.HtmlWriter(xmltree, quiet=True) writer.options.date = today try: - writer.write(txt_path) + writer.write(str(html_path)) except Exception as err: raise XmlRfcError( - "Error generating text format from XML", - xml2rfc_stdout=xml2rfc_stdout.getvalue(), - xml2rfc_stderr=xml2rfc_stderr.getvalue(), + "Error generating HTML format from XML", + xml2rfc_stdout=xml2rfc_logs["stdout"].getvalue(), + xml2rfc_stderr=xml2rfc_logs["stderr"].getvalue(), ) from err log.log( 'In %s: xml2rfc %s generated %s from %s (version %s)' % ( str(xml_path.parent), xml2rfc.__version__, - txt_path.name, + html_path.name, xml_path.name, xml_version, ) ) - # When the blobstores become autoritative - the guard at the - # containing if statement needs to be based on the store - with Path(txt_path).open("rb") as f: - store_file("staging", f"{submission.name}-{submission.rev}.txt", f) - - # --- Convert to html --- - html_path = staging_path(submission.name, submission.rev, '.html') - writer = xml2rfc.HtmlWriter(xmltree, quiet=True) - writer.options.date = today - try: - writer.write(str(html_path)) - except Exception as err: - raise XmlRfcError( - "Error generating HTML format from XML", - xml2rfc_stdout=xml2rfc_stdout.getvalue(), - xml2rfc_stderr=xml2rfc_stderr.getvalue(), - ) from err - log.log( - 'In %s: xml2rfc %s generated %s from %s (version %s)' % ( - str(xml_path.parent), - xml2rfc.__version__, - html_path.name, - xml_path.name, - xml_version, - ) - ) with Path(html_path).open("rb") as f: store_file("staging", f"{submission.name}-{submission.rev}.html", f) diff --git a/ietf/utils/tests.py b/ietf/utils/tests.py index 0a1986a608..2dd861cd11 100644 --- a/ietf/utils/tests.py +++ b/ietf/utils/tests.py @@ -23,6 +23,8 @@ from importlib import import_module from textwrap import dedent from tempfile import mkdtemp +from xml2rfc import log as xml2rfc_log +from xml2rfc.util.date import extract_date as xml2rfc_extract_date from 
django.apps import apps from django.contrib.auth.models import User @@ -57,7 +59,7 @@ from ietf.utils.test_utils import TestCase, unicontent from ietf.utils.text import parse_unicode from ietf.utils.timezone import timezone_not_near_midnight -from ietf.utils.xmldraft import XMLDraft +from ietf.utils.xmldraft import XMLDraft, InvalidMetadataError, capture_xml2rfc_output class SendingMail(TestCase): @@ -544,7 +546,7 @@ def test_get_refs_v2(self): def test_parse_creation_date(self): # override date_today to avoid skew when test runs around midnight today = datetime.date.today() - with patch("ietf.utils.xmldraft.date_today", return_value=today): + with capture_xml2rfc_output(), patch("ietf.utils.xmldraft.date_today", return_value=today): # Note: using a dict as a stand-in for XML elements, which rely on the get() method self.assertEqual( XMLDraft.parse_creation_date({"year": "2022", "month": "11", "day": "24"}), @@ -590,6 +592,74 @@ def test_parse_creation_date(self): ), datetime.date(today.year, 1 if today.month != 1 else 2, 15), ) + # Some exeception-inducing conditions + with self.assertRaises( + InvalidMetadataError, + msg="raise an InvalidMetadataError if a year-only date is not current", + ): + XMLDraft.parse_creation_date( + { + "year": str(today.year - 1), + "month": "", + "day": "", + } + ) + with self.assertRaises( + InvalidMetadataError, + msg="raise an InvalidMetadataError for a non-numeric year" + ): + XMLDraft.parse_creation_date( + { + "year": "two thousand twenty-five", + "month": "2", + "day": "28", + } + ) + with self.assertRaises( + InvalidMetadataError, + msg="raise an InvalidMetadataError for an invalid month" + ): + XMLDraft.parse_creation_date( + { + "year": "2024", + "month": "13", + "day": "28", + } + ) + with self.assertRaises( + InvalidMetadataError, + msg="raise an InvalidMetadataError for a misspelled month" + ): + XMLDraft.parse_creation_date( + { + "year": "2024", + "month": "Oktobur", + "day": "28", + } + ) + with self.assertRaises( + 
InvalidMetadataError, + msg="raise an InvalidMetadataError for an invalid day" + ): + XMLDraft.parse_creation_date( + { + "year": "2024", + "month": "feb", + "day": "31", + } + ) + with self.assertRaises( + InvalidMetadataError, + msg="raise an InvalidMetadataError for a non-numeric day" + ): + XMLDraft.parse_creation_date( + { + "year": "2024", + "month": "feb", + "day": "twenty-four", + } + ) + def test_parse_docname(self): with self.assertRaises(ValueError) as cm: @@ -671,6 +741,39 @@ def test_render_author_name(self): "J. Q.", ) + def test_capture_xml2rfc_output(self): + """capture_xml2rfc_output reroutes and captures xml2rfc logs""" + orig_write_out = xml2rfc_log.write_out + orig_write_err = xml2rfc_log.write_err + with capture_xml2rfc_output() as outer_log_streams: # ensure no output + # such meta! very Inception! + with capture_xml2rfc_output() as inner_log_streams: + # arbitrary xml2rfc method that triggers a log, nothing special otherwise + xml2rfc_extract_date({"year": "fish"}, datetime.date(2025,3,1)) + self.assertNotEqual(inner_log_streams, outer_log_streams) + self.assertEqual(xml2rfc_log.write_out, outer_log_streams["stdout"], "out stream should be restored") + self.assertEqual(xml2rfc_log.write_err, outer_log_streams["stderr"], "err stream should be restored") + self.assertEqual(xml2rfc_log.write_out, orig_write_out, "original out stream should be restored") + self.assertEqual(xml2rfc_log.write_err, orig_write_err, "original err stream should be restored") + + # don't happen to get any output on stdout and not paranoid enough to force some, just test stderr + self.assertGreater(len(inner_log_streams["stderr"].getvalue()), 0, "want output on inner streams") + self.assertEqual(len(outer_log_streams["stdout"].getvalue()), 0, "no output on outer streams") + self.assertEqual(len(outer_log_streams["stderr"].getvalue()), 0, "no output on outer streams") + + def test_capture_xml2rfc_output_exception_handling(self): + """capture_xml2rfc_output restores 
streams after an exception""" + orig_write_out = xml2rfc_log.write_out + orig_write_err = xml2rfc_log.write_err + with capture_xml2rfc_output() as outer_log_streams: # ensure no output + with self.assertRaises(RuntimeError), capture_xml2rfc_output() as inner_log_streams: + raise RuntimeError("nooo") + self.assertNotEqual(inner_log_streams, outer_log_streams) + self.assertEqual(xml2rfc_log.write_out, outer_log_streams["stdout"], "out stream should be restored") + self.assertEqual(xml2rfc_log.write_err, outer_log_streams["stderr"], "err stream should be restored") + self.assertEqual(xml2rfc_log.write_out, orig_write_out, "original out stream should be restored") + self.assertEqual(xml2rfc_log.write_err, orig_write_err, "original err stream should be restored") + class NameTests(TestCase): diff --git a/ietf/utils/xmldraft.py b/ietf/utils/xmldraft.py index 73baf917d8..3ac9a269c7 100644 --- a/ietf/utils/xmldraft.py +++ b/ietf/utils/xmldraft.py @@ -7,7 +7,7 @@ import debug # pyflakes: ignore -from contextlib import ExitStack +from contextlib import contextmanager from lxml.etree import XMLSyntaxError from xml2rfc.util.date import augment_date, extract_date from ietf.utils.timezone import date_today @@ -15,6 +15,21 @@ from .draft import Draft +@contextmanager +def capture_xml2rfc_output(): + orig_write_out = xml2rfc.log.write_out + orig_write_err = xml2rfc.log.write_err + parser_out = io.StringIO() + parser_err = io.StringIO() + xml2rfc.log.write_out = parser_out + xml2rfc.log.write_err = parser_err + try: + yield {"stdout": parser_out, "stderr": parser_err} + finally: + xml2rfc.log.write_out = orig_write_out + xml2rfc.log.write_err = orig_write_err + + class XMLDraft(Draft): """Draft from XML source @@ -38,27 +53,18 @@ def parse_xml(filename): Converts to xml2rfc v3 schema, then returns the root of the v3 tree and the original xml version. 
""" - orig_write_out = xml2rfc.log.write_out - orig_write_err = xml2rfc.log.write_err - parser_out = io.StringIO() - parser_err = io.StringIO() - - with ExitStack() as stack: - @stack.callback - def cleanup(): # called when context exited, even if there's an exception - xml2rfc.log.write_out = orig_write_out - xml2rfc.log.write_err = orig_write_err - - xml2rfc.log.write_out = parser_out - xml2rfc.log.write_err = parser_err + with capture_xml2rfc_output() as parser_logs: parser = xml2rfc.XmlRfcParser(filename, quiet=True) try: tree = parser.parse() except XMLSyntaxError: raise InvalidXMLError() except Exception as e: - raise XMLParseError(parser_out.getvalue(), parser_err.getvalue()) from e + raise XMLParseError( + parser_logs["stdout"].getvalue(), + parser_logs["stderr"].getvalue(), + ) from e xml_version = tree.getroot().get('version', '2') if xml_version == '2': @@ -147,10 +153,31 @@ def get_title(self): def parse_creation_date(date_elt): if date_elt is None: return None + today = date_today() - # ths mimics handling of date elements in the xml2rfc text/html writers - year, month, day = extract_date(date_elt, today) - year, month, day = augment_date(year, month, day, today) + + # Outright reject non-numeric year / day (xml2rfc's extract_date does not do this) + # (n.b., "year" can be non-numeric in a section per RFC 7991) + year = date_elt.get("year") + day = date_elt.get("day") + non_numeric_year = year and not year.isdigit() + non_numeric_day = day and not day.isdigit() + if non_numeric_day or non_numeric_year: + raise InvalidMetadataError( + "Unable to parse the element in the section: " + "year and day must be numeric values if specified." + ) + + try: + # ths mimics handling of date elements in the xml2rfc text/html writers + year, month, day = extract_date(date_elt, today) + year, month, day = augment_date(year, month, day, today) + except Exception as err: + # Give a generic error if anything goes wrong so far... 
+ raise InvalidMetadataError( + "Unable to parse the element in the section." + ) from err + if not day: # Must choose a day for a datetime.date. Per RFC 7991 sect 2.17, we use # today's date if it is consistent with the rest of the date. Otherwise, @@ -159,9 +186,12 @@ def parse_creation_date(date_elt): day = today.day else: day = 15 + try: creation_date = datetime.date(year, month, day) except Exception: + # If everything went well, we should have had a valid datetime, but we didn't. + # The parsing _worked_ but not in a way that we can go forward with. raise InvalidMetadataError( "The element in the section specified an incomplete date " "that was not consistent with today's date. If you specify only a year, " From 232a861f8ae52e1026d59d7088f07211acd166a5 Mon Sep 17 00:00:00 2001 From: Jennifer Richards Date: Mon, 3 Mar 2025 14:51:14 -0400 Subject: [PATCH 03/44] chore: config gunicorn secure_scheme_headers (#8632) * chore: config gunicorn secure_scheme_headers * chore: typo in comment --- dev/build/gunicorn.conf.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/dev/build/gunicorn.conf.py b/dev/build/gunicorn.conf.py index 6666a0d37d..032d95ee0d 100644 --- a/dev/build/gunicorn.conf.py +++ b/dev/build/gunicorn.conf.py @@ -1,5 +1,11 @@ # Copyright The IETF Trust 2024, All Rights Reserved +# Configure security scheme headers for forwarded requests. Cloudflare sets X-Forwarded-Proto +# for us. Don't trust any of the other similar headers. Only trust the header if it's coming +# from localhost, as all legitimate traffic will reach gunicorn via co-located nginx. +secure_scheme_headers = {"X-FORWARDED-PROTO": "https"} +forwarded_allow_ips = "127.0.0.1, ::1" # this is the default + # Log as JSON on stdout (to distinguish from Django's logs on stderr) # # This is applied as an update to gunicorn's glogging.CONFIG_DEFAULTS. 
From 554182ef8ab33947ca8d9ee904a5d5472d3c57f8 Mon Sep 17 00:00:00 2001 From: Robert Sparks Date: Tue, 4 Mar 2025 11:42:04 -0600 Subject: [PATCH 04/44] feat: run the docker container as dev (#8606) * feat: run the docker container as dev * fix: $@ -> $* Old bug, but might as well fix it now --------- Co-authored-by: Jennifer Richards --- dev/celery/docker-init.sh | 13 ++++++--- docker-compose.yml | 4 ++- docker/celery.Dockerfile | 60 +++++++++++++++++++++++++++++++++++++++ 3 files changed, 72 insertions(+), 5 deletions(-) create mode 100644 docker/celery.Dockerfile diff --git a/dev/celery/docker-init.sh b/dev/celery/docker-init.sh index 4fd1f1294f..9940dfd7d0 100755 --- a/dev/celery/docker-init.sh +++ b/dev/celery/docker-init.sh @@ -49,11 +49,16 @@ if [[ -n "${CELERY_GID}" ]]; then fi run_as_celery_uid () { - SU_OPTS=() - if [[ -n "${CELERY_GROUP}" ]]; then - SU_OPTS+=("-g" "${CELERY_GROUP}") + IAM=$(whoami) + if [ "${IAM}" = "${CELERY_USERNAME:-root}" ]; then + SU_OPTS=() + if [[ -n "${CELERY_GROUP}" ]]; then + SU_OPTS+=("-g" "${CELERY_GROUP}") + fi + su "${SU_OPTS[@]}" "${CELERY_USERNAME:-root}" -s /bin/sh -c "$*" + else + /bin/sh -c "$*" fi - su "${SU_OPTS[@]}" "${CELERY_USERNAME:-root}" -s /bin/sh -c "$@" } log_term_timing_msgs () { diff --git a/docker-compose.yml b/docker-compose.yml index 30ce8ba4d2..9910c02a99 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -67,7 +67,9 @@ services: restart: unless-stopped celery: - image: ghcr.io/ietf-tools/datatracker-celery:latest + build: + context: . + dockerfile: docker/celery.Dockerfile init: true environment: CELERY_APP: ietf diff --git a/docker/celery.Dockerfile b/docker/celery.Dockerfile new file mode 100644 index 0000000000..e44200398c --- /dev/null +++ b/docker/celery.Dockerfile @@ -0,0 +1,60 @@ +FROM ghcr.io/ietf-tools/datatracker-celery:latest +LABEL maintainer="IETF Tools Team " + +ENV DEBIAN_FRONTEND=noninteractive + +# Install needed packages and setup non-root user. 
+ARG USERNAME=dev +ARG USER_UID=1000 +ARG USER_GID=$USER_UID +COPY docker/scripts/app-setup-debian.sh /tmp/library-scripts/docker-setup-debian.sh +RUN sed -i 's/\r$//' /tmp/library-scripts/docker-setup-debian.sh && chmod +x /tmp/library-scripts/docker-setup-debian.sh + +# Add Postgresql Apt Repository to get 14 +RUN echo "deb http://apt.postgresql.org/pub/repos/apt $(. /etc/os-release && echo "$VERSION_CODENAME")-pgdg main" | tee /etc/apt/sources.list.d/pgdg.list +RUN wget --quiet -O - https://www.postgresql.org/media/keys/ACCC4CF8.asc | apt-key add - + +RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \ + && apt-get install -y --no-install-recommends postgresql-client-14 pgloader \ + # Remove imagemagick due to https://security-tracker.debian.org/tracker/CVE-2019-10131 + && apt-get purge -y imagemagick imagemagick-6-common \ + # Install common packages, non-root user + # Syntax: ./docker-setup-debian.sh [install zsh flag] [username] [user UID] [user GID] [upgrade packages flag] [install Oh My Zsh! 
flag] [Add non-free packages] + && bash /tmp/library-scripts/docker-setup-debian.sh "true" "${USERNAME}" "${USER_UID}" "${USER_GID}" "false" "true" "true" + +# Setup default python tools in a venv via pipx to avoid conflicts +ENV PIPX_HOME=/usr/local/py-utils \ + PIPX_BIN_DIR=/usr/local/py-utils/bin +ENV PATH=${PATH}:${PIPX_BIN_DIR} +COPY docker/scripts/app-setup-python.sh /tmp/library-scripts/docker-setup-python.sh +RUN sed -i 's/\r$//' /tmp/library-scripts/docker-setup-python.sh && chmod +x /tmp/library-scripts/docker-setup-python.sh +RUN bash /tmp/library-scripts/docker-setup-python.sh "none" "/usr/local" "${PIPX_HOME}" "${USERNAME}" + +# Remove library scripts for final image +RUN rm -rf /tmp/library-scripts + +# Copy the startup file +COPY dev/celery/docker-init.sh /docker-init.sh +RUN sed -i 's/\r$//' /docker-init.sh && \ + chmod +x /docker-init.sh + +ENTRYPOINT [ "/docker-init.sh" ] + +# Fix user UID / GID to match host +RUN groupmod --gid $USER_GID $USERNAME \ + && usermod --uid $USER_UID --gid $USER_GID $USERNAME \ + && chown -R $USER_UID:$USER_GID /home/$USERNAME \ + || exit 0 + +# Switch to local dev user +USER dev:dev + +# Install current datatracker python dependencies +COPY requirements.txt /tmp/pip-tmp/ +RUN pip3 --disable-pip-version-check --no-cache-dir install --user --no-warn-script-location -r /tmp/pip-tmp/requirements.txt +RUN pip3 --disable-pip-version-check --no-cache-dir install --user --no-warn-script-location watchdog[watchmedo] + +RUN sudo rm -rf /tmp/pip-tmp + +VOLUME [ "/assets" ] + From cf6340443f7437ac23ff65c981741335a0911363 Mon Sep 17 00:00:00 2001 From: Nicolas Giard Date: Sat, 8 Mar 2025 00:56:14 -0500 Subject: [PATCH 05/44] docs: Update README.md --- README.md | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/README.md b/README.md index 0ece0eb03b..abebb7ca02 100644 --- a/README.md +++ b/README.md @@ -44,6 +44,7 @@ This project is following the standard **Git Feature Workflow** development model. 
Learn about all the various steps of the development workflow, from creating a fork to submitting a pull request, in the [Contributing](https://github.com/ietf-tools/.github/blob/main/CONTRIBUTING.md) guide. +> [!TIP] > Make sure to read the [Styleguides](https://github.com/ietf-tools/.github/blob/main/CONTRIBUTING.md#styleguides) section to ensure a cohesive code format across the project. You can submit bug reports, enhancement and new feature requests in the [discussions](https://github.com/ietf-tools/datatracker/discussions) area. Accepted tickets will be converted to issues. @@ -52,7 +53,8 @@ You can submit bug reports, enhancement and new feature requests in the [discuss Click the Fork button in the top-right corner of the repository to create a personal copy that you can work on. -> Note that some GitHub Actions might be enabled by default in your fork. You should disable them by going to **Settings** > **Actions** > **General** and selecting **Disable actions** (then Save). +> [!NOTE] +> Some GitHub Actions might be enabled by default in your fork. You should disable them by going to **Settings** > **Actions** > **General** and selecting **Disable actions** (then Save). #### Git Cloning Tips @@ -104,7 +106,8 @@ Read the [Docker Dev Environment](docker/README.md) guide to get started. Nightly database dumps of the datatracker are available as Docker images: `ghcr.io/ietf-tools/datatracker-db:latest` -> Note that to update the database in your dev environment to the latest version, you should run the `docker/cleandb` script. +> [!TIP] +> In order to update the database in your dev environment to the latest version, you should run the `docker/cleandb` script. ### Blob storage for dev/test @@ -248,6 +251,7 @@ From a datatracker container, run the command: ./ietf/manage.py test --settings=settings_test ``` +> [!TIP] > You can limit the run to specific tests using the `--pattern` argument. 
### Frontend Tests @@ -257,11 +261,13 @@ Frontend tests are done via Playwright. There're 2 different type of tests: - Tests that test Vue pages / components and run natively without any external dependency. - Tests that require a running datatracker instance to test against (usually legacy views). +> [!IMPORTANT] > Make sure you have Node.js 16.x or later installed on your machine. #### Run Vue Tests -> :warning: All commands below **MUST** be run from the `./playwright` directory, unless noted otherwise. +> [!WARNING] +> All commands below **MUST** be run from the `./playwright` directory, unless noted otherwise. 1. Run **once** to install dependencies on your system: ```sh @@ -294,7 +300,8 @@ Frontend tests are done via Playwright. There're 2 different type of tests: First, you need to start a datatracker instance (dev or prod), ideally from a docker container, exposing the 8000 port. -> :warning: All commands below **MUST** be run from the `./playwright` directory. +> [!WARNING] +> All commands below **MUST** be run from the `./playwright` directory. 1. Run **once** to install dependencies on your system: ```sh From cf21c4129a3d083980297dcaa82b5fd58bf447f6 Mon Sep 17 00:00:00 2001 From: Nicolas Giard Date: Sat, 8 Mar 2025 00:59:26 -0500 Subject: [PATCH 06/44] docs: Update README.md --- docker/README.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docker/README.md b/docker/README.md index 14fcc38995..f2161a173f 100644 --- a/docker/README.md +++ b/docker/README.md @@ -4,11 +4,12 @@ 1. [Set up Docker](https://docs.docker.com/get-started/) on your preferred platform. On Windows, it is highly recommended to use the [WSL 2 *(Windows Subsystem for Linux)*](https://docs.docker.com/desktop/windows/wsl/) backend. +> [!IMPORTANT] > See the [IETF Tools Windows Dev guide](https://github.com/ietf-tools/.github/blob/main/docs/windows-dev.md) on how to get started when using Windows. 2. 
On Linux, you must [install Docker Compose manually](https://docs.docker.com/compose/install/linux/#install-the-plugin-manually) and not install Docker Desktop. On Mac and Windows install Docker Desktop which already includes Docker Compose. -2. If you have a copy of the datatracker code checked out already, simply `cd` to the top-level directory. +3. If you have a copy of the datatracker code checked out already, simply `cd` to the top-level directory. If not, check out a datatracker branch as usual. We'll check out `main` below, but you can use any branch: @@ -18,7 +19,7 @@ git checkout main ``` -3. Follow the instructions for your preferred editor: +4. Follow the instructions for your preferred editor: - [Visual Studio Code](#using-visual-studio-code) - [Other Editors / Generic](#using-other-editors--generic) @@ -189,7 +190,6 @@ The content of the source files will be copied into the target `.ics` files. Mak Because including all assets in the image would significantly increase the file size, they are not included by default. 
You can however fetch them by running the **Fetch assets via rsync** task in VS Code or run manually the script `docker/scripts/app-rsync-extras.sh` - ### Linux file permissions leaking to the host system If on the host filesystem you have permissions that look like this, From 06158c05c77b89434197a626d7cfcf04b8b1e14a Mon Sep 17 00:00:00 2001 From: Nicolas Giard Date: Mon, 10 Mar 2025 16:28:20 -0400 Subject: [PATCH 07/44] chore: Remove deprecated version from docker-compose.extend.yml --- .devcontainer/docker-compose.extend.yml | 2 -- 1 file changed, 2 deletions(-) diff --git a/.devcontainer/docker-compose.extend.yml b/.devcontainer/docker-compose.extend.yml index 286eefb29c..a92f42bc6d 100644 --- a/.devcontainer/docker-compose.extend.yml +++ b/.devcontainer/docker-compose.extend.yml @@ -1,5 +1,3 @@ -version: '3.8' - services: app: environment: From 887ec11f3916d19da04dd939a0aa2edd697f91fd Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 15 Mar 2025 00:43:17 -0400 Subject: [PATCH 08/44] chore(deps): bump ncipollo/release-action from 1.15.0 to 1.16.0 (#8594) Bumps [ncipollo/release-action](https://github.com/ncipollo/release-action) from 1.15.0 to 1.16.0. - [Release notes](https://github.com/ncipollo/release-action/releases) - [Commits](https://github.com/ncipollo/release-action/compare/v1.15.0...v1.16.0) --- updated-dependencies: - dependency-name: ncipollo/release-action dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/build.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index a14ea73e9b..9f621e16b7 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -97,7 +97,7 @@ jobs: echo "IS_RELEASE=true" >> $GITHUB_ENV - name: Create Draft Release - uses: ncipollo/release-action@v1.15.0 + uses: ncipollo/release-action@v1.16.0 if: ${{ github.ref_name == 'release' }} with: prerelease: true @@ -316,7 +316,7 @@ jobs: histCoveragePath: historical-coverage.json - name: Create Release - uses: ncipollo/release-action@v1.15.0 + uses: ncipollo/release-action@v1.16.0 if: ${{ env.SHOULD_DEPLOY == 'true' }} with: allowUpdates: true @@ -329,7 +329,7 @@ jobs: token: ${{ secrets.GITHUB_TOKEN }} - name: Update Baseline Coverage - uses: ncipollo/release-action@v1.15.0 + uses: ncipollo/release-action@v1.16.0 if: ${{ github.event.inputs.updateCoverage == 'true' || github.ref_name == 'release' }} with: allowUpdates: true From e56c6cae272cb482a6d33f1f7e24c7ba8a48361d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 15 Mar 2025 00:43:47 -0400 Subject: [PATCH 09/44] chore(deps): bump actions/download-artifact from 4.1.8 to 4.1.9 (#8628) Bumps [actions/download-artifact](https://github.com/actions/download-artifact) from 4.1.8 to 4.1.9. - [Release notes](https://github.com/actions/download-artifact/releases) - [Commits](https://github.com/actions/download-artifact/compare/v4.1.8...v4.1.9) --- updated-dependencies: - dependency-name: actions/download-artifact dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/build.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 9f621e16b7..123bd5c65a 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -185,7 +185,7 @@ jobs: - name: Download a Coverage Results if: ${{ github.event.inputs.skiptests == 'false' || github.ref_name == 'release' }} - uses: actions/download-artifact@v4.1.8 + uses: actions/download-artifact@v4.1.9 with: name: coverage @@ -292,7 +292,7 @@ jobs: - name: Download Coverage Results if: ${{ github.event.inputs.skiptests == 'false' || github.ref_name == 'release' }} - uses: actions/download-artifact@v4.1.8 + uses: actions/download-artifact@v4.1.9 with: name: coverage @@ -407,7 +407,7 @@ jobs: - uses: actions/checkout@v4 - name: Download a Release Artifact - uses: actions/download-artifact@v4.1.8 + uses: actions/download-artifact@v4.1.9 with: name: release-${{ env.PKG_VERSION }} From 9db109f692d8592c5407ea82d2c2790eb6006263 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 15 Mar 2025 00:44:18 -0400 Subject: [PATCH 10/44] chore(deps): bump appleboy/ssh-action from 1.2.0 to 1.2.2 (#8650) Bumps [appleboy/ssh-action](https://github.com/appleboy/ssh-action) from 1.2.0 to 1.2.2. - [Release notes](https://github.com/appleboy/ssh-action/releases) - [Changelog](https://github.com/appleboy/ssh-action/blob/master/.goreleaser.yaml) - [Commits](https://github.com/appleboy/ssh-action/compare/7eaf76671a0d7eec5d98ee897acda4f968735a17...2ead5e36573f08b82fbfce1504f1a4b05a647c6f) --- updated-dependencies: - dependency-name: appleboy/ssh-action dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/tests-az.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/tests-az.yml b/.github/workflows/tests-az.yml index 6d53a121aa..8553563a19 100644 --- a/.github/workflows/tests-az.yml +++ b/.github/workflows/tests-az.yml @@ -38,7 +38,7 @@ jobs: ssh-keyscan -t rsa $vminfo >> ~/.ssh/known_hosts - name: Remote SSH into VM - uses: appleboy/ssh-action@7eaf76671a0d7eec5d98ee897acda4f968735a17 + uses: appleboy/ssh-action@2ead5e36573f08b82fbfce1504f1a4b05a647c6f env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} with: From 227b44bfa25036e7d4fea86a72405beb4e53e2ce Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 15 Mar 2025 00:45:42 -0400 Subject: [PATCH 11/44] chore(deps): bump nanoid (#8649) Bumps the npm group with 1 update in the /dev/deploy-to-container directory: [nanoid](https://github.com/ai/nanoid). Updates `nanoid` from 5.0.9 to 5.1.3 - [Release notes](https://github.com/ai/nanoid/releases) - [Changelog](https://github.com/ai/nanoid/blob/main/CHANGELOG.md) - [Commits](https://github.com/ai/nanoid/compare/5.0.9...5.1.3) --- updated-dependencies: - dependency-name: nanoid dependency-type: direct:production update-type: version-update:semver-minor dependency-group: npm ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- dev/deploy-to-container/package-lock.json | 15 ++++++++------- dev/deploy-to-container/package.json | 2 +- 2 files changed, 9 insertions(+), 8 deletions(-) diff --git a/dev/deploy-to-container/package-lock.json b/dev/deploy-to-container/package-lock.json index 03327083b1..c4f675527e 100644 --- a/dev/deploy-to-container/package-lock.json +++ b/dev/deploy-to-container/package-lock.json @@ -8,7 +8,7 @@ "dependencies": { "dockerode": "^4.0.4", "fs-extra": "^11.3.0", - "nanoid": "5.0.9", + "nanoid": "5.1.3", "nanoid-dictionary": "5.0.0-beta.1", "slugify": "1.6.6", "tar": "^7.4.3", @@ -668,15 +668,16 @@ "optional": true }, "node_modules/nanoid": { - "version": "5.0.9", - "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-5.0.9.tgz", - "integrity": "sha512-Aooyr6MXU6HpvvWXKoVoXwKMs/KyVakWwg7xQfv5/S/RIgJMy0Ifa45H9qqYy7pTCszrHzP21Uk4PZq2HpEM8Q==", + "version": "5.1.3", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-5.1.3.tgz", + "integrity": "sha512-zAbEOEr7u2CbxwoMRlz/pNSpRP0FdAU4pRaYunCdEezWohXFs+a0Xw7RfkKaezMsmSM1vttcLthJtwRnVtOfHQ==", "funding": [ { "type": "github", "url": "https://github.com/sponsors/ai" } ], + "license": "MIT", "bin": { "nanoid": "bin/nanoid.js" }, @@ -1612,9 +1613,9 @@ "optional": true }, "nanoid": { - "version": "5.0.9", - "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-5.0.9.tgz", - "integrity": "sha512-Aooyr6MXU6HpvvWXKoVoXwKMs/KyVakWwg7xQfv5/S/RIgJMy0Ifa45H9qqYy7pTCszrHzP21Uk4PZq2HpEM8Q==" + "version": "5.1.3", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-5.1.3.tgz", + "integrity": "sha512-zAbEOEr7u2CbxwoMRlz/pNSpRP0FdAU4pRaYunCdEezWohXFs+a0Xw7RfkKaezMsmSM1vttcLthJtwRnVtOfHQ==" }, "nanoid-dictionary": { "version": "5.0.0-beta.1", diff --git a/dev/deploy-to-container/package.json b/dev/deploy-to-container/package.json index 1f54745ebf..8772c568c8 100644 --- 
a/dev/deploy-to-container/package.json +++ b/dev/deploy-to-container/package.json @@ -4,7 +4,7 @@ "dependencies": { "dockerode": "^4.0.4", "fs-extra": "^11.3.0", - "nanoid": "5.0.9", + "nanoid": "5.1.3", "nanoid-dictionary": "5.0.0-beta.1", "slugify": "1.6.6", "tar": "^7.4.3", From 968820de34bdf6d0c1fd69b921ed490da725b941 Mon Sep 17 00:00:00 2001 From: Jennifer Richards Date: Sat, 15 Mar 2025 14:45:04 +0700 Subject: [PATCH 12/44] feat: celery task + admin to resend Messages (#8661) * feat: Message re-send task * feat: admin action to queue redelivery * feat: MessageAdmin list_filters * feat: show sent status * feat: better date filtering * chore: remove send-by-date task Adds complexity and risk - the improved Messages admin lets us do most of what it did without the opportunity for accidentally resending huge ranges * chore: fill in empty docstring * style: black * fix: unused import * feat: better logging * chore: mypy lint * test: test retry_send_messages_by_pk_task * test: test retry_send_messages --- ietf/message/admin.py | 79 +++++++++++++++++++++++++++++++--- ietf/message/tasks.py | 24 ++++++++++- ietf/message/tests.py | 59 +++++++++++++++++++++++-- ietf/message/utils.py | 36 ++++++++++++++-- ietf/settings.py | 1 + ietf/templates/admin/base.html | 1 + requirements.txt | 1 + 7 files changed, 187 insertions(+), 14 deletions(-) diff --git a/ietf/message/admin.py b/ietf/message/admin.py index c2564c04b9..250e1eb596 100644 --- a/ietf/message/admin.py +++ b/ietf/message/admin.py @@ -1,32 +1,99 @@ -from django.contrib import admin +# Copyright The IETF Trust 2012-2025, All Rights Reserved +from django.contrib import admin, messages +from django.db.models import QuerySet +from rangefilter.filters import DateRangeQuickSelectListFilterBuilder from ietf.message.models import Message, MessageAttachment, SendQueue, AnnouncementFrom +from ietf.message.tasks import retry_send_messages_by_pk_task + + +class MessageSentStatusListFilter(admin.SimpleListFilter): + """Filter 
Messages by whether or not they were sent""" + + title = "status" + parameter_name = "status" + + def lookups(self, request, model_admin): + return [ + ("sent", "Sent"), + ("unsent", "Not sent"), + ] + + def queryset(self, request, queryset): + if self.value() == "unsent": + return queryset.filter(sent__isnull=True) + elif self.value() == "sent": + return queryset.filter(sent__isnull=False) + class MessageAdmin(admin.ModelAdmin): - list_display = ["subject", "by", "time", "groups"] + list_display = ["sent_status", "subject", "by", "time", "groups"] search_fields = ["subject", "body"] raw_id_fields = ["by", "related_groups", "related_docs"] + list_filter = [ + MessageSentStatusListFilter, + ("time", DateRangeQuickSelectListFilterBuilder()), + ] ordering = ["-time"] + actions = ["retry_send"] def groups(self, instance): return ", ".join(g.acronym for g in instance.related_groups.all()) + + @admin.display(description="Sent", boolean=True) + def sent_status(self, instance): + return instance.sent is not None + + @admin.action(description="Send selected messages if unsent") + def retry_send(self, request, queryset: QuerySet[Message]): + try: + retry_send_messages_by_pk_task.delay( + message_pks=list(queryset.values_list("pk", flat=True)), + resend=False, + ) + except Exception as err: + self.message_user( + request, + f"Error: {repr(err)}", + messages.ERROR, + ) + else: + self.message_user(request, "Messages queued for delivery", messages.SUCCESS) + + admin.site.register(Message, MessageAdmin) + class MessageAttachmentAdmin(admin.ModelAdmin): - list_display = ['id', 'message', 'filename', 'removed',] - raw_id_fields = ['message'] + list_display = [ + "id", + "message", + "filename", + "removed", + ] + raw_id_fields = ["message"] + + admin.site.register(MessageAttachment, MessageAttachmentAdmin) + class SendQueueAdmin(admin.ModelAdmin): list_display = ["time", "by", "message", "send_at", "sent_at"] list_filter = ["time", "send_at", "sent_at"] search_fields = 
["message__body"] raw_id_fields = ["by", "message"] ordering = ["-time"] + + admin.site.register(SendQueue, SendQueueAdmin) + class AnnouncementFromAdmin(admin.ModelAdmin): - list_display = ['name', 'group', 'address', ] -admin.site.register(AnnouncementFrom, AnnouncementFromAdmin) + list_display = [ + "name", + "group", + "address", + ] +admin.site.register(AnnouncementFrom, AnnouncementFromAdmin) diff --git a/ietf/message/tasks.py b/ietf/message/tasks.py index efd776b9d8..1fdff7bea4 100644 --- a/ietf/message/tasks.py +++ b/ietf/message/tasks.py @@ -5,8 +5,8 @@ from celery import shared_task from smtplib import SMTPException -from ietf.message.utils import send_scheduled_message_from_send_queue -from ietf.message.models import SendQueue +from ietf.message.utils import send_scheduled_message_from_send_queue, retry_send_messages +from ietf.message.models import SendQueue, Message from ietf.utils import log from ietf.utils.mail import log_smtp_exception, send_error_email @@ -25,3 +25,23 @@ def send_scheduled_mail_task(): except SMTPException as e: log_smtp_exception(e) send_error_email(e) + + +@shared_task +def retry_send_messages_by_pk_task(message_pks: list, resend=False): + """Task to retry sending Messages by PK + + Sends Messages whose PK is included in the list. + Only previously unsent messages are sent unless `resend` is true. 
+ """ + log.log( + "retry_send_messages_by_pk_task: " + "retrying send of Message PKs [{}] (resend={})".format( + ", ".join(str(pk) for pk in message_pks), + resend, + ) + ) + retry_send_messages( + messages=Message.objects.filter(pk__in=message_pks), + resend=resend, + ) diff --git a/ietf/message/tests.py b/ietf/message/tests.py index 7fbd29167c..a677d5477e 100644 --- a/ietf/message/tests.py +++ b/ietf/message/tests.py @@ -11,10 +11,10 @@ import debug # pyflakes:ignore from ietf.group.factories import GroupFactory -from ietf.message.factories import SendQueueFactory +from ietf.message.factories import MessageFactory, SendQueueFactory from ietf.message.models import Message, SendQueue -from ietf.message.tasks import send_scheduled_mail_task -from ietf.message.utils import send_scheduled_message_from_send_queue +from ietf.message.tasks import send_scheduled_mail_task, retry_send_messages_by_pk_task +from ietf.message.utils import send_scheduled_message_from_send_queue, retry_send_messages from ietf.person.models import Person from ietf.utils.mail import outbox, send_mail_text, send_mail_message, get_payload_text from ietf.utils.test_utils import TestCase @@ -133,6 +133,44 @@ def test_send_mime_announcement(self): self.assertTrue(SendQueue.objects.get(id=q.id).sent_at) +class UtilsTests(TestCase): + @mock.patch("ietf.message.utils.send_mail_message") + def test_retry_send_messages(self, mock_send_mail_message): + sent_message = MessageFactory(sent=timezone.now()) + unsent_messages = MessageFactory.create_batch(2, sent=None) + + # Send the sent message and one of the unsent messages + retry_send_messages( + Message.objects.filter(pk__in=[ + sent_message.pk, + unsent_messages[0].pk, + ]), + resend=False, + ) + self.assertEqual(mock_send_mail_message.call_count, 1) + self.assertEqual( + mock_send_mail_message.call_args.args[1], + unsent_messages[0], + ) + + mock_send_mail_message.reset_mock() + # Once again, send the sent message and one of the unsent messages + # (we 
can use the same one because our mock prevented it from having + # its status updated to sent) + retry_send_messages( + Message.objects.filter(pk__in=[ + sent_message.pk, + unsent_messages[0].pk, + ]), + resend=True, + ) + self.assertEqual(mock_send_mail_message.call_count, 2) + self.assertCountEqual( + [call_args.args[1] for call_args in mock_send_mail_message.call_args_list], + [sent_message, unsent_messages[0]], + ) + + class TaskTests(TestCase): @mock.patch("ietf.message.tasks.log_smtp_exception") @mock.patch("ietf.message.tasks.send_scheduled_message_from_send_queue") @@ -150,3 +188,18 @@ def test_send_scheduled_mail_task(self, mock_send_message, mock_log_smtp_excepti self.assertEqual(mock_send_message.call_count, 1) self.assertEqual(mock_send_message.call_args[0], (not_yet_sent,)) self.assertTrue(mock_log_smtp_exception.called) + + @mock.patch("ietf.message.tasks.retry_send_messages") + def test_retry_send_messages_by_pk_task(self, mock_retry_send): + msgs = MessageFactory.create_batch(3) + MessageFactory() # an extra message that won't be resent + + retry_send_messages_by_pk_task([msg.pk for msg in msgs], resend=False) + called_with_messages = mock_retry_send.call_args.kwargs["messages"] + self.assertCountEqual(msgs, called_with_messages) + self.assertFalse(mock_retry_send.call_args.kwargs["resend"]) + + retry_send_messages_by_pk_task([msg.pk for msg in msgs], resend=True) + called_with_messages = mock_retry_send.call_args.kwargs["messages"] + self.assertCountEqual(msgs, called_with_messages) + self.assertTrue(mock_retry_send.call_args.kwargs["resend"]) diff --git a/ietf/message/utils.py b/ietf/message/utils.py index 2601eccab8..74448ca7c9 100644 --- a/ietf/message/utils.py +++ b/ietf/message/utils.py @@ -1,13 +1,17 @@ # Copyright The IETF Trust 2012-2020, All Rights Reserved # -*- coding: utf-8 -*- +import email +import email.utils +import re +import smtplib -import re, email - +from django.db.models import QuerySet from django.utils import timezone from 
django.utils.encoding import force_str -from ietf.utils.mail import send_mail_text, send_mail_mime +from ietf.utils import log +from ietf.utils.mail import send_mail_text, send_mail_mime, send_mail_message from ietf.message.models import Message first_dot_on_line_re = re.compile(r'^\.', re.MULTILINE) @@ -58,3 +62,29 @@ def send_scheduled_message_from_send_queue(queue_item): queue_item.message.sent = queue_item.sent_at queue_item.message.save() + + +def retry_send_messages(messages: QuerySet[Message], resend=False): + """Attempt delivery of Messages""" + if not resend: + # only include sent messages on explicit request + for already_sent in messages.filter(sent__isnull=False): + assert already_sent.sent is not None # appease mypy type checking + log.log( + f"retry_send_messages: skipping {already_sent.pk} " + f"(already sent {already_sent.sent.isoformat(timespec='milliseconds')})" + ) + messages = messages.filter(sent__isnull=True) + for msg in messages: + to = ",".join(a[1] for a in email.utils.getaddresses([msg.to])) + try: + send_mail_message(None, msg) + log.log( + f'retry_send_messages: ' + f'sent {msg.pk} {msg.frm} -> {to} "{msg.subject.strip()}"' + ) + except smtplib.SMTPException as e: + log.log( + f'retry_send_messages: ' + f'Failure {e}: {msg.pk} {msg.frm} -> {to} "{msg.subject.strip()}"' + ) diff --git a/ietf/settings.py b/ietf/settings.py index faee42237c..1fe5f48229 100644 --- a/ietf/settings.py +++ b/ietf/settings.py @@ -465,6 +465,7 @@ def skip_unreadable_post(record): 'drf_spectacular', 'drf_standardized_errors', 'rest_framework', + 'rangefilter', 'simple_history', 'tastypie', 'widget_tweaks', diff --git a/ietf/templates/admin/base.html b/ietf/templates/admin/base.html index 9ca7377a54..d48891dfc4 100644 --- a/ietf/templates/admin/base.html +++ b/ietf/templates/admin/base.html @@ -20,6 +20,7 @@ --header-color: var(--bs-secondary); --breadcrumbs-fg: var(--bs-secondary); --breadcrumbs-link-fg: var(--link-fg); + .calendar caption { background-color: 
var(--secondary);} } span.text-danger { color: var(--bs-danger); } diff --git a/requirements.txt b/requirements.txt index d8b6e0742f..8bd906c220 100644 --- a/requirements.txt +++ b/requirements.txt @@ -13,6 +13,7 @@ celery>=5.2.6 coverage>=4.5.4,<5.0 # Coverage 5.x moves from a json database to SQLite. Moving to 5.x will require substantial rewrites in ietf.utils.test_runner and ietf.release.views defusedxml>=0.7.1 # for TastyPie when using xml; not a declared dependency Django>4.2,<5 +django-admin-rangefilter>=0.13.2 django-analytical>=3.1.0 django-bootstrap5>=21.3 django-celery-beat>=2.3.0 From 48211414dfc992e4a51f0d2de9183eab5c44dad0 Mon Sep 17 00:00:00 2001 From: rjsparks <10996692+rjsparks@users.noreply.github.com> Date: Sat, 15 Mar 2025 07:58:06 +0000 Subject: [PATCH 13/44] ci: update base image target version to 20250315T0745 --- dev/build/Dockerfile | 2 +- dev/build/TARGET_BASE | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/dev/build/Dockerfile b/dev/build/Dockerfile index 852ba43c80..2b02a091c5 100644 --- a/dev/build/Dockerfile +++ b/dev/build/Dockerfile @@ -1,4 +1,4 @@ -FROM ghcr.io/ietf-tools/datatracker-app-base:20250128T1728 +FROM ghcr.io/ietf-tools/datatracker-app-base:20250315T0745 LABEL maintainer="IETF Tools Team " ENV DEBIAN_FRONTEND=noninteractive diff --git a/dev/build/TARGET_BASE b/dev/build/TARGET_BASE index fbc9426744..e6f490b168 100644 --- a/dev/build/TARGET_BASE +++ b/dev/build/TARGET_BASE @@ -1 +1 @@ -20250128T1728 +20250315T0745 From 200d2bd164224694c0524e734ea6e46b62f3998a Mon Sep 17 00:00:00 2001 From: Russ Housley Date: Sat, 15 Mar 2025 04:23:35 -0400 Subject: [PATCH 14/44] fix(review_info template): display correct date for I-D to be reviewed --- ietf/templates/doc/review/request_info.html | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/ietf/templates/doc/review/request_info.html b/ietf/templates/doc/review/request_info.html index ee46916b43..9ad126d59e 100644 --- 
a/ietf/templates/doc/review/request_info.html +++ b/ietf/templates/doc/review/request_info.html @@ -96,7 +96,7 @@ {% endif %} - {% if doc.time %} + {% if review_req.doc.time %} @@ -104,7 +104,10 @@ I-D last updated - {{ doc.time|date:"Y-m-d" }} + {{ review_req.doc.time|date:"Y-m-d" }} + {% if review_req.doc.pub_date %} + (Latest revision {{ review_req.doc.pub_date|date:"Y-m-d" }}) + {% endif %} {% endif %} From 603938a9b6438ce78caea13272e61c0e15546478 Mon Sep 17 00:00:00 2001 From: Rich Salz Date: Sat, 15 Mar 2025 05:19:12 -0400 Subject: [PATCH 15/44] chore: Add IETF to "Last Call Expired" email Subject (#8675) No need to change the test. Fixes: #8526 --- ietf/doc/mails.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ietf/doc/mails.py b/ietf/doc/mails.py index c1e2074bc0..ddecbb6b54 100644 --- a/ietf/doc/mails.py +++ b/ietf/doc/mails.py @@ -568,7 +568,7 @@ def email_last_call_expired(doc): send_mail(None, addrs.to, "DraftTracker Mail System ", - "Last Call Expired: %s" % doc.file_tag(), + "IETF Last Call Expired: %s" % doc.file_tag(), "doc/mail/change_notice.txt", dict(text=text, doc=doc, From 9eb5b2fa8e25c68ec927b82008d61c8e38be354c Mon Sep 17 00:00:00 2001 From: Nicolas Giard Date: Sat, 15 Mar 2025 05:23:16 -0400 Subject: [PATCH 16/44] fix(agenda): always render session row if rendering a new date row when filtering (#8672) --- client/agenda/AgendaScheduleList.vue | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/client/agenda/AgendaScheduleList.vue b/client/agenda/AgendaScheduleList.vue index 905677b4da..ab0f6e0184 100644 --- a/client/agenda/AgendaScheduleList.vue +++ b/client/agenda/AgendaScheduleList.vue @@ -253,6 +253,7 @@ const meetingEvents = computed(() => { // -> Add date row const itemDate = DateTime.fromISO(item.adjustedStartDate) + let willRenderDateRow = false if (itemDate.toISODate() !== acc.lastDate) { acc.result.push({ id: item.id, @@ -262,12 +263,13 @@ const meetingEvents = computed(() => { date: 
itemDate.toLocaleString(DateTime.DATE_HUGE), cssClasses: 'agenda-table-display-day' }) + willRenderDateRow = true } acc.lastDate = itemDate.toISODate() // -> Add session header row const typeName = `${item.type}-${item.slotName}` - if (item.type === 'regular' && acc.lastTypeName !== typeName) { + if (item.type === 'regular' && (acc.lastTypeName !== typeName || willRenderDateRow)) { acc.result.push({ key: `sesshd-${item.id}`, displayType: 'session-head', From 4bf1b938724abb775a5e9532740934dfa9a4dc40 Mon Sep 17 00:00:00 2001 From: Russ Housley Date: Sat, 15 Mar 2025 05:38:23 -0400 Subject: [PATCH 17/44] fix: Set Review Type Name for IETF Last Call (#8678) --- ietf/name/fixtures/names.json | 4 ++-- ietf/name/migrations/0015_last_call_name.py | 22 +++++++++++++++++++++ 2 files changed, 24 insertions(+), 2 deletions(-) create mode 100644 ietf/name/migrations/0015_last_call_name.py diff --git a/ietf/name/fixtures/names.json b/ietf/name/fixtures/names.json index 8f1262b4cf..96273fbc35 100644 --- a/ietf/name/fixtures/names.json +++ b/ietf/name/fixtures/names.json @@ -2628,7 +2628,7 @@ "used": true }, "model": "doc.state", - "pk": 182 + "pk": 181 }, { "fields": { @@ -13210,7 +13210,7 @@ { "fields": { "desc": "", - "name": "Last Call", + "name": "IETF Last Call", "order": 2, "used": true }, diff --git a/ietf/name/migrations/0015_last_call_name.py b/ietf/name/migrations/0015_last_call_name.py new file mode 100644 index 0000000000..ac210a274f --- /dev/null +++ b/ietf/name/migrations/0015_last_call_name.py @@ -0,0 +1,22 @@ +# Copyright 2025, IETF Trust + +from django.db import migrations + + +def forward(apps, schema_editor): + ReviewTypeName = apps.get_model("name", "ReviewTypeName") + ReviewTypeName.objects.filter(slug="lc").update(name="IETF Last Call") + +def reverse(apps, schema_editor): + ReviewTypeName = apps.get_model("name", "ReviewTypeName") + ReviewTypeName.objects.filter(slug="lc").update(name="Last Call") + +class Migration(migrations.Migration): + + dependencies = 
[ + ("name", "0014_change_legacy_stream_desc"), + ] + + operations = [ + migrations.RunPython(forward, reverse) + ] From 44bb285836e032acb15d38febc82f4495a6dd9e8 Mon Sep 17 00:00:00 2001 From: Jim Fenton Date: Sun, 16 Mar 2025 14:37:56 +0700 Subject: [PATCH 18/44] fix: use correct variable to decide to show meeting notes (#8674) * fix: use correct variable to decide to show session notes * Correct number of lines for selftests * fix: adjust test to match --------- Co-authored-by: Robert Sparks --- ietf/meeting/tests_views.py | 28 +++++++++++-------- .../meeting/session_details_panel.html | 2 +- 2 files changed, 17 insertions(+), 13 deletions(-) diff --git a/ietf/meeting/tests_views.py b/ietf/meeting/tests_views.py index 519f5f7c2d..59d7e49f7f 100644 --- a/ietf/meeting/tests_views.py +++ b/ietf/meeting/tests_views.py @@ -424,37 +424,41 @@ def test_meeting_agenda(self): self.assertEqual(r.status_code, 200) def test_session_recordings_via_factories(self): - session = SessionFactory(meeting__type_id="ietf", meeting__date=date_today()-datetime.timedelta(days=180)) + session = SessionFactory(meeting__type_id="ietf", meeting__date=date_today()-datetime.timedelta(days=180), meeting__number=str(random.randint(108,150))) self.assertEqual(session.meetecho_recording_name, "") self.assertEqual(len(session.recordings()), 0) url = urlreverse("ietf.meeting.views.session_details", kwargs=dict(num=session.meeting.number, acronym=session.group.acronym)) r = self.client.get(url) q = PyQuery(r.content) # debug.show("q(f'#notes_and_recordings_{session.pk}')") - self.assertEqual(len(q(f"#notes_and_recordings_{session.pk} tr")), 1) - link = q(f"#notes_and_recordings_{session.pk} tr a") - self.assertEqual(len(link), 1) - self.assertEqual(link[0].attrib['href'], str(session.session_recording_url())) + self.assertEqual(len(q(f"#notes_and_recordings_{session.pk} tr")), 2) + links = q(f"#notes_and_recordings_{session.pk} tr a") + self.assertEqual(len(links), 2) + 
self.assertEqual(links[0].attrib['href'], str(session.notes_url())) + self.assertEqual(links[1].attrib['href'], str(session.session_recording_url())) session.meetecho_recording_name = 'my_test_session_name' session.save() r = self.client.get(url) q = PyQuery(r.content) - self.assertEqual(len(q(f"#notes_and_recordings_{session.pk} tr")), 1) + self.assertEqual(len(q(f"#notes_and_recordings_{session.pk} tr")), 2) links = q(f"#notes_and_recordings_{session.pk} tr a") - self.assertEqual(len(links), 1) - self.assertEqual(links[0].attrib['href'], session.session_recording_url()) + self.assertEqual(len(links), 2) + self.assertEqual(links[0].attrib['href'], str(session.notes_url())) + self.assertEqual(links[1].attrib['href'], str(session.session_recording_url())) new_recording_url = "https://www.youtube.com/watch?v=jNQXAC9IVRw" new_recording_title = "Me at the zoo" create_recording(session, new_recording_url, new_recording_title) r = self.client.get(url) q = PyQuery(r.content) - self.assertEqual(len(q(f"#notes_and_recordings_{session.pk} tr")), 2) + self.assertEqual(len(q(f"#notes_and_recordings_{session.pk} tr")), 3) links = q(f"#notes_and_recordings_{session.pk} tr a") - self.assertEqual(len(links), 2) - self.assertEqual(links[0].attrib['href'], new_recording_url) - self.assertIn(new_recording_title, links[0].text_content()) + self.assertEqual(len(links), 3) + self.assertEqual(links[0].attrib['href'], str(session.notes_url())) + self.assertEqual(links[1].attrib['href'], new_recording_url) + self.assertIn(new_recording_title, links[1].text_content()) + self.assertEqual(links[2].attrib['href'], str(session.session_recording_url())) #debug.show("q(f'#notes_and_recordings_{session_pk}')") def test_delete_recordings(self): diff --git a/ietf/templates/meeting/session_details_panel.html b/ietf/templates/meeting/session_details_panel.html index 9b7a192f05..87d9e3d672 100644 --- a/ietf/templates/meeting/session_details_panel.html +++ 
b/ietf/templates/meeting/session_details_panel.html @@ -310,7 +310,7 @@

Notes and recordings

- {% if session.uses_notes %} + {% if meeting.uses_notes %}
From 2cb2ad15ba8500acb79b8dc62fd1903ad127a385 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 17 Mar 2025 17:09:38 -0400 Subject: [PATCH 19/44] chore(deps): bump nanoid in /dev/deploy-to-container in the npm group (#8689) Bumps the npm group in /dev/deploy-to-container with 1 update: [nanoid](https://github.com/ai/nanoid). Updates `nanoid` from 5.1.3 to 5.1.4 - [Release notes](https://github.com/ai/nanoid/releases) - [Changelog](https://github.com/ai/nanoid/blob/main/CHANGELOG.md) - [Commits](https://github.com/ai/nanoid/compare/5.1.3...5.1.4) --- updated-dependencies: - dependency-name: nanoid dependency-type: direct:production update-type: version-update:semver-patch dependency-group: npm ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- dev/deploy-to-container/package-lock.json | 14 +++++++------- dev/deploy-to-container/package.json | 2 +- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/dev/deploy-to-container/package-lock.json b/dev/deploy-to-container/package-lock.json index c4f675527e..f4ce2c367c 100644 --- a/dev/deploy-to-container/package-lock.json +++ b/dev/deploy-to-container/package-lock.json @@ -8,7 +8,7 @@ "dependencies": { "dockerode": "^4.0.4", "fs-extra": "^11.3.0", - "nanoid": "5.1.3", + "nanoid": "5.1.4", "nanoid-dictionary": "5.0.0-beta.1", "slugify": "1.6.6", "tar": "^7.4.3", @@ -668,9 +668,9 @@ "optional": true }, "node_modules/nanoid": { - "version": "5.1.3", - "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-5.1.3.tgz", - "integrity": "sha512-zAbEOEr7u2CbxwoMRlz/pNSpRP0FdAU4pRaYunCdEezWohXFs+a0Xw7RfkKaezMsmSM1vttcLthJtwRnVtOfHQ==", + "version": "5.1.4", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-5.1.4.tgz", + "integrity": "sha512-GTFcMIDgR7tqji/LpSY8rtg464VnJl/j6ypoehYnuGb+Y8qZUdtKB8WVCXon0UEZgFDbuUxpIl//6FHLHgXSNA==", "funding": [ { "type": "github", @@ -1613,9 +1613,9 
@@ "optional": true }, "nanoid": { - "version": "5.1.3", - "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-5.1.3.tgz", - "integrity": "sha512-zAbEOEr7u2CbxwoMRlz/pNSpRP0FdAU4pRaYunCdEezWohXFs+a0Xw7RfkKaezMsmSM1vttcLthJtwRnVtOfHQ==" + "version": "5.1.4", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-5.1.4.tgz", + "integrity": "sha512-GTFcMIDgR7tqji/LpSY8rtg464VnJl/j6ypoehYnuGb+Y8qZUdtKB8WVCXon0UEZgFDbuUxpIl//6FHLHgXSNA==" }, "nanoid-dictionary": { "version": "5.0.0-beta.1", diff --git a/dev/deploy-to-container/package.json b/dev/deploy-to-container/package.json index 8772c568c8..eb8cb9012a 100644 --- a/dev/deploy-to-container/package.json +++ b/dev/deploy-to-container/package.json @@ -4,7 +4,7 @@ "dependencies": { "dockerode": "^4.0.4", "fs-extra": "^11.3.0", - "nanoid": "5.1.3", + "nanoid": "5.1.4", "nanoid-dictionary": "5.0.0-beta.1", "slugify": "1.6.6", "tar": "^7.4.3", From 8af0dc457f4dda714046b6151a722e7d20b1a55d Mon Sep 17 00:00:00 2001 From: Jennifer Richards Date: Tue, 18 Mar 2025 08:30:52 +0700 Subject: [PATCH 20/44] fix: add doc to raw_id_fields for SlideSubmissionAdmin (#8687) --- ietf/meeting/admin.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ietf/meeting/admin.py b/ietf/meeting/admin.py index e975dd38a6..b7e56c7b77 100644 --- a/ietf/meeting/admin.py +++ b/ietf/meeting/admin.py @@ -189,7 +189,7 @@ class ImportantDateAdmin(admin.ModelAdmin): class SlideSubmissionAdmin(admin.ModelAdmin): model = SlideSubmission list_display = ['session', 'submitter', 'title'] - raw_id_fields = ['submitter', 'session'] + raw_id_fields = ['submitter', 'session', 'doc'] admin.site.register(SlideSubmission, SlideSubmissionAdmin) From 563fcee3a80d5036c9c490a794f31a78bd7a8e90 Mon Sep 17 00:00:00 2001 From: Jennifer Richards Date: Tue, 18 Mar 2025 14:53:38 +0700 Subject: [PATCH 21/44] fix: approve slides link for all pending submissions (#8692) * test: slide approval links shown for all sessions * fix: suggest pending slides for all 
sessions * refactor: flatten logic * refactor: avoid extra exists() query * test: update test comment * test: only pending slides suggested for approval * style: Black * test: temp path override for SessionDetailsTests --- ietf/meeting/tests_views.py | 80 +++++++++++++++++++++ ietf/meeting/views.py | 14 ++-- ietf/templates/meeting/session_details.html | 34 +++++---- 3 files changed, 104 insertions(+), 24 deletions(-) diff --git a/ietf/meeting/tests_views.py b/ietf/meeting/tests_views.py index 59d7e49f7f..111584cc56 100644 --- a/ietf/meeting/tests_views.py +++ b/ietf/meeting/tests_views.py @@ -4527,6 +4527,7 @@ def test_persistent_enabled_timeslot_types(self): class SessionDetailsTests(TestCase): + settings_temp_path_overrides = TestCase.settings_temp_path_overrides + ['SLIDE_STAGING_PATH'] def test_session_details(self): @@ -4659,6 +4660,85 @@ def test_add_session_drafts(self): q = PyQuery(r.content) self.assertEqual(1,len(q(".alert-warning:contains('may affect published proceedings')"))) + def test_proposed_slides_for_approval(self): + # This test overlaps somewhat with MaterialsTests of proposed slides handling. The focus + # here is on the display of slides, not the approval action. 
+ group = GroupFactory() + meeting = MeetingFactory( + type_id="ietf", date=date_today() + datetime.timedelta(days=10) + ) + sessions = SessionFactory.create_batch( + 2, + group=group, + meeting=meeting, + ) + + # slides submission _not_ in the `pending` state + do_not_show = [ + SlideSubmissionFactory( + session=sessions[0], + title="already approved", + status_id="approved", + ), + SlideSubmissionFactory( + session=sessions[1], + title="already rejected", + status_id="rejected", + ), + ] + + # pending submissions + first_session_pending = SlideSubmissionFactory( + session=sessions[0], title="first session title" + ) + second_session_pending = SlideSubmissionFactory( + session=sessions[1], title="second session title" + ) + + # and their approval URLs + def _approval_url(slidesub): + return urlreverse( + "ietf.meeting.views.approve_proposed_slides", + kwargs={"slidesubmission_id": slidesub.pk, "num": meeting.number}, + ) + + first_approval_url = _approval_url(first_session_pending) + second_approval_url = _approval_url(second_session_pending) + do_not_show_urls = [_approval_url(ss) for ss in do_not_show] + + # Retrieve the URL as a group chair + url = urlreverse( + "ietf.meeting.views.session_details", + kwargs={ + "num": meeting.number, + "acronym": group.acronym, + }, + ) + chair = RoleFactory(group=group, name_id="chair").person + self.client.login( + username=chair.user.username, password=f"{chair.user.username}+password" + ) + r = self.client.get(url) + self.assertEqual(r.status_code, 200) + pq = PyQuery(r.content) + self.assertEqual( + len(pq(f'a[href="{first_approval_url}"]')), + 1, + "first session proposed slides should be linked for approval", + ) + self.assertEqual( + len(pq(f'a[href="{second_approval_url}"]')), + 1, + "second session proposed slides should be linked for approval", + ) + for no_show_url in do_not_show_urls: + self.assertEqual( + len(pq(f'a[href="{no_show_url}"]')), + 0, + "non-pending slides should not be linked for approval", 
) + + class EditScheduleListTests(TestCase): def setUp(self): super().setUp() diff --git a/ietf/meeting/views.py b/ietf/meeting/views.py index 3fa605ed7e..d02ae40902 100644 --- a/ietf/meeting/views.py +++ b/ietf/meeting/views.py @@ -2509,12 +2509,14 @@ def session_details(request, num, acronym): scheduled_sessions = [s for s in sessions if s.current_status == 'sched'] unscheduled_sessions = [s for s in sessions if s.current_status != 'sched'] - pending_suggestions = None - if request.user.is_authenticated: - if can_manage: - pending_suggestions = session.slidesubmission_set.filter(status__slug='pending') - else: - pending_suggestions = session.slidesubmission_set.filter(status__slug='pending', submitter=request.user.person) + # Start with all the pending suggestions for all the group's sessions + pending_suggestions = SlideSubmission.objects.filter(session__in=sessions, status__slug='pending') + if can_manage: + pass # keep the full set + elif hasattr(request.user, "person"): + pending_suggestions = pending_suggestions.filter(submitter=request.user.person) + else: + pending_suggestions = SlideSubmission.objects.none() return render(request, "meeting/session_details.html", { 'scheduled_sessions':scheduled_sessions , diff --git a/ietf/templates/meeting/session_details.html b/ietf/templates/meeting/session_details.html index 571715b4b4..55fa3d3857 100644 --- a/ietf/templates/meeting/session_details.html +++ b/ietf/templates/meeting/session_details.html @@ -31,30 +31,28 @@

Scheduled Sessions

{% include 'meeting/session_details_panel.html' with sessions=scheduled_sessions %}

Unscheduled Sessions

{% include 'meeting/session_details_panel.html' with sessions=unscheduled_sessions %} - {% if pending_suggestions %} -

+ {% for s in pending_suggestions %} + {% if forloop.first %}

{% if can_manage_materials %} Proposed slides awaiting your approval {% else %} Your proposed slides awaiting chair approval {% endif %}

-
- {% endif %} +
{% endif %} + {% if can_manage_materials %} +

+ + {{ s.submitter }} - {{ s.title }} ({{ s.time }}) + +

+ {% else %} +

+ {{ s.title }} ({{ s.time }}) +

+ {% endif %} + {% if forloop.last %}
{% endif %} + {% endfor %} {% endblock %} {% block js %} From 73abdcc29798850a86ddeabdd65b824adcc62eb6 Mon Sep 17 00:00:00 2001 From: Rudi Matz Date: Tue, 18 Mar 2025 22:25:10 -0400 Subject: [PATCH 22/44] feat(agenda): add preliminary date when no agenda available (#8690) * feat(meetings): add preliminary agenda date when no agenda available * test: adapt test for additional field for preliminary agenda date --------- Co-authored-by: Rudi Matz --- client/agenda/AgendaScheduleList.vue | 1 + ietf/meeting/tests_views.py | 5 +++-- ietf/meeting/views.py | 8 ++++++-- 3 files changed, 10 insertions(+), 4 deletions(-) diff --git a/client/agenda/AgendaScheduleList.vue b/client/agenda/AgendaScheduleList.vue index ab0f6e0184..369025d5da 100644 --- a/client/agenda/AgendaScheduleList.vue +++ b/client/agenda/AgendaScheduleList.vue @@ -15,6 +15,7 @@ td(:colspan='pickerModeActive ? 6 : 5') i.bi.bi-exclamation-triangle.me-2 span(v-if='agendaStore.searchVisible && agendaStore.searchText') No event matching your search query. 
+ span(v-else-if='agendaStore.meeting.prelimAgendaDate') A preliminary agenda is expected to be released on {{ agendaStore.meeting.prelimAgendaDate }} span(v-else) Nothing to display tr( v-for='item of meetingEvents' diff --git a/ietf/meeting/tests_views.py b/ietf/meeting/tests_views.py index 111584cc56..0f91986f77 100644 --- a/ietf/meeting/tests_views.py +++ b/ietf/meeting/tests_views.py @@ -233,6 +233,7 @@ def test_meeting_agenda(self): session.save() slot = TimeSlot.objects.get(sessionassignments__session=session,sessionassignments__schedule=meeting.schedule) meeting.timeslot_set.filter(type_id="break").update(show_location=False) + meeting.importantdate_set.create(name_id='prelimagenda',date=date_today() + datetime.timedelta(days=20)) # self.write_materials_files(meeting, session) # @@ -262,7 +263,8 @@ def test_meeting_agenda(self): "updated": generated_data.get("meeting").get("updated"), # Just expect the value to exist "timezone": meeting.time_zone, "infoNote": meeting.agenda_info_note, - "warningNote": meeting.agenda_warning_note + "warningNote": meeting.agenda_warning_note, + "prelimAgendaDate": (date_today() + datetime.timedelta(days=20)).isoformat() }, "categories": generated_data.get("categories"), # Just expect the value to exist "isCurrentMeeting": True, @@ -9341,4 +9343,3 @@ def test_bluesheet_data(self): {"name": attended_with_affil.person.plain_name(), "affiliation": "Somewhere"}, ] ) - diff --git a/ietf/meeting/views.py b/ietf/meeting/views.py index d02ae40902..6a73059d92 100644 --- a/ietf/meeting/views.py +++ b/ietf/meeting/views.py @@ -59,7 +59,7 @@ from ietf.ietfauth.utils import role_required, has_role, user_is_person from ietf.mailtrigger.utils import gather_address_lists from ietf.meeting.models import Meeting, Session, Schedule, FloorPlan, SessionPresentation, TimeSlot, SlideSubmission, Attended -from ietf.meeting.models import SessionStatusName, SchedulingEvent, SchedTimeSessAssignment, Room, TimeSlotTypeName +from ietf.meeting.models 
import ImportantDate, SessionStatusName, SchedulingEvent, SchedTimeSessAssignment, Room, TimeSlotTypeName from ietf.meeting.forms import ( CustomDurationField, SwapDaysForm, SwapTimeslotsForm, ImportMinutesForm, TimeSlotCreateForm, TimeSlotEditForm, SessionCancelForm, SessionEditForm ) from ietf.meeting.helpers import get_person_by_email, get_schedule_by_name @@ -1709,6 +1709,9 @@ def generate_agenda_data(num=None, force_refresh=False): # Get Floor Plans floors = FloorPlan.objects.filter(meeting=meeting).order_by('order') + # Get Preliminary Agenda Date + prelimAgendaDate = ImportantDate.objects.filter(name_id="prelimagenda", meeting=meeting).first() + result = { "meeting": { "number": schedule.meeting.number, @@ -1718,7 +1721,8 @@ def generate_agenda_data(num=None, force_refresh=False): "updated": updated, "timezone": meeting.time_zone, "infoNote": schedule.meeting.agenda_info_note, - "warningNote": schedule.meeting.agenda_warning_note + "warningNote": schedule.meeting.agenda_warning_note, + "prelimAgendaDate": prelimAgendaDate.date.isoformat() if prelimAgendaDate else "" }, "categories": filter_organizer.get_filter_categories(), "isCurrentMeeting": is_current_meeting, From 3356505dc89e2a372f54c3ed1da472c3a11bc6de Mon Sep 17 00:00:00 2001 From: Matthew Holloway Date: Wed, 19 Mar 2025 15:28:17 +1300 Subject: [PATCH 23/44] fix(agenda): Agenda 'now' fallback to next event if there is no current event (#8693) * fix: agenda now fallback to next event if there is no current event * chore: agenda goto now PR feedback * chore: simplifying 'agenda goto now' next event logic * chore: simplifying 'agenda goto now' nextEvent var --- client/agenda/AgendaMobileBar.vue | 8 ++++---- client/agenda/AgendaQuickAccess.vue | 8 ++++---- client/agenda/AgendaScheduleList.vue | 16 ++++++++++------ client/agenda/store.js | 22 ++++++++++++++++++++++ 4 files changed, 40 insertions(+), 14 deletions(-) diff --git a/client/agenda/AgendaMobileBar.vue b/client/agenda/AgendaMobileBar.vue index 
63611e21c2..43480bedd3 100644 --- a/client/agenda/AgendaMobileBar.vue +++ b/client/agenda/AgendaMobileBar.vue @@ -124,11 +124,11 @@ const downloadIcsOptions = [ function jumpToDay (dayId) { if (dayId === 'now') { - const lastEventId = agendaStore.findCurrentEventId() - if (lastEventId) { - document.getElementById(`agenda-rowid-${lastEventId}`)?.scrollIntoView(true) + const nowEventId = agendaStore.findNowEventId() + if (nowEventId) { + document.getElementById(`agenda-rowid-${nowEventId}`)?.scrollIntoView(true) } else { - message.warning('There is no event happening right now.') + message.warning('There is no event happening right now or in the future.') } } else { document.getElementById(dayId)?.scrollIntoView(true) diff --git a/client/agenda/AgendaQuickAccess.vue b/client/agenda/AgendaQuickAccess.vue index b226d09c60..c9412f6663 100644 --- a/client/agenda/AgendaQuickAccess.vue +++ b/client/agenda/AgendaQuickAccess.vue @@ -204,12 +204,12 @@ function scrollToDay (daySlug, ev) { } function scrollToNow (ev) { - const lastEventId = agendaStore.findCurrentEventId() + const nowEventId = agendaStore.findNowEventId() - if (lastEventId) { - document.getElementById(`agenda-rowid-${lastEventId}`)?.scrollIntoView(true) + if (nowEventId) { + document.getElementById(`agenda-rowid-${nowEventId}`)?.scrollIntoView(true) } else { - message.warning('There is no event happening right now.') + message.warning('There is no event happening right now or in the future.') } } diff --git a/client/agenda/AgendaScheduleList.vue b/client/agenda/AgendaScheduleList.vue index 369025d5da..5a971c25cf 100644 --- a/client/agenda/AgendaScheduleList.vue +++ b/client/agenda/AgendaScheduleList.vue @@ -591,10 +591,10 @@ function renderLinkLabel (opt) { function recalculateRedLine () { state.currentMinute = DateTime.local().minute - const lastEventId = agendaStore.findCurrentEventId() + const currentEventId = agendaStore.findCurrentEventId() - if (lastEventId) { - state.redhandOffset = 
document.getElementById(`agenda-rowid-${lastEventId}`)?.offsetTop || 0 + if (currentEventId) { + state.redhandOffset = document.getElementById(`agenda-rowid-${currentEventId}`)?.offsetTop || 0 } else { state.redhandOffset = 0 } @@ -615,9 +615,13 @@ function recalculateRedLine () { return } unsubscribe() // we only need to scroll once, so unsubscribe from future updates - if(window.location.hash === "#now") { - const lastEventId = agendaStore.findCurrentEventId() - document.getElementById(`agenda-rowid-${lastEventId}`)?.scrollIntoView(true) + if (window.location.hash === "#now") { + const nowEventId = agendaStore.findNowEventId() + if (nowEventId) { + document.getElementById(`agenda-rowid-${nowEventId}`)?.scrollIntoView(true) + } else { + message.warning('There is no event happening right now or in the future.') + } } else if(window.location.hash.startsWith(`#${daySlugPrefix}`)) { document.getElementById(window.location.hash.substring(1))?.scrollIntoView(true) } diff --git a/client/agenda/store.js b/client/agenda/store.js index 71c1219725..359c5fbf05 100644 --- a/client/agenda/store.js +++ b/client/agenda/store.js @@ -230,6 +230,28 @@ export const useAgendaStore = defineStore('agenda', { return lastEvent.id || null }, + findNowEventId () { + const currentEventId = this.findCurrentEventId() + + if (currentEventId) { + return currentEventId + } + + // if there isn't a current event then instead find the next event + + const current = (this.nowDebugDiff ? 
DateTime.local().minus(this.nowDebugDiff) : DateTime.local()).setZone(this.timezone) + + // -> Find next event after current time + let nextEventId = undefined + for(const sh of this.scheduleAdjusted) { + if (sh.adjustedStart > current) { + nextEventId = sh.id + break + } + } + + return nextEventId || null + }, hideLoadingScreen () { // -> Hide loading screen const loadingRef = document.querySelector('#app-loading') From c3a296fdb559b015f8e4d457f56fea137e10cf37 Mon Sep 17 00:00:00 2001 From: Nicolas Giard Date: Wed, 19 Mar 2025 04:56:59 -0400 Subject: [PATCH 24/44] fix(agenda): handle calendar view events that spread across multiple days (#8685) --- client/agenda/AgendaScheduleCalendar.vue | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/client/agenda/AgendaScheduleCalendar.vue b/client/agenda/AgendaScheduleCalendar.vue index 9b56b7f5a7..6701ddabd7 100644 --- a/client/agenda/AgendaScheduleCalendar.vue +++ b/client/agenda/AgendaScheduleCalendar.vue @@ -187,6 +187,7 @@ function refreshData () { let earliestDate = DateTime.fromISO('2200-01-01') let latestDate = DateTime.fromISO('1990-01-01') let nowDate = DateTime.now() + let hasCrossDayEvents = false calendarOptions.events = agendaStore.scheduleAdjusted.map(ev => { // -> Determine boundaries @@ -202,6 +203,9 @@ function refreshData () { if (ev.adjustedEnd < latestDate) { latestDate = ev.adjustedEnd } + if (ev.adjustedStart.day !== ev.adjustedEnd.day) { + hasCrossDayEvents = true + } // -> Build event object return { id: ev.id, @@ -214,8 +218,8 @@ function refreshData () { }) // -> Display settings - calendarOptions.slotMinTime = `${earliestHour.toString().padStart(2, '0')}:00:00` - calendarOptions.slotMaxTime = `${latestHour.toString().padStart(2, '0')}:00:00` + calendarOptions.slotMinTime = hasCrossDayEvents ? '00:00:00' : `${earliestHour.toString().padStart(2, '0')}:00:00` + calendarOptions.slotMaxTime = hasCrossDayEvents ? 
'23:59:59' : `${latestHour.toString().padStart(2, '0')}:00:00` calendarOptions.validRange.start = earliestDate.minus({ days: 1 }).toISODate() calendarOptions.validRange.end = latestDate.plus({ days: 1 }).toISODate() // calendarOptions.scrollTime = `${earliestHour.toString().padStart(2, '0')}:00:00` From fde8136df53b3b9d2666674e16e15a7edf7e0ed1 Mon Sep 17 00:00:00 2001 From: Rich Salz Date: Wed, 19 Mar 2025 05:06:47 -0400 Subject: [PATCH 25/44] chore: Add draft name to review-completed email (#8676) * chore: Add draft name to review-completed email Fixes: #7866 * fix: typo * fix: show title and add test --------- Co-authored-by: Robert Sparks --- ietf/doc/tests_review.py | 5 ++++- ietf/templates/review/completed_review.txt | 4 +++- 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/ietf/doc/tests_review.py b/ietf/doc/tests_review.py index e93bc02181..13ddbc22ba 100644 --- a/ietf/doc/tests_review.py +++ b/ietf/doc/tests_review.py @@ -902,7 +902,10 @@ def test_complete_review_enter_content(self): self.assertEqual(len(outbox), 1) self.assertIn(assignment.review_request.team.list_email, outbox[0]["To"]) - self.assertIn("This is a review", get_payload_text(outbox[0])) + payload = get_payload_text(outbox[0]) + self.assertIn("This is a review", payload) + self.assertIn(f"Document: {assignment.review_request.doc.name}", payload) + self.assertIn(f"Title: {assignment.review_request.doc.title}", payload) self.assertIn(settings.MAILING_LIST_ARCHIVE_URL, assignment.review.external_url) diff --git a/ietf/templates/review/completed_review.txt b/ietf/templates/review/completed_review.txt index bdbe321ca1..7d10d8bf13 100644 --- a/ietf/templates/review/completed_review.txt +++ b/ietf/templates/review/completed_review.txt @@ -1,7 +1,9 @@ {% load ietf_filters %}{% autoescape off %}{% filter maybewordwrap:80 %}{% if assignment.state_id == "part-completed" %} Review is partially done. Another assignment may be needed to complete it. 
-{% endif %}Reviewer: {{ assignment.reviewer.person }} +{% endif %}Document: {{ assignment.review_request.doc.name }} +Title: {{ assignment.review_request.doc.title }} +Reviewer: {{ assignment.reviewer.person }} Review result: {{ assignment.result.name }} {{ content }} From 2d974ed09b8e9ff93805fb06cfa65ab6f814e261 Mon Sep 17 00:00:00 2001 From: Matthew Holloway Date: Fri, 21 Mar 2025 13:45:27 +1300 Subject: [PATCH 26/44] feat(agenda): Agenda meeting materials in overflow menu (#8698) * feat: agenda meeting materials in overflow menu * chore: fixing agenda meeting materials tests * chore: fix agenda meeting material tests * chore: fixing agenda meeting materials test * fix: changing to -mat suffix rather than -meeting-materials * fix: agenda meeting materials lnk -> btn * fix: click handler on agenda meeting materials --- client/agenda/AgendaScheduleList.vue | 60 ++++++++++++++++++++----- playwright/tests/meeting/agenda.spec.js | 8 ++-- 2 files changed, 52 insertions(+), 16 deletions(-) diff --git a/client/agenda/AgendaScheduleList.vue b/client/agenda/AgendaScheduleList.vue index 5a971c25cf..1e50df5fb4 100644 --- a/client/agenda/AgendaScheduleList.vue +++ b/client/agenda/AgendaScheduleList.vue @@ -121,20 +121,12 @@ :options='item.links' key-field='id' :render-icon='renderLinkIcon' - :render-label='renderLinkLabel' + :render-label='renderLink' ) n-button(size='tiny') i.bi.bi-three-dots .agenda-table-cell-links-buttons(v-else-if='item.links && item.links.length > 0') - template(v-if='item.flags.agenda') - n-popover - template(#trigger) - i.bi.bi-collection( - :id='`btn-lnk-` + item.key + `-mat`' - @click='showMaterials(item.key)' - ) - span Show meeting materials - template(v-else-if='item.type === `regular`') + template(v-if='!item.flags.agenda && item.type === `regular`') n-popover template(#trigger) i.no-meeting-materials @@ -143,7 +135,16 @@ span No meeting materials yet. 
n-popover(v-for='lnk of item.links', :key='lnk.id') template(#trigger) + button( + v-if="lnk.click" + type="button" + :id='`btn-` + lnk.id' + @click='lnk.click' + :aria-label='lnk.label' + :class='`border-0 bg-transparent text-` + lnk.color' + ): i.bi(:class='`bi-` + lnk.icon') a( + v-else :id='`btn-` + lnk.id' :href='lnk.href' :aria-label='lnk.label' @@ -281,13 +282,28 @@ const meetingEvents = computed(() => { } acc.lastTypeName = typeName - // -> Populate event links + // + /** + * -> Populate event menu items + * + * links is an array of either, + * 1. { href: "...", click: undefined, ...sharedProps } + * 2. { click: () => {...}, href: undefined, ...sharedProps } + */ const links = [] const typesWithLinks = ['regular', 'plenary', 'other'] const purposesWithoutLinks = ['admin', 'closed_meeting', 'officehours', 'social'] if (item.flags.showAgenda || (typesWithLinks.includes(item.type) && !purposesWithoutLinks.includes(item.purpose))) { if (item.flags.agenda) { // -> Meeting Materials + links.push({ + id: `btn-${item.id}-mat`, + label: 'Show meeting materials', + icon: 'collection', + href: undefined, + click: () => showMaterials(item.id), + color: 'black' + }) links.push({ id: `lnk-${item.id}-tar`, label: 'Download meeting materials as .tar archive', @@ -585,7 +601,11 @@ function renderLinkIcon (opt) { return h('i', { class: `bi bi-${opt.icon} text-${opt.color}` }) } -function renderLinkLabel (opt) { +function renderLink (opt) { + if (opt.click) { + return h('button', { type: 'button', class: 'overflow-button', onClick: opt.click }, opt.label) + } + return h('a', { href: opt.href, target: '_blank' }, opt.label) } @@ -1564,6 +1584,22 @@ onBeforeUnmount(() => { } } +.overflow-button { + font-size: inherit; + padding: 0; + border: 0; + background: transparent; + + &:before { + content: ""; + position: absolute; + left: 0; + right: 0; + top: 0; + bottom: 0; + } +} + @keyframes fadeInAnim { 0% { opacity: 0; diff --git a/playwright/tests/meeting/agenda.spec.js 
b/playwright/tests/meeting/agenda.spec.js index e6c6e17031..b7e3df5718 100644 --- a/playwright/tests/meeting/agenda.spec.js +++ b/playwright/tests/meeting/agenda.spec.js @@ -275,7 +275,7 @@ test.describe('past - desktop', () => { const eventButtons = row.locator('.agenda-table-cell-links > .agenda-table-cell-links-buttons') if (event.flags.agenda) { // Show meeting materials button - await expect(eventButtons.locator('i.bi.bi-collection')).toBeVisible() + await expect(eventButtons.locator(`#btn-btn-${event.id}-mat`)).toBeVisible() // ZIP materials button await expect(eventButtons.locator(`#btn-lnk-${event.id}-tar`)).toHaveAttribute('href', `/meeting/${meetingData.meeting.number}/agenda/${event.acronym}-drafts.tgz`) await expect(eventButtons.locator(`#btn-lnk-${event.id}-tar > i.bi`)).toBeVisible() @@ -425,7 +425,7 @@ test.describe('past - desktop', () => { }) }) // Open dialog - await page.locator(`#agenda-rowid-${event.id} #btn-lnk-${event.id}-mat`).click() + await page.locator(`#agenda-rowid-${event.id} #btn-btn-${event.id}-mat`).click() await expect(page.locator('.agenda-eventdetails')).toBeVisible() // Header await expect(page.locator('.agenda-eventdetails .n-card-header__main > .detail-header > .bi')).toBeVisible() @@ -507,7 +507,7 @@ test.describe('past - desktop', () => { }) }) // Open dialog - await page.locator(`#btn-lnk-${event.id}-mat`).click() + await page.locator(`#btn-btn-${event.id}-mat`).click() await expect(page.locator('.agenda-eventdetails')).toBeVisible() // Slides Tab await page.locator('.agenda-eventdetails .detail-nav > a').nth(1).click() @@ -1158,7 +1158,7 @@ test.describe('future - desktop', () => { if (event.flags.showAgenda || (['regular', 'plenary', 'other'].includes(event.type) && !['admin', 'closed_meeting', 'officehours', 'social'].includes(event.purpose))) { if (event.flags.agenda) { // Show meeting materials button - await expect(eventButtons.locator('i.bi.bi-collection')).toBeVisible() + await 
expect(eventButtons.locator(`#btn-btn-${event.id}-mat`)).toBeVisible() // ZIP materials button await expect(eventButtons.locator(`#btn-lnk-${event.id}-tar`)).toHaveAttribute('href', `/meeting/${meetingData.meeting.number}/agenda/${event.acronym}-drafts.tgz`) await expect(eventButtons.locator(`#btn-lnk-${event.id}-tar > i.bi`)).toBeVisible() From 27ea6234a8b0b74062c0a57e849e316087c849f3 Mon Sep 17 00:00:00 2001 From: Nicolas Giard Date: Fri, 21 Mar 2025 04:43:33 -0400 Subject: [PATCH 27/44] feat(agenda): keep video client + onsite tool buttons for plenary for rest of day (#8706) --- client/agenda/AgendaScheduleList.vue | 17 ++++ playwright/helpers/common.js | 24 ++++++ playwright/helpers/meeting.js | 3 + playwright/tests/meeting/agenda.spec.js | 102 ++++++++++++++++++++---- 4 files changed, 129 insertions(+), 17 deletions(-) diff --git a/client/agenda/AgendaScheduleList.vue b/client/agenda/AgendaScheduleList.vue index 1e50df5fb4..86c91bed85 100644 --- a/client/agenda/AgendaScheduleList.vue +++ b/client/agenda/AgendaScheduleList.vue @@ -464,6 +464,23 @@ const meetingEvents = computed(() => { color: 'purple' }) } + // -> Keep showing video client / on-site tool for Plenary until end of day, in case it goes over the planned time range + if (item.type === 'plenary' && item.adjustedEnd.day === current.day) { + links.push({ + id: `lnk-${item.id}-video`, + label: 'Full Client with Video', + icon: 'camera-video', + href: item.links.videoStream, + color: 'purple' + }) + links.push({ + id: `lnk-${item.id}-onsitetool`, + label: 'Onsite tool', + icon: 'telephone-outbound', + href: item.links.onsiteTool, + color: 'teal' + }) + } } } } diff --git a/playwright/helpers/common.js b/playwright/helpers/common.js index 5ba39ba022..c4dd7e2640 100644 --- a/playwright/helpers/common.js +++ b/playwright/helpers/common.js @@ -13,5 +13,29 @@ module.exports = { return rect.top < bottom && rect.top > 0 - rect.height }) + }, + /** + * Override page DateTime with a new value + * + * @param 
{Object} page Page object + * @param {Object} dateTimeOverride New DateTime object + */ + overridePageDateTime: async (page, dateTimeOverride) => { + await page.addInitScript(`{ + // Extend Date constructor to default to fixed time + Date = class extends Date { + constructor(...args) { + if (args.length === 0) { + super(${dateTimeOverride.toMillis()}); + } else { + super(...args); + } + } + } + // Override Date.now() to start from fixed time + const __DateNowOffset = ${dateTimeOverride.toMillis()} - Date.now(); + const __DateNow = Date.now; + Date.now = () => __DateNow() + __DateNowOffset; + }`) } } diff --git a/playwright/helpers/meeting.js b/playwright/helpers/meeting.js index 9722ffc68b..634ca2e8c6 100644 --- a/playwright/helpers/meeting.js +++ b/playwright/helpers/meeting.js @@ -609,6 +609,9 @@ module.exports = { startDateTime: curDay.set({ hour: 17, minute: 30 }), duration: '2h', type: 'plenary', + showAgenda: true, + hasAgenda: true, + hasRecordings: true, ...findAreaGroup('ietf-plenary', categories[2]) }, floors)) } diff --git a/playwright/tests/meeting/agenda.spec.js b/playwright/tests/meeting/agenda.spec.js index b7e3df5718..412a3fe9b8 100644 --- a/playwright/tests/meeting/agenda.spec.js +++ b/playwright/tests/meeting/agenda.spec.js @@ -1213,7 +1213,7 @@ test.describe('future - desktop', () => { await expect(eventButtons.locator(`#btn-lnk-${event.id}-remotecallin`)).toHaveAttribute('href', remoteCallInUrl) await expect(eventButtons.locator(`#btn-lnk-${event.id}-remotecallin > i.bi`)).toBeVisible() } - // calendar + // Calendar if (event.links.calendar) { await expect(eventButtons.locator(`#btn-lnk-${event.id}-calendar`)).toHaveAttribute('href', event.links.calendar) await expect(eventButtons.locator(`#btn-lnk-${event.id}-calendar > i.bi`)).toBeVisible() @@ -1278,22 +1278,7 @@ test.describe('live - desktop', () => { }) // Override Date in page to fixed time - await page.addInitScript(`{ - // Extend Date constructor to default to fixed time - Date = class 
extends Date { - constructor(...args) { - if (args.length === 0) { - super(${currentTime.toMillis()}); - } else { - super(...args); - } - } - } - // Override Date.now() to start from fixed time - const __DateNowOffset = ${currentTime.toMillis()} - Date.now(); - const __DateNow = Date.now; - Date.now = () => __DateNow() + __DateNowOffset; - }`) + await commonHelper.overridePageDateTime(page, currentTime) // Visit agenda page and await Meeting Data API call to complete await Promise.all([ @@ -1348,6 +1333,89 @@ test.describe('live - desktop', () => { }) }) +// ==================================================================== +// AGENDA (live meeting) | DESKTOP viewport | Plenary Extended Time Buttons +// ==================================================================== + +test.describe('live - desktop - plenary extended time buttons', () => { + let meetingData + let plenarySessionId + + test.beforeAll(async () => { + // Generate meeting data + meetingData = meetingHelper.generateAgendaResponse({ dateMode: 'current' }) + plenarySessionId = meetingData.schedule.find(s => s.type === 'plenary').id + }) + + test.beforeEach(async ({ page }) => { + // Intercept Meeting Data API + await page.route(`**/api/meeting/${meetingData.meeting.number}/agenda-data`, route => { + route.fulfill({ + status: 200, + contentType: 'application/json', + body: JSON.stringify(meetingData) + }) + }) + + await page.setViewportSize({ + width: viewports.desktop[0], + height: viewports.desktop[1] + }) + }) + + // -> BUTTONS PRESENT AFTER EVENT, SAME DAY + + test('same day - after event', async ({ page }) => { + // Override Date in page to fixed time + const currentTime = DateTime.fromISO('2022-02-01T13:45:15', { zone: 'Asia/Tokyo' }).plus({ days: 1 }).set({ hour: 20, minute: 30 }) + await commonHelper.overridePageDateTime(page, currentTime) + + // Visit agenda page and await Meeting Data API call to complete + await Promise.all([ + 
page.waitForResponse(`**/api/meeting/${meetingData.meeting.number}/agenda-data`), + page.goto(`/meeting/${meetingData.meeting.number}/agenda`) + ]) + + // Wait for page to be ready + await page.locator('.agenda h1').waitFor({ state: 'visible' }) + await setTimeout(500) + + // Check for plenary event + await expect(page.locator('.agenda .agenda-table-display-event.agenda-table-type-plenary')).toBeVisible() + await page.locator('.agenda .agenda-table-display-event.agenda-table-type-plenary').scrollIntoViewIfNeeded() + + // Check for full video client + on-site tool + await expect(page.locator(`.agenda .agenda-table-display-event.agenda-table-type-plenary .agenda-table-cell-links-buttons a#btn-lnk-${plenarySessionId}-video`)).toBeVisible() + await expect(page.locator(`.agenda .agenda-table-display-event.agenda-table-type-plenary .agenda-table-cell-links-buttons a#btn-lnk-${plenarySessionId}-onsitetool`)).toBeVisible() + }) + + // -> BUTTONS NO LONGER PRESENT AFTER EVENT, NEXT DAY + + test('next day - after event', async ({ page }) => { + // Override Date in page to fixed time + const currentTime = DateTime.fromISO('2022-02-01T13:45:15', { zone: 'Asia/Tokyo' }).plus({ days: 2 }).set({ hour: 2, minute: 30 }) + await commonHelper.overridePageDateTime(page, currentTime) + + // Visit agenda page and await Meeting Data API call to complete + await Promise.all([ + page.waitForResponse(`**/api/meeting/${meetingData.meeting.number}/agenda-data`), + page.goto(`/meeting/${meetingData.meeting.number}/agenda`) + ]) + + // Wait for page to be ready + await page.locator('.agenda h1').waitFor({ state: 'visible' }) + await setTimeout(500) + + // Check for plenary event + await expect(page.locator('.agenda .agenda-table-display-event.agenda-table-type-plenary')).toBeVisible() + await page.locator('.agenda .agenda-table-display-event.agenda-table-type-plenary').scrollIntoViewIfNeeded() + + // Check for full video client + on-site tool + await expect(page.locator(`.agenda 
.agenda-table-display-event.agenda-table-type-plenary .agenda-table-cell-links-buttons a#btn-lnk-${plenarySessionId}-video`)).not.toBeVisible() + await expect(page.locator(`.agenda .agenda-table-display-event.agenda-table-type-plenary .agenda-table-cell-links-buttons a#btn-lnk-${plenarySessionId}-onsitetool`)).not.toBeVisible() + }) +}) + // ==================================================================== // AGENDA (past meeting) | SMALL DESKTOP/TABLET/MOBILE viewports // ==================================================================== From ed2059a5813130cbf31ce77c0978a35751addc6c Mon Sep 17 00:00:00 2001 From: Jennifer Richards Date: Fri, 21 Mar 2025 15:45:26 +0700 Subject: [PATCH 28/44] fix: add blank=true to SlideSubmission.doc (#8688) Allows admin to save an instance with a null doc reference, which was already permitted. --- .../0011_alter_slidesubmission_doc.py | 26 +++++++++++++++++++ ietf/meeting/models.py | 2 +- 2 files changed, 27 insertions(+), 1 deletion(-) create mode 100644 ietf/meeting/migrations/0011_alter_slidesubmission_doc.py diff --git a/ietf/meeting/migrations/0011_alter_slidesubmission_doc.py b/ietf/meeting/migrations/0011_alter_slidesubmission_doc.py new file mode 100644 index 0000000000..b9cbc58e99 --- /dev/null +++ b/ietf/meeting/migrations/0011_alter_slidesubmission_doc.py @@ -0,0 +1,26 @@ +# Generated by Django 4.2.19 on 2025-03-17 09:37 + +from django.db import migrations +import django.db.models.deletion +import ietf.utils.models + + +class Migration(migrations.Migration): + + dependencies = [ + ("doc", "0025_storedobject_storedobject_unique_name_per_store"), + ("meeting", "0010_alter_floorplan_image_alter_meetinghost_logo"), + ] + + operations = [ + migrations.AlterField( + model_name="slidesubmission", + name="doc", + field=ietf.utils.models.ForeignKey( + blank=True, + null=True, + on_delete=django.db.models.deletion.SET_NULL, + to="doc.document", + ), + ), + ] diff --git a/ietf/meeting/models.py b/ietf/meeting/models.py index 
5284420731..84d151c310 100644 --- a/ietf/meeting/models.py +++ b/ietf/meeting/models.py @@ -1385,7 +1385,7 @@ class SlideSubmission(models.Model): apply_to_all = models.BooleanField(default=False) submitter = ForeignKey(Person) status = ForeignKey(SlideSubmissionStatusName, null=True, default='pending', on_delete=models.SET_NULL) - doc = ForeignKey(Document, null=True, on_delete=models.SET_NULL) + doc = ForeignKey(Document, blank=True, null=True, on_delete=models.SET_NULL) def staged_filepath(self): return os.path.join(settings.SLIDE_STAGING_PATH , self.filename) From 37b4448b3d390b102b7e14890880bcfd1e965bcc Mon Sep 17 00:00:00 2001 From: Benson Muite Date: Fri, 21 Mar 2025 11:52:18 +0300 Subject: [PATCH 29/44] fix: Clarify legacy description in RFC banner (#8424) (#8670) --- ietf/templates/doc/disclaimer.html | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/ietf/templates/doc/disclaimer.html b/ietf/templates/doc/disclaimer.html index 31ce6c397e..db4c42ed68 100644 --- a/ietf/templates/doc/disclaimer.html +++ b/ietf/templates/doc/disclaimer.html @@ -3,12 +3,18 @@ {% load ietf_filters %} {% origin %} {% if doc.type_id == "rfc" %} - {% if doc.stream.slug != "ietf" and doc.std_level.slug|default:"unk" not in "bcp,ds,ps,std"|split:"," %} + {% if doc.stream.slug != "ietf" and doc.stream.desc != "Legacy" and doc.std_level.slug|default:"unk" not in "bcp,ds,ps,std"|split:"," %} + {% elif doc.stream.slug != "ietf" and doc.stream.desc == "Legacy" and doc.std_level.slug|default:"unk" not in "bcp,ds,ps,std"|split:"," %} + {% endif %} {% elif doc|is_in_stream %} {% if doc.stream.slug != "ietf" and doc.std_level.slug|default:"unk" not in "bcp,ds,ps,std"|split:"," %} @@ -25,4 +31,4 @@ This I-D is not endorsed by the IETF and has no formal standing in the IETF standards process. 
-{% endif %} \ No newline at end of file +{% endif %} From 8fec17282f5dc7ef28355990e2f74bff2a309002 Mon Sep 17 00:00:00 2001 From: Nicolas Giard Date: Fri, 21 Mar 2025 19:30:40 -0400 Subject: [PATCH 30/44] ci: Update build-base-app.yml --- .github/workflows/build-base-app.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/build-base-app.yml b/.github/workflows/build-base-app.yml index c8f66a22b7..5f0a0d11b8 100644 --- a/.github/workflows/build-base-app.yml +++ b/.github/workflows/build-base-app.yml @@ -51,7 +51,7 @@ jobs: push: true tags: | ghcr.io/ietf-tools/datatracker-app-base:${{ env.IMGVERSION }} - ghcr.io/ietf-tools/datatracker-app-base:latest + ${{ github.ref == 'refs/heads/main' && 'ghcr.io/ietf-tools/datatracker-app-base:latest' || '' }} - name: Update version references run: | @@ -61,6 +61,6 @@ jobs: - name: Commit CHANGELOG.md uses: stefanzweifel/git-auto-commit-action@v5 with: - branch: main + branch: ${{ github.ref_name }} commit_message: 'ci: update base image target version to ${{ env.IMGVERSION }}' file_pattern: dev/build/Dockerfile dev/build/TARGET_BASE From 6da36da3fde2721d3885001e4a4174c85579db8e Mon Sep 17 00:00:00 2001 From: Jennifer Richards Date: Thu, 27 Mar 2025 00:29:48 +0700 Subject: [PATCH 31/44] chore: bump rabbitmq to 3.13 (#8603) * ci: param for rabbitmq_version * ci: drop latest tagging from datatracker-mq * chore: bump rabbitmq to 3.13 * Revert "ci: drop latest tagging from datatracker-mq" This reverts commit 58cea207c7300727f9cfc19a57859ab16c3b2c01. 
--- .github/workflows/build-mq-broker.yml | 15 +++++++++------ k8s/rabbitmq.yaml | 2 +- 2 files changed, 10 insertions(+), 7 deletions(-) diff --git a/.github/workflows/build-mq-broker.yml b/.github/workflows/build-mq-broker.yml index 8c6f1e6ae1..85c27c23cc 100644 --- a/.github/workflows/build-mq-broker.yml +++ b/.github/workflows/build-mq-broker.yml @@ -8,10 +8,13 @@ on: - 'dev/mq/**' - '.github/workflows/build-mq-broker.yml' - workflow_dispatch: - -env: - RABBITMQ_VERSION: 3.12-alpine + workflow_dispatch: + inputs: + rabbitmq_version: + description: 'RabbitMQ Version' + default: '3.13-alpine' + required: true + type: string jobs: publish: @@ -45,7 +48,7 @@ jobs: file: dev/mq/Dockerfile platforms: linux/amd64,linux/arm64 push: true - build-args: RABBITMQ_VERSION=${{ env.RABBITMQ_VERSION }} + build-args: RABBITMQ_VERSION=${{ inputs.rabbitmq_version }} tags: | - ghcr.io/ietf-tools/datatracker-mq:${{ env.RABBITMQ_VERSION }} + ghcr.io/ietf-tools/datatracker-mq:${{ inputs.rabbitmq_version }} ghcr.io/ietf-tools/datatracker-mq:latest diff --git a/k8s/rabbitmq.yaml b/k8s/rabbitmq.yaml index 3cab7ff565..0c8f0705b5 100644 --- a/k8s/rabbitmq.yaml +++ b/k8s/rabbitmq.yaml @@ -29,7 +29,7 @@ spec: # ----------------------------------------------------- # RabbitMQ Container # ----------------------------------------------------- - - image: "ghcr.io/ietf-tools/datatracker-mq:3.12-alpine" + - image: "ghcr.io/ietf-tools/datatracker-mq:3.13-alpine" imagePullPolicy: Always name: rabbitmq ports: From 9dd25b9aee2ccb565d74d55d90c67bf8388c910e Mon Sep 17 00:00:00 2001 From: Matthew Holloway Date: Fri, 28 Mar 2025 04:07:53 +1300 Subject: [PATCH 32/44] chore: vite sourcemap (#8719) --- vite.config.js | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/vite.config.js b/vite.config.js index 41a2cb02e0..bde2b9ed57 100644 --- a/vite.config.js +++ b/vite.config.js @@ -16,7 +16,8 @@ export default defineConfig(({ command, mode }) => { main: 'client/main.js', embedded: 
'client/embedded.js' } - } + }, + sourcemap: true }, cacheDir: '.vite', plugins: [ From 7e9a46af63da0e45f3ea0e4ccfda69875b282190 Mon Sep 17 00:00:00 2001 From: Robert Sparks Date: Thu, 27 Mar 2025 13:59:24 -0500 Subject: [PATCH 33/44] feat: move base containers to bookworm (#8710) --- docker/base.Dockerfile | 17 ++++++++++------- docker/scripts/app-setup-debian.sh | 16 +++------------- ietf/submit/tests.py | 4 ++-- 3 files changed, 15 insertions(+), 22 deletions(-) diff --git a/docker/base.Dockerfile b/docker/base.Dockerfile index e2465f33c2..f364456c7a 100644 --- a/docker/base.Dockerfile +++ b/docker/base.Dockerfile @@ -1,4 +1,4 @@ -FROM python:3.9-bullseye +FROM python:3.9-bookworm LABEL maintainer="IETF Tools Team " ENV DEBIAN_FRONTEND=noninteractive @@ -14,6 +14,9 @@ RUN apt-get install -y --no-install-recommends ca-certificates curl gnupg \ && mkdir -p /etc/apt/keyrings\ && curl -fsSL https://deb.nodesource.com/gpgkey/nodesource-repo.gpg.key | gpg --dearmor -o /etc/apt/keyrings/nodesource.gpg RUN echo "deb [signed-by=/etc/apt/keyrings/nodesource.gpg] https://deb.nodesource.com/node_$NODE_MAJOR.x nodistro main" | tee /etc/apt/sources.list.d/nodesource.list +RUN echo "Package: nodejs" >> /etc/apt/preferences.d/preferences && \ + echo "Pin: origin deb.nodesource.com" >> /etc/apt/preferences.d/preferences && \ + echo "Pin-Priority: 1001" >> /etc/apt/preferences.d/preferences # Add Docker Source RUN curl -fsSL https://download.docker.com/linux/debian/gpg | gpg --dearmor -o /usr/share/keyrings/docker-archive-keyring.gpg @@ -56,12 +59,13 @@ RUN apt-get update --fix-missing && apt-get install -qy --no-install-recommends libmagic-dev \ libmariadb-dev \ libmemcached-tools \ + libyang2-tools \ locales \ make \ mariadb-client \ memcached \ nano \ - netcat \ + netcat-traditional \ nodejs \ pgloader \ pigz \ @@ -77,7 +81,6 @@ RUN apt-get update --fix-missing && apt-get install -qy --no-install-recommends wget \ xauth \ xvfb \ - yang-tools \ zsh # Install kramdown-rfc2629 
(ruby) @@ -106,11 +109,11 @@ RUN apt-get autoremove -y && apt-get clean -y && rm -rf /var/lib/apt/lists/* /va ENV DBUS_SESSION_BUS_ADDRESS=/dev/null # avoid million NPM install messages -ENV npm_config_loglevel warn +ENV npm_config_loglevel=warn # allow installing when the main user is root -ENV npm_config_unsafe_perm true +ENV npm_config_unsafe_perm=true # disable NPM funding messages -ENV npm_config_fund false +ENV npm_config_fund=false # Set locale to en_US.UTF-8 RUN echo "LC_ALL=en_US.UTF-8" >> /etc/environment && \ @@ -119,7 +122,7 @@ RUN echo "LC_ALL=en_US.UTF-8" >> /etc/environment && \ dpkg-reconfigure locales && \ locale-gen en_US.UTF-8 && \ update-locale LC_ALL en_US.UTF-8 -ENV LC_ALL en_US.UTF-8 +ENV LC_ALL=en_US.UTF-8 # Install idnits ADD https://raw.githubusercontent.com/ietf-tools/idnits-mirror/main/idnits /usr/local/bin/ diff --git a/docker/scripts/app-setup-debian.sh b/docker/scripts/app-setup-debian.sh index ddfc351995..ea9cc3fb87 100644 --- a/docker/scripts/app-setup-debian.sh +++ b/docker/scripts/app-setup-debian.sh @@ -10,7 +10,6 @@ # Syntax: ./common-debian.sh [install zsh flag] [username] [user UID] [user GID] [upgrade packages flag] [install Oh My Zsh! flag] [Add non-free packages] set -e - INSTALL_ZSH=${1:-"true"} USERNAME=${2:-"automatic"} USER_UID=${3:-"automatic"} @@ -116,18 +115,9 @@ if [ "${PACKAGES_ALREADY_INSTALLED}" != "true" ]; then # Needed for adding manpages-posix and manpages-posix-dev which are non-free packages in Debian if [ "${ADD_NON_FREE_PACKAGES}" = "true" ]; then # Bring in variables from /etc/os-release like VERSION_CODENAME - . 
/etc/os-release - sed -i -E "s/deb http:\/\/(deb|httpredir)\.debian\.org\/debian ${VERSION_CODENAME} main/deb http:\/\/\1\.debian\.org\/debian ${VERSION_CODENAME} main contrib non-free/" /etc/apt/sources.list - sed -i -E "s/deb-src http:\/\/(deb|httredir)\.debian\.org\/debian ${VERSION_CODENAME} main/deb http:\/\/\1\.debian\.org\/debian ${VERSION_CODENAME} main contrib non-free/" /etc/apt/sources.list - sed -i -E "s/deb http:\/\/(deb|httpredir)\.debian\.org\/debian ${VERSION_CODENAME}-updates main/deb http:\/\/\1\.debian\.org\/debian ${VERSION_CODENAME}-updates main contrib non-free/" /etc/apt/sources.list - sed -i -E "s/deb-src http:\/\/(deb|httpredir)\.debian\.org\/debian ${VERSION_CODENAME}-updates main/deb http:\/\/\1\.debian\.org\/debian ${VERSION_CODENAME}-updates main contrib non-free/" /etc/apt/sources.list - sed -i "s/deb http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}\/updates main/deb http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}\/updates main contrib non-free/" /etc/apt/sources.list - sed -i "s/deb-src http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}\/updates main/deb http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}\/updates main contrib non-free/" /etc/apt/sources.list - sed -i "s/deb http:\/\/deb\.debian\.org\/debian ${VERSION_CODENAME}-backports main/deb http:\/\/deb\.debian\.org\/debian ${VERSION_CODENAME}-backports main contrib non-free/" /etc/apt/sources.list - sed -i "s/deb-src http:\/\/deb\.debian\.org\/debian ${VERSION_CODENAME}-backports main/deb http:\/\/deb\.debian\.org\/debian ${VERSION_CODENAME}-backports main contrib non-free/" /etc/apt/sources.list - # Handle bullseye location for security https://www.debian.org/releases/bullseye/amd64/release-notes/ch-information.en.html - sed -i "s/deb http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}-security main/deb http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}-security main 
contrib non-free/" /etc/apt/sources.list - sed -i "s/deb-src http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}-security main/deb http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}-security main contrib non-free/" /etc/apt/sources.list + . /etc/os-release + sed -i -E "s/Components: main/Components: main contrib non-free/" /etc/apt/sources.list.d/debian.sources + echo "Running apt-get update..." apt-get update package_list="${package_list} manpages-posix manpages-posix-dev" diff --git a/ietf/submit/tests.py b/ietf/submit/tests.py index 9a993480cd..6b1c998e76 100644 --- a/ietf/submit/tests.py +++ b/ietf/submit/tests.py @@ -1888,7 +1888,7 @@ def test_submit_invalid_yang(self): r = self.client.get(status_url) q = PyQuery(r.content) # - self.assertContains(r, 'The yang validation returned 1 error') + self.assertContains(r, 'The yang validation returned 3 errors') # m = q('#yang-validation-message').text() for command in ['xym', 'pyang', 'yanglint']: @@ -1898,7 +1898,7 @@ def test_submit_invalid_yang(self): self.assertIn("draft-yang-testing-invalid-00.txt", m) self.assertIn("error: syntax error: illegal keyword: ;", m) if settings.SUBMIT_YANGLINT_COMMAND and os.path.exists(settings.YANGLINT_BINARY): - self.assertIn("No validation errors", m) + self.assertIn('libyang err : Parsing module "ietf-yang-metadata" failed.', m) def submit_conflicting_submissiondocevent_rev(self, new_rev='01', existing_rev='01'): """Test submitting a rev when an equal or later SubmissionDocEvent rev exists From 431c475060778ccaaf611e0de7a9bcc0c3b93c42 Mon Sep 17 00:00:00 2001 From: rjsparks <10996692+rjsparks@users.noreply.github.com> Date: Thu, 27 Mar 2025 19:12:31 +0000 Subject: [PATCH 34/44] ci: update base image target version to 20250327T1859 --- dev/build/Dockerfile | 2 +- dev/build/TARGET_BASE | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/dev/build/Dockerfile b/dev/build/Dockerfile index 2b02a091c5..cc55c92881 100644 --- 
a/dev/build/Dockerfile +++ b/dev/build/Dockerfile @@ -1,4 +1,4 @@ -FROM ghcr.io/ietf-tools/datatracker-app-base:20250315T0745 +FROM ghcr.io/ietf-tools/datatracker-app-base:20250327T1859 LABEL maintainer="IETF Tools Team " ENV DEBIAN_FRONTEND=noninteractive diff --git a/dev/build/TARGET_BASE b/dev/build/TARGET_BASE index e6f490b168..50e8bfd839 100644 --- a/dev/build/TARGET_BASE +++ b/dev/build/TARGET_BASE @@ -1 +1 @@ -20250315T0745 +20250327T1859 From 752bc2103146b534e1bb6188c7d54d11f07ba885 Mon Sep 17 00:00:00 2001 From: Rudi Matz Date: Thu, 27 Mar 2025 18:19:30 -0400 Subject: [PATCH 35/44] feat: highlight unexpected state in AD dashboard (#8738) * feat: highlight unexpected state in AD dashboard * test: add tests for is_unexpected_wg_state * test: improve tests using WgDraftFactory --- ietf/doc/templatetags/ietf_filters.py | 13 +++++++++++++ ietf/doc/templatetags/tests_ietf_filters.py | 16 ++++++++++++++++ ietf/templates/doc/search/status_columns.html | 4 ++++ 3 files changed, 33 insertions(+) diff --git a/ietf/doc/templatetags/ietf_filters.py b/ietf/doc/templatetags/ietf_filters.py index 86507eeaaa..d4adf96a27 100644 --- a/ietf/doc/templatetags/ietf_filters.py +++ b/ietf/doc/templatetags/ietf_filters.py @@ -480,6 +480,19 @@ def state(doc, slug): slug = "%s-stream-%s" % (doc.type_id, doc.stream_id) return doc.get_state(slug) + +@register.filter +def is_unexpected_wg_state(doc): + """Returns a flag indicating whether the document has an unexpected wg state.""" + if not doc.type_id == "draft": + return False + + draft_iesg_state = doc.get_state("draft-iesg") + draft_stream_state = doc.get_state("draft-stream-ietf") + + return draft_iesg_state.slug != "idexists" and draft_stream_state is not None and draft_stream_state.slug != "sub-pub" + + @register.filter def statehelp(state): "Output help icon with tooltip for state." 
diff --git a/ietf/doc/templatetags/tests_ietf_filters.py b/ietf/doc/templatetags/tests_ietf_filters.py index f018b7d9b3..b5130849ea 100644 --- a/ietf/doc/templatetags/tests_ietf_filters.py +++ b/ietf/doc/templatetags/tests_ietf_filters.py @@ -14,12 +14,14 @@ ConflictReviewFactory, BofreqFactory, StatementFactory, + RfcFactory, ) from ietf.doc.models import DocEvent from ietf.doc.templatetags.ietf_filters import ( urlize_ietf_docs, is_valid_url, is_in_stream, + is_unexpected_wg_state, ) from ietf.person.models import Person from ietf.utils.test_utils import TestCase @@ -174,3 +176,17 @@ def test_urlize_ietf_docs(self): for input, output in cases: # debug.show("(input, urlize_ietf_docs(input), output)") self.assertEqual(urlize_ietf_docs(input), output) + + def test_is_unexpected_wg_state(self): + """ + Test that the unexpected_wg_state function works correctly + """ + # test documents with expected wg states + self.assertFalse(is_unexpected_wg_state(RfcFactory())) + self.assertFalse(is_unexpected_wg_state(WgDraftFactory (states=[('draft-stream-ietf', 'sub-pub')]))) + self.assertFalse(is_unexpected_wg_state(WgDraftFactory (states=[('draft-iesg', 'idexists')]))) + self.assertFalse(is_unexpected_wg_state(WgDraftFactory (states=[('draft-stream-ietf', 'wg-cand'), ('draft-iesg','idexists')]))) + + # test documents with unexpected wg states due to invalid combination of states + self.assertTrue(is_unexpected_wg_state(WgDraftFactory (states=[('draft-stream-ietf', 'wg-cand'), ('draft-iesg','lc-req')]))) + self.assertTrue(is_unexpected_wg_state(WgDraftFactory (states=[('draft-stream-ietf', 'chair-w'), ('draft-iesg','pub-req')]))) diff --git a/ietf/templates/doc/search/status_columns.html b/ietf/templates/doc/search/status_columns.html index 15f284fd12..5ba41bb9c4 100644 --- a/ietf/templates/doc/search/status_columns.html +++ b/ietf/templates/doc/search/status_columns.html @@ -78,6 +78,10 @@ {% person_link action_holder.person title=action_holder.role_for_doc %}{% if 
action_holder|action_holder_badge %} {{ action_holder|action_holder_badge }}{% endif %}{% if not forloop.last %},{% endif %} {% endfor %} {% endif %} + {% if doc|is_unexpected_wg_state %} +
+ Unexpected WG state + {% endif %} {% else %} {# RFC #} {{ doc.std_level|safe }} RFC From aa2a3217a7006c05292650bb0d75b0243d2cc67b Mon Sep 17 00:00:00 2001 From: Nicolas Giard Date: Fri, 28 Mar 2025 10:59:56 -0400 Subject: [PATCH 36/44] fix(agenda): strikethrough cancelled and rescheduled events (#8694) * fix(agenda): strikethrough cancelled and rescheduled events * fix(agenda): rescheduled + canceled badges width on mobile * fix(agenda): use em for canceled / rescheduled badge on mobile * fix: deleting badge width/display:block CSS --------- Co-authored-by: Matthew Holloway --- client/agenda/AgendaDetailsModal.vue | 1 - client/agenda/AgendaScheduleCalendar.vue | 1 - client/agenda/AgendaScheduleList.vue | 12 ++++++++++-- 3 files changed, 10 insertions(+), 4 deletions(-) diff --git a/client/agenda/AgendaDetailsModal.vue b/client/agenda/AgendaDetailsModal.vue index 037e32f9cb..2582bf2159 100644 --- a/client/agenda/AgendaDetailsModal.vue +++ b/client/agenda/AgendaDetailsModal.vue @@ -327,7 +327,6 @@ async function fetchSessionMaterials () { border-radius: 5px; .badge { - width: 30px; font-size: .7em; background-color: $yellow-200; border-bottom: 1px solid $yellow-500; diff --git a/client/agenda/AgendaScheduleCalendar.vue b/client/agenda/AgendaScheduleCalendar.vue index 6701ddabd7..9863296341 100644 --- a/client/agenda/AgendaScheduleCalendar.vue +++ b/client/agenda/AgendaScheduleCalendar.vue @@ -330,7 +330,6 @@ function close () { } .badge { - width: 30px; font-size: .7em; border: 1px solid #CCC; text-transform: uppercase; diff --git a/client/agenda/AgendaScheduleList.vue b/client/agenda/AgendaScheduleList.vue index 86c91bed85..d38c7842eb 100644 --- a/client/agenda/AgendaScheduleList.vue +++ b/client/agenda/AgendaScheduleList.vue @@ -1097,8 +1097,6 @@ onBeforeUnmount(() => { } @media screen and (max-width: $bs5-break-md) { - width: 30px; - display: block; margin: 2px 0 0 0; } } @@ -1321,6 +1319,11 @@ onBeforeUnmount(() => { &.agenda-table-cell-room { border-right: 1px 
solid darken($red-100, 5%) !important; + text-decoration: line-through; + } + + &.agenda-table-cell-name > a, &.agenda-table-cell-name > span { + text-decoration: line-through; } &:last-child { @@ -1348,6 +1351,11 @@ onBeforeUnmount(() => { &.agenda-table-cell-room { border-right: 1px solid darken($orange-100, 5%) !important; + text-decoration: line-through; + } + + &.agenda-table-cell-name > a, &.agenda-table-cell-name > span { + text-decoration: line-through; } &:last-child { From 527db359e3836a587ba67432b814004b75635022 Mon Sep 17 00:00:00 2001 From: Nicolas Giard Date: Tue, 1 Apr 2025 09:03:59 -0400 Subject: [PATCH 37/44] chore: fix line ending for .github/ISSUE_TEMPLATE/config.yml --- .github/ISSUE_TEMPLATE/config.yml | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml index 5e27af9fed..320614b17e 100644 --- a/.github/ISSUE_TEMPLATE/config.yml +++ b/.github/ISSUE_TEMPLATE/config.yml @@ -1,8 +1,8 @@ -blank_issues_enabled: false -contact_links: - - name: Help and questions - url: https://github.com/ietf-tools/datatracker/discussions/categories/help-questions - about: Need help? Have a question on setting up the project or its usage? - - name: Discuss new ideas - url: https://github.com/ietf-tools/datatracker/discussions/categories/ideas - about: Submit ideas for new features or improvements to be discussed. +blank_issues_enabled: false +contact_links: + - name: Help and questions + url: https://github.com/ietf-tools/datatracker/discussions/categories/help-questions + about: Need help? Have a question on setting up the project or its usage? + - name: Discuss new ideas + url: https://github.com/ietf-tools/datatracker/discussions/categories/ideas + about: Submit ideas for new features or improvements to be discussed. 
From 9957cf190a4a61369f97e0224e0bb2ae6ed9c8be Mon Sep 17 00:00:00 2001 From: Rudi Matz Date: Tue, 1 Apr 2025 14:35:04 -0400 Subject: [PATCH 38/44] feat: block iesg ballot for docs from outside streams (#8761) * feat: block iesg ballot for docs from outside streams * test: correct failing irsg test * feat: improve/simplify tests and condition * test: filter out pyflakes tests related to globals --- ietf/doc/tests_ballot.py | 29 +++++++++++++++++++++++------ ietf/doc/tests_irsg_ballot.py | 2 +- ietf/doc/views_ballot.py | 5 ++++- ietf/utils/test_runner.py | 9 ++++++++- 4 files changed, 36 insertions(+), 9 deletions(-) diff --git a/ietf/doc/tests_ballot.py b/ietf/doc/tests_ballot.py index c7362b58e2..ec23f3d491 100644 --- a/ietf/doc/tests_ballot.py +++ b/ietf/doc/tests_ballot.py @@ -17,7 +17,7 @@ from ietf.doc.models import (Document, State, DocEvent, BallotPositionDocEvent, LastCallDocEvent, WriteupDocEvent, TelechatDocEvent) from ietf.doc.factories import (DocumentFactory, IndividualDraftFactory, IndividualRfcFactory, WgDraftFactory, - BallotPositionDocEventFactory, BallotDocEventFactory, IRSGBallotDocEventFactory) + BallotPositionDocEventFactory, BallotDocEventFactory, IRSGBallotDocEventFactory, RgDraftFactory) from ietf.doc.templatetags.ietf_filters import can_defer from ietf.doc.utils import create_ballot_if_not_open from ietf.doc.views_ballot import parse_ballot_edit_return_point @@ -360,7 +360,7 @@ def test_request_last_call(self): self.assertTrue('aread@' in outbox[-1]['Cc']) def test_edit_ballot_writeup(self): - draft = IndividualDraftFactory(states=[('draft','active'),('draft-iesg','iesg-eva')]) + draft = IndividualDraftFactory(states=[('draft','active'),('draft-iesg','iesg-eva')], stream_id='ietf') url = urlreverse('ietf.doc.views_ballot.ballot_writeupnotes', kwargs=dict(name=draft.name)) login_testing_unauthorized(self, "secretary", url) @@ -390,8 +390,25 @@ def test_edit_ballot_writeup(self): self.assertTrue("This is a simple test" in 
d.latest_event(WriteupDocEvent, type="changed_ballot_writeup_text").text) self.assertTrue('iesg-eva' == d.get_state_slug('draft-iesg')) + def test_edit_ballot_writeup_unauthorized_stream(self): + # Test that accessing a document from unauthorized (irtf) stream returns a 404 error + draft = RgDraftFactory() + url = urlreverse('ietf.doc.views_ballot.ballot_writeupnotes', kwargs=dict(name=draft.name)) + login_testing_unauthorized(self, "ad", url) + + r = self.client.get(url) + self.assertEqual(r.status_code, 404) + + def test_edit_ballot_writeup_invalid_name(self): + # Test that accessing a non-existent document returns a 404 error + url = urlreverse('ietf.doc.views_ballot.ballot_writeupnotes', kwargs=dict(name="invalid_name")) + login_testing_unauthorized(self, "ad", url) + + r = self.client.get(url) + self.assertEqual(r.status_code, 404) + def test_edit_ballot_writeup_already_approved(self): - draft = IndividualDraftFactory(states=[('draft','active'),('draft-iesg','approved')]) + draft = IndividualDraftFactory(states=[('draft','active'),('draft-iesg','approved')], stream_id='ietf') url = urlreverse('ietf.doc.views_ballot.ballot_writeupnotes', kwargs=dict(name=draft.name)) login_testing_unauthorized(self, "secretary", url) @@ -465,7 +482,7 @@ def test_edit_ballot_rfceditornote(self): def test_issue_ballot(self): ad = Person.objects.get(user__username="ad") for case in ('none','past','future'): - draft = IndividualDraftFactory(ad=ad) + draft = IndividualDraftFactory(ad=ad, stream_id='ietf') if case in ('past','future'): LastCallDocEvent.objects.create( by=Person.objects.get(name='(System)'), @@ -504,7 +521,7 @@ def test_issue_ballot(self): def test_issue_ballot_auto_state_change(self): ad = Person.objects.get(user__username="ad") - draft = IndividualDraftFactory(ad=ad, states=[('draft','active'),('draft-iesg','writeupw')]) + draft = IndividualDraftFactory(ad=ad, states=[('draft','active'),('draft-iesg','writeupw')], stream_id='ietf') url = 
urlreverse('ietf.doc.views_ballot.ballot_writeupnotes', kwargs=dict(name=draft.name)) login_testing_unauthorized(self, "secretary", url) @@ -528,7 +545,7 @@ def test_issue_ballot_auto_state_change(self): def test_issue_ballot_warn_if_early(self): ad = Person.objects.get(user__username="ad") - draft = IndividualDraftFactory(ad=ad, states=[('draft','active'),('draft-iesg','lc')]) + draft = IndividualDraftFactory(ad=ad, states=[('draft','active'),('draft-iesg','lc')], stream_id='ietf') url = urlreverse('ietf.doc.views_ballot.ballot_writeupnotes', kwargs=dict(name=draft.name)) login_testing_unauthorized(self, "secretary", url) diff --git a/ietf/doc/tests_irsg_ballot.py b/ietf/doc/tests_irsg_ballot.py index 92752e48c4..aa62d8aaf9 100644 --- a/ietf/doc/tests_irsg_ballot.py +++ b/ietf/doc/tests_irsg_ballot.py @@ -288,7 +288,7 @@ def test_edit_ballot_position_permissions(self): def test_iesg_ballot_no_irsg_actions(self): ad = Person.objects.get(user__username="ad") - wg_draft = IndividualDraftFactory(ad=ad) + wg_draft = IndividualDraftFactory(ad=ad, stream_id='ietf') irsgmember = get_active_irsg()[0] url = urlreverse('ietf.doc.views_ballot.ballot_writeupnotes', kwargs=dict(name=wg_draft.name)) diff --git a/ietf/doc/views_ballot.py b/ietf/doc/views_ballot.py index 4ff40d2268..9e2a417933 100644 --- a/ietf/doc/views_ballot.py +++ b/ietf/doc/views_ballot.py @@ -611,6 +611,10 @@ def clean_ballot_writeup(self): def ballot_writeupnotes(request, name): """Editing of ballot write-up and notes""" doc = get_object_or_404(Document, name=name) + + if doc.stream_id is None or doc.stream_id != 'ietf': + raise Http404("The requested operation is not allowed for this document.") + prev_state = doc.get_state("draft-iesg") login = request.user.person @@ -1335,4 +1339,3 @@ def parse_ballot_edit_return_point(path, doc_name, ballot_id): "ietf.iesg.views.past_documents", } return validate_return_to_path(path, get_default_path, allowed_path_handlers) - diff --git a/ietf/utils/test_runner.py 
b/ietf/utils/test_runner.py index 3c89a2d01c..6b6f282c49 100644 --- a/ietf/utils/test_runner.py +++ b/ietf/utils/test_runner.py @@ -263,7 +263,14 @@ def pyflakes_test(self): path = os.path.join(settings.BASE_DIR) warnings = [] warnings = pyflakes.checkPaths([path], verbosity=0) - self.assertEqual([], [str(w) for w in warnings]) + + # Filter out warnings about unused global variables + filtered_warnings = [ + w for w in warnings + if not re.search(r"`global \w+` is unused: name is never assigned in scope", str(w)) + ] + + self.assertEqual([], [str(w) for w in filtered_warnings]) class MyPyTest(TestCase): From 48f97c68406a158a4d4bf5da58e9168d12a5f92e Mon Sep 17 00:00:00 2001 From: Jennifer Richards Date: Tue, 1 Apr 2025 16:12:33 -0300 Subject: [PATCH 39/44] chore: fix/refactor code near unneeded global declarations (#8765) * refactor: unused vars in draft.py * refactor: unwrap/simplify random_faker() prep * chore: types/globals in test_runner.py * chore: drop unneeded globals --- ietf/doc/templatetags/wg_menu.py | 2 -- ietf/person/factories.py | 26 ++++++++++++++------------ ietf/utils/draft.py | 8 +------- ietf/utils/patch.py | 3 +-- ietf/utils/test_runner.py | 31 ++++++++++++++----------------- 5 files changed, 30 insertions(+), 40 deletions(-) diff --git a/ietf/doc/templatetags/wg_menu.py b/ietf/doc/templatetags/wg_menu.py index 76bf7eb4d0..3e8d209448 100644 --- a/ietf/doc/templatetags/wg_menu.py +++ b/ietf/doc/templatetags/wg_menu.py @@ -62,8 +62,6 @@ @register.simple_tag def wg_menu(flavor): - global parents - for p in parents: p.short_name = parent_short_names.get(p.acronym) or p.name if p.short_name.endswith(" Area"): diff --git a/ietf/person/factories.py b/ietf/person/factories.py index 45de554766..98756f26c8 100644 --- a/ietf/person/factories.py +++ b/ietf/person/factories.py @@ -26,20 +26,22 @@ fake = faker.Factory.create() -def setup(): - global acceptable_fakers - # The transliteration of some Arabic and Devanagari names introduces - # non-alphabetic 
characters that don't work with the draft author - # extraction code, and also don't seem to match the way people with Arabic - # names romanize Arabic names. Exclude those locales from name generation - # in order to avoid test failures. - locales = set( [ l for l in faker.config.AVAILABLE_LOCALES if not (l.startswith('ar_') or l.startswith('sg_') or l=='fr_QC') ] ) - acceptable_fakers = [faker.Faker(locale) for locale in locales] -setup() +# The transliteration of some Arabic and Devanagari names introduces +# non-alphabetic characters that don't work with the draft author +# extraction code, and also don't seem to match the way people with Arabic +# names romanize Arabic names. Exclude those locales from name generation +# in order to avoid test failures. +_acceptable_fakers = [ + faker.Faker(locale) + for locale in set(faker.config.AVAILABLE_LOCALES) + if not (locale.startswith('ar_') or locale.startswith('sg_') or locale == 'fr_QC') +] + def random_faker(): - global acceptable_fakers - return random.sample(acceptable_fakers, 1)[0] + """Helper to get a random faker acceptable for User names""" + return random.sample(_acceptable_fakers, 1)[0] + class UserFactory(factory.django.DjangoModelFactory): class Meta: diff --git a/ietf/utils/draft.py b/ietf/utils/draft.py index 50add5abba..53d3d40811 100755 --- a/ietf/utils/draft.py +++ b/ietf/utils/draft.py @@ -65,7 +65,6 @@ opt_debug = False opt_timestamp = False opt_trace = False -opt_authorinfo = False opt_attributes = False # Don't forget to add the option variable to the globals list in _main below @@ -1332,8 +1331,6 @@ def getmeta(fn): # ---------------------------------------------------------------------- def _output(docname, fields, outfile=sys.stdout): - global company_domain - if opt_attributes: def outputkey(key, fields): field = fields[key] @@ -1373,9 +1370,8 @@ def _printmeta(fn, outfile=sys.stdout): # Main # ---------------------------------------------------------------------- -company_domain = {} # 
type: Dict[str, str] def _main(outfile=sys.stdout): - global opt_debug, opt_timestamp, opt_trace, opt_authorinfo, files, company_domain, opt_attributes + global opt_debug, opt_timestamp, opt_trace, files, opt_attributes # set default values, if any # ---------------------------------------------------------------------- # Option processing @@ -1423,8 +1419,6 @@ def _main(outfile=sys.stdout): elif opt in ["-T", "--trace"]: # Emit trace information while working opt_trace = True - company_domain = {} - if not files: files = [ "-" ] diff --git a/ietf/utils/patch.py b/ietf/utils/patch.py index 9de2270ebb..fd3e4a165d 100644 --- a/ietf/utils/patch.py +++ b/ietf/utils/patch.py @@ -87,8 +87,7 @@ def createLock(self): debugmode = False def setdebug(): - global debugmode, streamhandler - + global debugmode debugmode = True loglevel = logging.DEBUG logformat = "%(levelname)8s %(message)s" diff --git a/ietf/utils/test_runner.py b/ietf/utils/test_runner.py index 6b6f282c49..a77377ffb5 100644 --- a/ietf/utils/test_runner.py +++ b/ietf/utils/test_runner.py @@ -53,9 +53,10 @@ import factory.random import urllib3 import warnings -from urllib.parse import urlencode from fnmatch import fnmatch +from typing import Callable, Optional +from urllib.parse import urlencode from coverage.report import Reporter from coverage.results import Numbers @@ -90,11 +91,11 @@ from mypy_boto3_s3.service_resource import Bucket -loaded_templates = set() -visited_urls = set() -test_database_name = None -old_destroy = None -old_create = None +loaded_templates: set[str] = set() +visited_urls: set[str] = set() +test_database_name: Optional[str] = None +old_destroy: Optional[Callable] = None +old_create: Optional[Callable] = None template_coverage_collection = None code_coverage_collection = None @@ -230,10 +231,12 @@ def load_and_run_fixtures(verbosity): fn() def safe_create_test_db(self, verbosity, *args, **kwargs): - global test_database_name, old_create + if old_create is None: + raise 
RuntimeError("old_create has not been set, cannot proceed") keepdb = kwargs.get('keepdb', False) if not keepdb: print(" Creating test database...") + global test_database_name test_database_name = old_create(self, 0, *args, **kwargs) if settings.GLOBAL_TEST_FIXTURES: @@ -243,8 +246,9 @@ def safe_create_test_db(self, verbosity, *args, **kwargs): return test_database_name def safe_destroy_test_db(*args, **kwargs): + if old_destroy is None: + raise RuntimeError("old_destroy has not been set, cannot proceed") sys.stdout.write('\n') - global test_database_name, old_destroy keepdb = kwargs.get('keepdb', False) if not keepdb: if settings.DATABASES["default"]["NAME"] != test_database_name: @@ -358,15 +362,13 @@ class TemplateCoverageLoader(BaseLoader): is_usable = True def get_template(self, template_name, skip=None): - global template_coverage_collection, loaded_templates - if template_coverage_collection == True: + if template_coverage_collection: loaded_templates.add(str(template_name)) raise TemplateDoesNotExist(template_name) def record_urls_middleware(get_response): def record_urls(request): - global url_coverage_collection, visited_urls - if url_coverage_collection == True: + if url_coverage_collection: visited_urls.add(request.path) return get_response(request) return record_urls @@ -532,7 +534,6 @@ def report_test_result(self, test): ( test, test_coverage*100, latest_coverage_version, master_coverage*100, )) def template_coverage_test(self): - global loaded_templates if self.runner.check_coverage: apps = [ app.split('.')[-1] for app in self.runner.test_apps ] all = get_template_paths(apps) @@ -760,7 +761,6 @@ def __init__( self.show_logging = show_logging self.rerun = rerun self.test_labels = None - global validation_settings validation_settings["validate_html"] = self if validate_html else None validation_settings["validate_html_harder"] = self if validate_html and validate_html_harder else None validation_settings["show_logging"] = show_logging @@ -783,9 +783,6 
@@ def __init__( self.blobstoremanager = TestBlobstoreManager() if manage_blobstore else None def setup_test_environment(self, **kwargs): - global template_coverage_collection - global url_coverage_collection - ietf.utils.mail.test_mode = True ietf.utils.mail.SMTP_ADDR['ip4'] = '127.0.0.1' ietf.utils.mail.SMTP_ADDR['port'] = 2025 From 48bee8a58381e5caa6cf5d6b56bd58eff614a918 Mon Sep 17 00:00:00 2001 From: Jennifer Richards Date: Tue, 1 Apr 2025 17:19:30 -0300 Subject: [PATCH 40/44] chore: update libreoffice install for bookworm (#8768) --- dev/build/Dockerfile | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/dev/build/Dockerfile b/dev/build/Dockerfile index cc55c92881..c25298d652 100644 --- a/dev/build/Dockerfile +++ b/dev/build/Dockerfile @@ -10,9 +10,8 @@ RUN groupadd -g 1000 datatracker && \ RUN apt-get purge -y imagemagick imagemagick-6-common # Install libreoffice (needed via PPT2PDF_COMMAND) -RUN echo "deb http://deb.debian.org/debian bullseye-backports main" > /etc/apt/sources.list.d/bullseye-backports.list && \ - apt-get update && \ - apt-get -qyt bullseye-backports install libreoffice-nogui +RUN apt-get update && \ + apt-get -qy install libreoffice-nogui COPY . . 
COPY ./dev/build/start.sh ./start.sh From 15ef59133a0e8b62158836b697d9413dc395228f Mon Sep 17 00:00:00 2001 From: Matthew Holloway Date: Thu, 3 Apr 2025 05:10:22 +1300 Subject: [PATCH 41/44] chore: libreoffice flags for resilience (#8769) --- ietf/settings.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/ietf/settings.py b/ietf/settings.py index 1fe5f48229..33a2f976d9 100644 --- a/ietf/settings.py +++ b/ietf/settings.py @@ -1103,7 +1103,12 @@ def skip_unreadable_post(record): DATATRACKER_MAX_UPLOAD_SIZE = 40960000 PPT2PDF_COMMAND = [ - "/usr/bin/soffice", "--headless", "--convert-to", "pdf:writer_globaldocument_pdf_Export", "--outdir" + "/usr/bin/soffice", + "--headless", # no GUI + "--safe-mode", # use a new libreoffice profile every time (ensures no reliance on accumulated profile config) + "--norestore", # don't attempt to restore files after a previous crash (ensures that one crash won't block future conversions until UI intervention) + "--convert-to", "pdf:writer_globaldocument_pdf_Export", + "--outdir" ] STATS_REGISTRATION_ATTENDEES_JSON_URL = 'https://registration.ietf.org/{number}/attendees/' From afa79dc55b16463d56b531bb1b6b06fbbeb12ace Mon Sep 17 00:00:00 2001 From: Jim Fenton Date: Wed, 2 Apr 2025 09:10:38 -0700 Subject: [PATCH 42/44] feat: Warn if uploading minutes before session end (#8700) * Warn if uploading minutes before sessionn end * Remove extraneous btn-primary for session future Co-authored-by: Robert Sparks * fix: guard against unscheduled sessions * fix: test addition of warning * fix: another guard against unscheduled sessions * feat: test future warning on session details pannel --------- Co-authored-by: Robert Sparks --- ietf/meeting/tests_views.py | 286 ++++++++++-------- ietf/meeting/views.py | 6 + .../meeting/session_details_panel.html | 2 +- .../meeting/upload_session_minutes.html | 5 + 4 files changed, 168 insertions(+), 131 deletions(-) diff --git a/ietf/meeting/tests_views.py b/ietf/meeting/tests_views.py 
index 0f91986f77..a93a26b981 100644 --- a/ietf/meeting/tests_views.py +++ b/ietf/meeting/tests_views.py @@ -6541,110 +6541,130 @@ def test_upload_bluesheets_interim_chair_access(self): self.assertIn('Upload', str(q("title"))) - def test_upload_minutes_agenda(self): - for doctype in ('minutes','agenda'): - session = SessionFactory(meeting__type_id='ietf') - if doctype == 'minutes': - url = urlreverse('ietf.meeting.views.upload_session_minutes',kwargs={'num':session.meeting.number,'session_id':session.id}) - else: - url = urlreverse('ietf.meeting.views.upload_session_agenda',kwargs={'num':session.meeting.number,'session_id':session.id}) - self.client.logout() - login_testing_unauthorized(self,"secretary",url) - r = self.client.get(url) - self.assertEqual(r.status_code, 200) - q = PyQuery(r.content) - self.assertIn('Upload', str(q("Title"))) - self.assertFalse(session.presentations.exists()) - self.assertFalse(q('form input[type="checkbox"]')) - - session2 = SessionFactory(meeting=session.meeting,group=session.group) - r = self.client.get(url) - self.assertEqual(r.status_code, 200) - q = PyQuery(r.content) - self.assertTrue(q('form input[type="checkbox"]')) - - # test not submitting a file - r = self.client.post(url, dict(submission_method="upload")) - self.assertEqual(r.status_code, 200) - q = PyQuery(r.content) - self.assertTrue(q("form .is-invalid")) - - test_file = BytesIO(b'this is some text for a test') - test_file.name = "not_really.json" - r = self.client.post(url,dict(submission_method="upload",file=test_file)) - self.assertEqual(r.status_code, 200) - q = PyQuery(r.content) - self.assertTrue(q('form .is-invalid')) - - test_file = BytesIO(b'this is some text for a test'*1510000) - test_file.name = "not_really.pdf" - r = self.client.post(url,dict(submission_method="upload",file=test_file)) - self.assertEqual(r.status_code, 200) - q = PyQuery(r.content) - self.assertTrue(q('form .is-invalid')) - - test_file = BytesIO(b'') - test_file.name = "not_really.html" - r 
= self.client.post(url,dict(submission_method="upload",file=test_file)) - self.assertEqual(r.status_code, 200) - q = PyQuery(r.content) - self.assertTrue(q('form .is-invalid')) - - # Test html sanitization - test_file = BytesIO(b'Title

Title

Some text
') - test_file.name = "some.html" - r = self.client.post(url,dict(submission_method="upload",file=test_file)) - self.assertEqual(r.status_code, 302) - doc = session.presentations.filter(document__type_id=doctype).first().document - self.assertEqual(doc.rev,'00') - text = doc.text() - self.assertIn('Some text', text) - self.assertNotIn('
', text) - text = retrieve_str(doctype, f"{doc.name}-{doc.rev}.html") - self.assertIn('Some text', text) - self.assertNotIn('
', text) - - # txt upload - test_bytes = b'This is some text for a test, with the word\nvirtual at the beginning of a line.' - test_file = BytesIO(test_bytes) - test_file.name = "some.txt" - r = self.client.post(url,dict(submission_method="upload",file=test_file,apply_to_all=False)) - self.assertEqual(r.status_code, 302) - doc = session.presentations.filter(document__type_id=doctype).first().document - self.assertEqual(doc.rev,'01') - self.assertFalse(session2.presentations.filter(document__type_id=doctype)) - retrieved_bytes = retrieve_bytes(doctype, f"{doc.name}-{doc.rev}.txt") - self.assertEqual(retrieved_bytes, test_bytes) - + def test_label_future_sessions(self): + self.client.login(username='secretary', password='secretary+password') + for future in (True, False): + mtg_date = date_today()+datetime.timedelta(days=180 if future else -180) + session = SessionFactory(meeting__type_id='ietf', meeting__date=mtg_date) + # Verify future warning shows on the session details panel + url = urlreverse('ietf.meeting.views.session_details', kwargs={'num':session.meeting.number, 'acronym': session.group.acronym}) r = self.client.get(url) - self.assertEqual(r.status_code, 200) - q = PyQuery(r.content) - self.assertIn('Revise', str(q("Title"))) - test_bytes = b'this is some different text for a test' - test_file = BytesIO(test_bytes) - test_file.name = "also_some.txt" - r = self.client.post(url,dict(submission_method="upload",file=test_file,apply_to_all=True)) - self.assertEqual(r.status_code, 302) - doc = Document.objects.get(pk=doc.pk) - self.assertEqual(doc.rev,'02') - self.assertTrue(session2.presentations.filter(document__type_id=doctype)) - retrieved_bytes = retrieve_bytes(doctype, f"{doc.name}-{doc.rev}.txt") - self.assertEqual(retrieved_bytes, test_bytes) - - # Test bad encoding - test_file = BytesIO('

Title

Some\x93text
'.encode('latin1')) - test_file.name = "some.html" - r = self.client.post(url,dict(submission_method="upload",file=test_file)) - self.assertContains(r, 'Could not identify the file encoding') - doc = Document.objects.get(pk=doc.pk) - self.assertEqual(doc.rev,'02') + self.assertTrue(r.status_code==200) + if future: + self.assertContains(r, "Session has not ended yet") + else: + self.assertNotContains(r, "Session has not ended yet") - # Verify that we don't have dead links - url = urlreverse('ietf.meeting.views.session_details', kwargs={'num':session.meeting.number, 'acronym': session.group.acronym}) - top = '/meeting/%s/' % session.meeting.number - self.requests_mock.get(f'{session.notes_url()}/download', text='markdown notes') - self.requests_mock.get(f'{session.notes_url()}/info', text=json.dumps({'title': 'title', 'updatetime': '2021-12-01T17:11:00z'})) - self.crawl_materials(url=url, top=top) + def test_upload_minutes_agenda(self): + for doctype in ('minutes','agenda'): + for future in (True, False): + mtg_date = date_today()+datetime.timedelta(days=180 if future else -180) + session = SessionFactory(meeting__type_id='ietf', meeting__date=mtg_date) + if doctype == 'minutes': + url = urlreverse('ietf.meeting.views.upload_session_minutes',kwargs={'num':session.meeting.number,'session_id':session.id}) + else: + url = urlreverse('ietf.meeting.views.upload_session_agenda',kwargs={'num':session.meeting.number,'session_id':session.id}) + self.client.logout() + login_testing_unauthorized(self,"secretary",url) + r = self.client.get(url) + self.assertEqual(r.status_code, 200) + q = PyQuery(r.content) + self.assertIn('Upload', str(q("Title"))) + self.assertFalse(session.presentations.exists()) + self.assertFalse(q('form input[type="checkbox"]')) + if future and doctype == "minutes": + self.assertContains(r, "Session has not ended yet") + else: + self.assertNotContains(r, "Session has not ended yet") + + session2 = SessionFactory(meeting=session.meeting,group=session.group) 
+ r = self.client.get(url) + self.assertEqual(r.status_code, 200) + q = PyQuery(r.content) + self.assertTrue(q('form input[type="checkbox"]')) + + # test not submitting a file + r = self.client.post(url, dict(submission_method="upload")) + self.assertEqual(r.status_code, 200) + q = PyQuery(r.content) + self.assertTrue(q("form .is-invalid")) + + test_file = BytesIO(b'this is some text for a test') + test_file.name = "not_really.json" + r = self.client.post(url,dict(submission_method="upload",file=test_file)) + self.assertEqual(r.status_code, 200) + q = PyQuery(r.content) + self.assertTrue(q('form .is-invalid')) + + test_file = BytesIO(b'this is some text for a test'*1510000) + test_file.name = "not_really.pdf" + r = self.client.post(url,dict(submission_method="upload",file=test_file)) + self.assertEqual(r.status_code, 200) + q = PyQuery(r.content) + self.assertTrue(q('form .is-invalid')) + + test_file = BytesIO(b'') + test_file.name = "not_really.html" + r = self.client.post(url,dict(submission_method="upload",file=test_file)) + self.assertEqual(r.status_code, 200) + q = PyQuery(r.content) + self.assertTrue(q('form .is-invalid')) + + # Test html sanitization + test_file = BytesIO(b'Title

Title

Some text
') + test_file.name = "some.html" + r = self.client.post(url,dict(submission_method="upload",file=test_file)) + self.assertEqual(r.status_code, 302) + doc = session.presentations.filter(document__type_id=doctype).first().document + self.assertEqual(doc.rev,'00') + text = doc.text() + self.assertIn('Some text', text) + self.assertNotIn('
', text) + text = retrieve_str(doctype, f"{doc.name}-{doc.rev}.html") + self.assertIn('Some text', text) + self.assertNotIn('
', text) + + # txt upload + test_bytes = b'This is some text for a test, with the word\nvirtual at the beginning of a line.' + test_file = BytesIO(test_bytes) + test_file.name = "some.txt" + r = self.client.post(url,dict(submission_method="upload",file=test_file,apply_to_all=False)) + self.assertEqual(r.status_code, 302) + doc = session.presentations.filter(document__type_id=doctype).first().document + self.assertEqual(doc.rev,'01') + self.assertFalse(session2.presentations.filter(document__type_id=doctype)) + retrieved_bytes = retrieve_bytes(doctype, f"{doc.name}-{doc.rev}.txt") + self.assertEqual(retrieved_bytes, test_bytes) + + r = self.client.get(url) + self.assertEqual(r.status_code, 200) + q = PyQuery(r.content) + self.assertIn('Revise', str(q("Title"))) + test_bytes = b'this is some different text for a test' + test_file = BytesIO(test_bytes) + test_file.name = "also_some.txt" + r = self.client.post(url,dict(submission_method="upload",file=test_file,apply_to_all=True)) + self.assertEqual(r.status_code, 302) + doc = Document.objects.get(pk=doc.pk) + self.assertEqual(doc.rev,'02') + self.assertTrue(session2.presentations.filter(document__type_id=doctype)) + retrieved_bytes = retrieve_bytes(doctype, f"{doc.name}-{doc.rev}.txt") + self.assertEqual(retrieved_bytes, test_bytes) + + # Test bad encoding + test_file = BytesIO('

Title

Some\x93text
'.encode('latin1')) + test_file.name = "some.html" + r = self.client.post(url,dict(submission_method="upload",file=test_file)) + self.assertContains(r, 'Could not identify the file encoding') + doc = Document.objects.get(pk=doc.pk) + self.assertEqual(doc.rev,'02') + + # Verify that we don't have dead links + url = urlreverse('ietf.meeting.views.session_details', kwargs={'num':session.meeting.number, 'acronym': session.group.acronym}) + top = '/meeting/%s/' % session.meeting.number + self.requests_mock.get(f'{session.notes_url()}/download', text='markdown notes') + self.requests_mock.get(f'{session.notes_url()}/info', text=json.dumps({'title': 'title', 'updatetime': '2021-12-01T17:11:00z'})) + self.crawl_materials(url=url, top=top) def test_upload_minutes_agenda_unscheduled(self): for doctype in ('minutes','agenda'): @@ -6661,6 +6681,7 @@ def test_upload_minutes_agenda_unscheduled(self): self.assertIn('Upload', str(q("Title"))) self.assertFalse(session.presentations.exists()) self.assertFalse(q('form input[type="checkbox"]')) + self.assertNotContains(r, "Session has not ended yet") test_file = BytesIO(b'this is some text for a test') test_file.name = "not_really.txt" @@ -6669,35 +6690,40 @@ def test_upload_minutes_agenda_unscheduled(self): @override_settings(MEETING_MATERIALS_SERVE_LOCALLY=True) def test_upload_minutes_agenda_interim(self): - session=SessionFactory(meeting__type_id='interim') for doctype in ('minutes','agenda'): - if doctype=='minutes': - url = urlreverse('ietf.meeting.views.upload_session_minutes',kwargs={'num':session.meeting.number,'session_id':session.id}) - else: - url = urlreverse('ietf.meeting.views.upload_session_agenda',kwargs={'num':session.meeting.number,'session_id':session.id}) - self.client.logout() - login_testing_unauthorized(self,"secretary",url) - r = self.client.get(url) - self.assertEqual(r.status_code, 200) - q = PyQuery(r.content) - self.assertIn('Upload', str(q("title"))) - 
self.assertFalse(session.presentations.filter(document__type_id=doctype)) - test_bytes = b'this is some text for a test' - test_file = BytesIO(test_bytes) - test_file.name = "not_really.txt" - r = self.client.post(url,dict(submission_method="upload",file=test_file)) - self.assertEqual(r.status_code, 302) - doc = session.presentations.filter(document__type_id=doctype).first().document - self.assertEqual(doc.rev,'00') - retrieved_bytes = retrieve_bytes(doctype, f"{doc.name}-{doc.rev}.txt") - self.assertEqual(retrieved_bytes, test_bytes) - - # Verify that we don't have dead links - url = urlreverse('ietf.meeting.views.session_details', kwargs={'num':session.meeting.number, 'acronym': session.group.acronym}) - top = '/meeting/%s/' % session.meeting.number - self.requests_mock.get(f'{session.notes_url()}/download', text='markdown notes') - self.requests_mock.get(f'{session.notes_url()}/info', text=json.dumps({'title': 'title', 'updatetime': '2021-12-01T17:11:00z'})) - self.crawl_materials(url=url, top=top) + for future in (True, False): + session=SessionFactory(meeting__type_id='interim', meeting__date = date_today()+datetime.timedelta(days=180 if future else -180)) + if doctype=='minutes': + url = urlreverse('ietf.meeting.views.upload_session_minutes',kwargs={'num':session.meeting.number,'session_id':session.id}) + else: + url = urlreverse('ietf.meeting.views.upload_session_agenda',kwargs={'num':session.meeting.number,'session_id':session.id}) + self.client.logout() + login_testing_unauthorized(self,"secretary",url) + r = self.client.get(url) + self.assertEqual(r.status_code, 200) + q = PyQuery(r.content) + self.assertIn('Upload', str(q("title"))) + self.assertFalse(session.presentations.filter(document__type_id=doctype)) + if future and doctype == "minutes": + self.assertContains(r, "Session has not ended yet") + else: + self.assertNotContains(r, "Session has not ended yet") + test_bytes = b'this is some text for a test' + test_file = BytesIO(test_bytes) + 
test_file.name = "not_really.txt" + r = self.client.post(url,dict(submission_method="upload",file=test_file)) + self.assertEqual(r.status_code, 302) + doc = session.presentations.filter(document__type_id=doctype).first().document + self.assertEqual(doc.rev,'00') + retrieved_bytes = retrieve_bytes(doctype, f"{doc.name}-{doc.rev}.txt") + self.assertEqual(retrieved_bytes, test_bytes) + + # Verify that we don't have dead links + url = urlreverse('ietf.meeting.views.session_details', kwargs={'num':session.meeting.number, 'acronym': session.group.acronym}) + top = '/meeting/%s/' % session.meeting.number + self.requests_mock.get(f'{session.notes_url()}/download', text='markdown notes') + self.requests_mock.get(f'{session.notes_url()}/info', text=json.dumps({'title': 'title', 'updatetime': '2021-12-01T17:11:00z'})) + self.crawl_materials(url=url, top=top) @override_settings(MEETING_MATERIALS_SERVE_LOCALLY=True) def test_upload_narrativeminutes(self): diff --git a/ietf/meeting/views.py b/ietf/meeting/views.py index 6a73059d92..722bf829e1 100644 --- a/ietf/meeting/views.py +++ b/ietf/meeting/views.py @@ -2522,6 +2522,8 @@ def session_details(request, num, acronym): else: pending_suggestions = SlideSubmission.objects.none() + tsa = session.official_timeslotassignment() + future = tsa is not None and timezone.now() < tsa.timeslot.end_time() return render(request, "meeting/session_details.html", { 'scheduled_sessions':scheduled_sessions , 'unscheduled_sessions':unscheduled_sessions , @@ -2532,6 +2534,7 @@ def session_details(request, num, acronym): 'can_manage_materials' : can_manage, 'can_view_request': can_view_request, 'thisweek': datetime_today()-datetime.timedelta(days=7), + 'future': future, }) class SessionDraftsForm(forms.Form): @@ -2823,11 +2826,14 @@ def upload_session_minutes(request, session_id, num): else: form = UploadMinutesForm(show_apply_to_all_checkbox) + tsa = session.official_timeslotassignment() + future = tsa is not None and timezone.now() < 
tsa.timeslot.end_time() return render(request, "meeting/upload_session_minutes.html", {'session': session, 'session_number': session_number, 'minutes_sp' : minutes_sp, 'form': form, + 'future': future, }) @role_required("Secretariat") diff --git a/ietf/templates/meeting/session_details_panel.html b/ietf/templates/meeting/session_details_panel.html index 87d9e3d672..7c52ac0b4a 100644 --- a/ietf/templates/meeting/session_details_panel.html +++ b/ietf/templates/meeting/session_details_panel.html @@ -109,7 +109,7 @@

Agenda, Minutes, and Bluesheets

{% endif %} {% if not session.type_counter.minutes %} - Import minutes from notes.ietf.org + Import minutes from notes.ietf.org Upload minutes diff --git a/ietf/templates/meeting/upload_session_minutes.html b/ietf/templates/meeting/upload_session_minutes.html index 30eadda277..324440681f 100644 --- a/ietf/templates/meeting/upload_session_minutes.html +++ b/ietf/templates/meeting/upload_session_minutes.html @@ -26,6 +26,11 @@

{% if session_number %}

Session {{ session_number }} : {{ session.official_timeslotassignment.timeslot.time|timezone:session.meeting.time_zone|date:"D M-d-Y Hi" }}

{% endif %} + {% if future %} +

+ Caution: Session has not ended yet +

+ {% endif %}
{% csrf_token %} {% bootstrap_form form %} From c70e67dc917dea9fe0347575811d5ab02ef91ce8 Mon Sep 17 00:00:00 2001 From: Jennifer Richards Date: Wed, 2 Apr 2025 13:11:08 -0300 Subject: [PATCH 43/44] chore: unpin importlib-metadata (#8762) --- requirements.txt | 1 - 1 file changed, 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 8bd906c220..cd93f448e2 100644 --- a/requirements.txt +++ b/requirements.txt @@ -42,7 +42,6 @@ gunicorn>=20.1.0 hashids>=1.3.1 html2text>=2020.1.16 # Used only to clean comment field of secr/sreq html5lib>=1.1 # Only used in tests -importlib-metadata<8.5.0 # indirect req of Markdown/inflect; https://github.com/ietf-tools/datatracker/issues/7924 inflect>= 6.0.2 jsonfield>=3.1.0 # for SubmissionCheck. This is https://github.com/bradjasper/django-jsonfield/. jsonschema[format]>=4.2.1 From b95bbe670fb1f3506ef156034fd5107c1dca3365 Mon Sep 17 00:00:00 2001 From: rjsparks <10996692+rjsparks@users.noreply.github.com> Date: Wed, 2 Apr 2025 16:24:20 +0000 Subject: [PATCH 44/44] ci: update base image target version to 20250402T1611 --- dev/build/Dockerfile | 2 +- dev/build/TARGET_BASE | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/dev/build/Dockerfile b/dev/build/Dockerfile index c25298d652..cd0a70667c 100644 --- a/dev/build/Dockerfile +++ b/dev/build/Dockerfile @@ -1,4 +1,4 @@ -FROM ghcr.io/ietf-tools/datatracker-app-base:20250327T1859 +FROM ghcr.io/ietf-tools/datatracker-app-base:20250402T1611 LABEL maintainer="IETF Tools Team " ENV DEBIAN_FRONTEND=noninteractive diff --git a/dev/build/TARGET_BASE b/dev/build/TARGET_BASE index 50e8bfd839..1195fc9a0b 100644 --- a/dev/build/TARGET_BASE +++ b/dev/build/TARGET_BASE @@ -1 +1 @@ -20250327T1859 +20250402T1611