{% endblock %}
{% block js %}
+
+
{% endblock %}
\ No newline at end of file
diff --git a/ietf/templates/doc/ballot/send_ballot_comment.html b/ietf/templates/doc/ballot/send_ballot_comment.html
deleted file mode 100644
index 1c5f521859..0000000000
--- a/ietf/templates/doc/ballot/send_ballot_comment.html
+++ /dev/null
@@ -1,44 +0,0 @@
-{% extends "base.html" %}
-{# Copyright The IETF Trust 2015, All Rights Reserved #}
-{% load origin %}
-{% load django_bootstrap5 %}
-{% load ietf_filters %}
-{% block title %}Send ballot position for {{ balloter }} on {{ doc }}{% endblock %}
-{% block content %}
- {% origin %}
-
- Send ballot position for {{ balloter }}
-
- {{ doc }}
-
-
-{% endblock %}
From 8f2feef631acbd8b181a845140647c2c83a9299f Mon Sep 17 00:00:00 2001
From: NGPixel
Date: Tue, 14 Oct 2025 18:57:50 -0400
Subject: [PATCH 002/187] ci: update build workflow to deploy to dev
---
.github/workflows/build.yml | 71 ++---
dev/k8s-get-deploy-name/.editorconfig | 7 +
dev/k8s-get-deploy-name/.gitignore | 1 +
dev/k8s-get-deploy-name/.npmrc | 3 +
dev/k8s-get-deploy-name/README.md | 16 ++
dev/k8s-get-deploy-name/cli.js | 22 ++
dev/k8s-get-deploy-name/package-lock.json | 303 ++++++++++++++++++++++
dev/k8s-get-deploy-name/package.json | 8 +
8 files changed, 396 insertions(+), 35 deletions(-)
create mode 100644 dev/k8s-get-deploy-name/.editorconfig
create mode 100644 dev/k8s-get-deploy-name/.gitignore
create mode 100644 dev/k8s-get-deploy-name/.npmrc
create mode 100644 dev/k8s-get-deploy-name/README.md
create mode 100644 dev/k8s-get-deploy-name/cli.js
create mode 100644 dev/k8s-get-deploy-name/package-lock.json
create mode 100644 dev/k8s-get-deploy-name/package.json
diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index 8567446cae..15eaba48d1 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -16,13 +16,13 @@ on:
- Skip
- Staging Only
- Staging + Prod
- sandbox:
- description: 'Deploy to Sandbox'
+ dev:
+ description: 'Deploy to Dev'
default: true
required: true
type: boolean
- sandboxNoDbRefresh:
- description: 'Sandbox Disable Daily DB Refresh'
+ devNoDbRefresh:
+ description: 'Dev Disable Daily DB Refresh'
default: false
required: true
type: boolean
@@ -392,44 +392,45 @@ jobs:
value: "Failed"
# -----------------------------------------------------------------
- # SANDBOX
+ # DEV
# -----------------------------------------------------------------
- sandbox:
- name: Deploy to Sandbox
- if: ${{ !failure() && !cancelled() && github.event.inputs.sandbox == 'true' }}
+ dev:
+ name: Deploy to Dev
+ if: ${{ !failure() && !cancelled() && github.event.inputs.dev == 'true' }}
needs: [prepare, release]
- runs-on: [self-hosted, dev-server]
+ runs-on: ubuntu-latest
environment:
- name: sandbox
+ name: dev
env:
PKG_VERSION: ${{needs.prepare.outputs.pkg_version}}
steps:
- - uses: actions/checkout@v4
-
- - name: Download a Release Artifact
- uses: actions/download-artifact@v4.3.0
- with:
- name: release-${{ env.PKG_VERSION }}
-
- - name: Deploy to containers
- env:
- DEBIAN_FRONTEND: noninteractive
- run: |
- echo "Reset production flags in settings.py..."
- sed -i -r -e 's/^DEBUG *= *.*$/DEBUG = True/' -e "s/^SERVER_MODE *= *.*\$/SERVER_MODE = 'development'/" ietf/settings.py
- echo "Install Deploy to Container CLI dependencies..."
- cd dev/deploy-to-container
- npm ci
- cd ../..
- echo "Start Deploy..."
- node ./dev/deploy-to-container/cli.js --branch ${{ github.ref_name }} --domain dev.ietf.org --appversion ${{ env.PKG_VERSION }} --commit ${{ github.sha }} --ghrunid ${{ github.run_id }} --nodbrefresh ${{ github.event.inputs.sandboxNoDbRefresh }}
-
- - name: Cleanup old docker resources
- env:
- DEBIAN_FRONTEND: noninteractive
- run: |
- docker image prune -a -f
+ - uses: actions/checkout@v4
+ with:
+ ref: main
+
+ - name: Get Deploy Name
+ env:
+ DEBIAN_FRONTEND: noninteractive
+ run: |
+ echo "Install Get Deploy Name CLI dependencies..."
+ cd dev/k8s-get-deploy-name
+ npm ci
+ echo "Get Deploy Name..."
+ echo "DEPLOY_NAMESPACE=$(node cli.js --branch ${{ github.ref_name }})" >> "$GITHUB_ENV"
+
+ - name: Deploy to dev
+ uses: the-actions-org/workflow-dispatch@v4
+ with:
+ workflow: deploy-dev.yml
+ repo: ietf-tools/infra-k8s
+ ref: main
+ token: ${{ secrets.GH_INFRA_K8S_TOKEN }}
+ inputs: '{ "app":"datatracker", "appVersion":"${{ env.PKG_VERSION }}", "remoteRef":"${{ github.sha }}", "namespace":"${{ env.DEPLOY_NAMESPACE }}" }'
+ wait-for-completion: true
+ wait-for-completion-timeout: 30m
+ wait-for-completion-interval: 30s
+ display-workflow-run-url: false
# -----------------------------------------------------------------
# STAGING
diff --git a/dev/k8s-get-deploy-name/.editorconfig b/dev/k8s-get-deploy-name/.editorconfig
new file mode 100644
index 0000000000..fec5c66519
--- /dev/null
+++ b/dev/k8s-get-deploy-name/.editorconfig
@@ -0,0 +1,7 @@
+[*]
+indent_size = 2
+indent_style = space
+charset = utf-8
+trim_trailing_whitespace = false
+end_of_line = lf
+insert_final_newline = true
diff --git a/dev/k8s-get-deploy-name/.gitignore b/dev/k8s-get-deploy-name/.gitignore
new file mode 100644
index 0000000000..07e6e472cc
--- /dev/null
+++ b/dev/k8s-get-deploy-name/.gitignore
@@ -0,0 +1 @@
+/node_modules
diff --git a/dev/k8s-get-deploy-name/.npmrc b/dev/k8s-get-deploy-name/.npmrc
new file mode 100644
index 0000000000..580a68c499
--- /dev/null
+++ b/dev/k8s-get-deploy-name/.npmrc
@@ -0,0 +1,3 @@
+audit = false
+fund = false
+save-exact = true
diff --git a/dev/k8s-get-deploy-name/README.md b/dev/k8s-get-deploy-name/README.md
new file mode 100644
index 0000000000..a6605e4dd2
--- /dev/null
+++ b/dev/k8s-get-deploy-name/README.md
@@ -0,0 +1,16 @@
+# Datatracker Get Deploy Name
+
+This tool process and slugify a git branch into an appropriate subdomain name.
+
+## Usage
+
+1. From the `dev/k8s-get-deploy-name` directory, install the dependencies:
+```sh
+npm install
+```
+2. Run the command: (replacing the `branch` argument)
+```sh
+node /cli.js --branch feat/fooBar-123
+```
+
+The subdomain name will be output. It can then be used in a workflow as a namespace name and subdomain value.
diff --git a/dev/k8s-get-deploy-name/cli.js b/dev/k8s-get-deploy-name/cli.js
new file mode 100644
index 0000000000..b6c3b5119e
--- /dev/null
+++ b/dev/k8s-get-deploy-name/cli.js
@@ -0,0 +1,22 @@
+#!/usr/bin/env node
+
+import yargs from 'yargs/yargs'
+import { hideBin } from 'yargs/helpers'
+import slugify from 'slugify'
+
+const argv = yargs(hideBin(process.argv)).argv
+
+let branch = argv.branch
+if (!branch) {
+ throw new Error('Missing --branch argument!')
+}
+if (branch.indexOf('/') >= 0) {
+ branch = branch.split('/').slice(1).join('-')
+}
+branch = slugify(branch, { lower: true, strict: true })
+if (branch.length < 1) {
+ throw new Error('Branch name is empty!')
+}
+process.stdout.write(`dt-${branch}`)
+
+process.exit(0)
diff --git a/dev/k8s-get-deploy-name/package-lock.json b/dev/k8s-get-deploy-name/package-lock.json
new file mode 100644
index 0000000000..e492a4cd38
--- /dev/null
+++ b/dev/k8s-get-deploy-name/package-lock.json
@@ -0,0 +1,303 @@
+{
+ "name": "k8s-get-deploy-name",
+ "lockfileVersion": 2,
+ "requires": true,
+ "packages": {
+ "": {
+ "name": "k8s-get-deploy-name",
+ "dependencies": {
+ "slugify": "1.6.6",
+ "yargs": "17.7.2"
+ }
+ },
+ "node_modules/ansi-regex": {
+ "version": "5.0.1",
+ "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz",
+ "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==",
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/ansi-styles": {
+ "version": "4.3.0",
+ "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz",
+ "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==",
+ "dependencies": {
+ "color-convert": "^2.0.1"
+ },
+ "engines": {
+ "node": ">=8"
+ },
+ "funding": {
+ "url": "https://github.com/chalk/ansi-styles?sponsor=1"
+ }
+ },
+ "node_modules/cliui": {
+ "version": "8.0.1",
+ "resolved": "https://registry.npmjs.org/cliui/-/cliui-8.0.1.tgz",
+ "integrity": "sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==",
+ "dependencies": {
+ "string-width": "^4.2.0",
+ "strip-ansi": "^6.0.1",
+ "wrap-ansi": "^7.0.0"
+ },
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/color-convert": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz",
+ "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==",
+ "dependencies": {
+ "color-name": "~1.1.4"
+ },
+ "engines": {
+ "node": ">=7.0.0"
+ }
+ },
+ "node_modules/color-name": {
+ "version": "1.1.4",
+ "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz",
+ "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA=="
+ },
+ "node_modules/emoji-regex": {
+ "version": "8.0.0",
+ "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz",
+ "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A=="
+ },
+ "node_modules/escalade": {
+ "version": "3.1.1",
+ "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz",
+ "integrity": "sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==",
+ "engines": {
+ "node": ">=6"
+ }
+ },
+ "node_modules/get-caller-file": {
+ "version": "2.0.5",
+ "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz",
+ "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==",
+ "engines": {
+ "node": "6.* || 8.* || >= 10.*"
+ }
+ },
+ "node_modules/is-fullwidth-code-point": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz",
+ "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==",
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/require-directory": {
+ "version": "2.1.1",
+ "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz",
+ "integrity": "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==",
+ "engines": {
+ "node": ">=0.10.0"
+ }
+ },
+ "node_modules/slugify": {
+ "version": "1.6.6",
+ "resolved": "https://registry.npmjs.org/slugify/-/slugify-1.6.6.tgz",
+ "integrity": "sha512-h+z7HKHYXj6wJU+AnS/+IH8Uh9fdcX1Lrhg1/VMdf9PwoBQXFcXiAdsy2tSK0P6gKwJLXp02r90ahUCqHk9rrw==",
+ "engines": {
+ "node": ">=8.0.0"
+ }
+ },
+ "node_modules/string-width": {
+ "version": "4.2.3",
+ "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz",
+ "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==",
+ "dependencies": {
+ "emoji-regex": "^8.0.0",
+ "is-fullwidth-code-point": "^3.0.0",
+ "strip-ansi": "^6.0.1"
+ },
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/strip-ansi": {
+ "version": "6.0.1",
+ "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz",
+ "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==",
+ "dependencies": {
+ "ansi-regex": "^5.0.1"
+ },
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/wrap-ansi": {
+ "version": "7.0.0",
+ "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz",
+ "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==",
+ "dependencies": {
+ "ansi-styles": "^4.0.0",
+ "string-width": "^4.1.0",
+ "strip-ansi": "^6.0.0"
+ },
+ "engines": {
+ "node": ">=10"
+ },
+ "funding": {
+ "url": "https://github.com/chalk/wrap-ansi?sponsor=1"
+ }
+ },
+ "node_modules/y18n": {
+ "version": "5.0.8",
+ "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz",
+ "integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==",
+ "engines": {
+ "node": ">=10"
+ }
+ },
+ "node_modules/yargs": {
+ "version": "17.7.2",
+ "resolved": "https://registry.npmjs.org/yargs/-/yargs-17.7.2.tgz",
+ "integrity": "sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==",
+ "dependencies": {
+ "cliui": "^8.0.1",
+ "escalade": "^3.1.1",
+ "get-caller-file": "^2.0.5",
+ "require-directory": "^2.1.1",
+ "string-width": "^4.2.3",
+ "y18n": "^5.0.5",
+ "yargs-parser": "^21.1.1"
+ },
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/yargs-parser": {
+ "version": "21.1.1",
+ "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.1.1.tgz",
+ "integrity": "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==",
+ "engines": {
+ "node": ">=12"
+ }
+ }
+ },
+ "dependencies": {
+ "ansi-regex": {
+ "version": "5.0.1",
+ "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz",
+ "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ=="
+ },
+ "ansi-styles": {
+ "version": "4.3.0",
+ "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz",
+ "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==",
+ "requires": {
+ "color-convert": "^2.0.1"
+ }
+ },
+ "cliui": {
+ "version": "8.0.1",
+ "resolved": "https://registry.npmjs.org/cliui/-/cliui-8.0.1.tgz",
+ "integrity": "sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==",
+ "requires": {
+ "string-width": "^4.2.0",
+ "strip-ansi": "^6.0.1",
+ "wrap-ansi": "^7.0.0"
+ }
+ },
+ "color-convert": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz",
+ "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==",
+ "requires": {
+ "color-name": "~1.1.4"
+ }
+ },
+ "color-name": {
+ "version": "1.1.4",
+ "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz",
+ "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA=="
+ },
+ "emoji-regex": {
+ "version": "8.0.0",
+ "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz",
+ "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A=="
+ },
+ "escalade": {
+ "version": "3.1.1",
+ "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz",
+ "integrity": "sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw=="
+ },
+ "get-caller-file": {
+ "version": "2.0.5",
+ "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz",
+ "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg=="
+ },
+ "is-fullwidth-code-point": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz",
+ "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg=="
+ },
+ "require-directory": {
+ "version": "2.1.1",
+ "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz",
+ "integrity": "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q=="
+ },
+ "slugify": {
+ "version": "1.6.6",
+ "resolved": "https://registry.npmjs.org/slugify/-/slugify-1.6.6.tgz",
+ "integrity": "sha512-h+z7HKHYXj6wJU+AnS/+IH8Uh9fdcX1Lrhg1/VMdf9PwoBQXFcXiAdsy2tSK0P6gKwJLXp02r90ahUCqHk9rrw=="
+ },
+ "string-width": {
+ "version": "4.2.3",
+ "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz",
+ "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==",
+ "requires": {
+ "emoji-regex": "^8.0.0",
+ "is-fullwidth-code-point": "^3.0.0",
+ "strip-ansi": "^6.0.1"
+ }
+ },
+ "strip-ansi": {
+ "version": "6.0.1",
+ "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz",
+ "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==",
+ "requires": {
+ "ansi-regex": "^5.0.1"
+ }
+ },
+ "wrap-ansi": {
+ "version": "7.0.0",
+ "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz",
+ "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==",
+ "requires": {
+ "ansi-styles": "^4.0.0",
+ "string-width": "^4.1.0",
+ "strip-ansi": "^6.0.0"
+ }
+ },
+ "y18n": {
+ "version": "5.0.8",
+ "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz",
+ "integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA=="
+ },
+ "yargs": {
+ "version": "17.7.2",
+ "resolved": "https://registry.npmjs.org/yargs/-/yargs-17.7.2.tgz",
+ "integrity": "sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==",
+ "requires": {
+ "cliui": "^8.0.1",
+ "escalade": "^3.1.1",
+ "get-caller-file": "^2.0.5",
+ "require-directory": "^2.1.1",
+ "string-width": "^4.2.3",
+ "y18n": "^5.0.5",
+ "yargs-parser": "^21.1.1"
+ }
+ },
+ "yargs-parser": {
+ "version": "21.1.1",
+ "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.1.1.tgz",
+ "integrity": "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw=="
+ }
+ }
+}
diff --git a/dev/k8s-get-deploy-name/package.json b/dev/k8s-get-deploy-name/package.json
new file mode 100644
index 0000000000..849f5d9b8d
--- /dev/null
+++ b/dev/k8s-get-deploy-name/package.json
@@ -0,0 +1,8 @@
+{
+ "name": "k8s-get-deploy-name",
+ "type": "module",
+ "dependencies": {
+ "slugify": "1.6.6",
+ "yargs": "17.7.2"
+ }
+}
From 5a7be260dd6dfd9c484bc7c50ef991642fa8ad8e Mon Sep 17 00:00:00 2001
From: NGPixel
Date: Wed, 15 Oct 2025 03:07:07 -0400
Subject: [PATCH 003/187] chore: add disableDailyDbRefresh flag to build
workflow
---
.github/workflows/build.yml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index 15eaba48d1..4c70456a73 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -426,7 +426,7 @@ jobs:
repo: ietf-tools/infra-k8s
ref: main
token: ${{ secrets.GH_INFRA_K8S_TOKEN }}
- inputs: '{ "app":"datatracker", "appVersion":"${{ env.PKG_VERSION }}", "remoteRef":"${{ github.sha }}", "namespace":"${{ env.DEPLOY_NAMESPACE }}" }'
+ inputs: '{ "app":"datatracker", "appVersion":"${{ env.PKG_VERSION }}", "remoteRef":"${{ github.sha }}", "namespace":"${{ env.DEPLOY_NAMESPACE }}", "disableDailyDbRefresh":${{ inputs.devNoDbRefresh }} }'
wait-for-completion: true
wait-for-completion-timeout: 30m
wait-for-completion-interval: 30s
From 93c1124c21267556625df760c68f35f6d4ae8139 Mon Sep 17 00:00:00 2001
From: Robert Sparks
Date: Wed, 15 Oct 2025 13:06:45 -0500
Subject: [PATCH 004/187] ci: add ruff to devcontainer (#9731)
---
.devcontainer/devcontainer.json | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json
index 6b0fd79bb3..bf28550084 100644
--- a/.devcontainer/devcontainer.json
+++ b/.devcontainer/devcontainer.json
@@ -23,7 +23,6 @@
"dbaeumer.vscode-eslint",
"eamodio.gitlens",
"editorconfig.editorconfig",
- // Newer volar >=3.0.0 causes crashes in devcontainers
"vue.volar@2.2.10",
"mrmlnc.vscode-duplicate",
"ms-azuretools.vscode-docker",
@@ -35,7 +34,8 @@
"redhat.vscode-yaml",
"spmeesseman.vscode-taskexplorer",
"visualstudioexptteam.vscodeintellicode",
- "ms-python.pylint"
+ "ms-python.pylint",
+ "charliermarsh.ruff"
],
"settings": {
"terminal.integrated.defaultProfile.linux": "zsh",
From d5660ab8e953fec25dbb20025aba73b2e58f0609 Mon Sep 17 00:00:00 2001
From: Jennifer Richards
Date: Wed, 15 Oct 2025 18:30:18 -0300
Subject: [PATCH 005/187] fix: unbreak EmptyAwareJSONField (#9732)
* fix: specify default form_class correctly
* style: ruff ruff
---
ietf/utils/db.py | 63 ++++++++++++++++++++++++++++++------------------
1 file changed, 40 insertions(+), 23 deletions(-)
diff --git a/ietf/utils/db.py b/ietf/utils/db.py
index 865c9b999f..49c89da13a 100644
--- a/ietf/utils/db.py
+++ b/ietf/utils/db.py
@@ -1,33 +1,44 @@
-# Copyright The IETF Trust 2021, All Rights Reserved
-# -*- coding: utf-8 -*-
-
-# Taken from/inspired by
-# https://stackoverflow.com/questions/55147169/django-admin-jsonfield-default-empty-dict-wont-save-in-admin
-#
-# JSONField should recognize {}, (), and [] as valid, non-empty JSON
-# values. However, the base Field class excludes them
+# Copyright The IETF Trust 2021-2025, All Rights Reserved
import jsonfield
from django.db import models
-from ietf.utils.fields import IETFJSONField as FormIETFJSONField, EmptyAwareJSONField as FormEmptyAwareJSONField
+from ietf.utils.fields import (
+ IETFJSONField as FormIETFJSONField,
+ EmptyAwareJSONField as FormEmptyAwareJSONField,
+)
class EmptyAwareJSONField(models.JSONField):
- form_class = FormEmptyAwareJSONField
+ """JSONField that allows empty JSON values when model specifies empty=False
+
+ Taken from/inspired by
+ https://stackoverflow.com/questions/55147169/django-admin-jsonfield-default-empty-dict-wont-save-in-admin
+
+ JSONField should recognize {}, (), and [] as valid, non-empty JSON values.
- def __init__(self, *args, empty_values=FormEmptyAwareJSONField.empty_values, accepted_empty_values=None, **kwargs):
+ If customizing the formfield, the field must accept the `empty_values` argument.
+ """
+
+ def __init__(
+ self,
+ *args,
+ empty_values=FormEmptyAwareJSONField.empty_values,
+ accepted_empty_values=None,
+ **kwargs,
+ ):
if accepted_empty_values is None:
accepted_empty_values = []
- self.empty_values = [x
- for x in empty_values
- if x not in accepted_empty_values]
+ self.empty_values = [x for x in empty_values if x not in accepted_empty_values]
super().__init__(*args, **kwargs)
def formfield(self, **kwargs):
- if 'form_class' not in kwargs or issubclass(kwargs['form_class'], FormEmptyAwareJSONField):
- kwargs.setdefault('empty_values', self.empty_values)
- return super().formfield(**{**kwargs})
+ defaults = {
+ "form_class": FormEmptyAwareJSONField,
+ "empty_values": self.empty_values,
+ }
+ defaults.update(kwargs)
+ return super().formfield(**defaults)
class IETFJSONField(jsonfield.JSONField): # pragma: no cover
@@ -36,15 +47,21 @@ class IETFJSONField(jsonfield.JSONField): # pragma: no cover
# Remove this class when migrations are squashed and it is no longer referenced
form_class = FormIETFJSONField
- def __init__(self, *args, empty_values=FormIETFJSONField.empty_values, accepted_empty_values=None, **kwargs):
+ def __init__(
+ self,
+ *args,
+ empty_values=FormIETFJSONField.empty_values,
+ accepted_empty_values=None,
+ **kwargs,
+ ):
if accepted_empty_values is None:
accepted_empty_values = []
- self.empty_values = [x
- for x in empty_values
- if x not in accepted_empty_values]
+ self.empty_values = [x for x in empty_values if x not in accepted_empty_values]
super().__init__(*args, **kwargs)
def formfield(self, **kwargs):
- if 'form_class' not in kwargs or issubclass(kwargs['form_class'], FormIETFJSONField):
- kwargs.setdefault('empty_values', self.empty_values)
+ if "form_class" not in kwargs or issubclass(
+ kwargs["form_class"], FormIETFJSONField
+ ):
+ kwargs.setdefault("empty_values", self.empty_values)
return super().formfield(**{**kwargs})
From 1d2d304fa5c99db6cd2a944328246ce900c73b3c Mon Sep 17 00:00:00 2001
From: Jennifer Richards
Date: Thu, 16 Oct 2025 12:39:04 -0300
Subject: [PATCH 006/187] fix: improve proceedings caching/performance (#9733)
* refactor: speed up get_attendance()
* fix: avoid cache invalidation by later draft rev
* fix: guard against empty value
* feat: freeze cache key for final proceedings
---
ietf/meeting/models.py | 28 +++++++++++++++++++++-------
ietf/meeting/utils.py | 39 +++++++++++++++++++++++++++++++++++----
2 files changed, 56 insertions(+), 11 deletions(-)
diff --git a/ietf/meeting/models.py b/ietf/meeting/models.py
index f3df23e916..9e44df33b7 100644
--- a/ietf/meeting/models.py
+++ b/ietf/meeting/models.py
@@ -250,25 +250,39 @@ def get_attendance(self):
# MeetingRegistration.attended started conflating badge-pickup and session attendance before IETF 114.
# We've separated session attendance off to ietf.meeting.Attended, but need to report attendance at older
# meetings correctly.
-
+ #
+ # Looking up by registration and attendance records separately and joining in
+ # python is far faster than combining the Q objects in the query (~100x).
+ # Further optimization may be possible, but the queries are tricky...
attended_per_meeting_registration = (
Q(registration__meeting=self) & (
Q(registration__attended=True) |
Q(registration__checkedin=True)
)
)
+ attendees_by_reg = set(
+ Person.objects.filter(attended_per_meeting_registration).values_list(
+ "pk", flat=True
+ )
+ )
+
attended_per_meeting_attended = (
Q(attended__session__meeting=self)
# Note that we are not filtering to plenary, wg, or rg sessions
# as we do for nomcom eligibility - if picking up a badge (see above)
# is good enough, just attending e.g. a training session is also good enough
)
- attended = Person.objects.filter(
- attended_per_meeting_registration | attended_per_meeting_attended
- ).distinct()
-
- onsite = set(attended.filter(registration__meeting=self, registration__tickets__attendance_type__slug='onsite'))
- remote = set(attended.filter(registration__meeting=self, registration__tickets__attendance_type__slug='remote'))
+ attendees_by_att = set(
+ Person.objects.filter(attended_per_meeting_attended).values_list(
+ "pk", flat=True
+ )
+ )
+
+ attendees = Person.objects.filter(
+ pk__in=attendees_by_att | attendees_by_reg
+ )
+ onsite = set(attendees.filter(registration__meeting=self, registration__tickets__attendance_type__slug='onsite'))
+ remote = set(attendees.filter(registration__meeting=self, registration__tickets__attendance_type__slug='remote'))
remote.difference_update(onsite)
return Attendance(
diff --git a/ietf/meeting/utils.py b/ietf/meeting/utils.py
index f6925269aa..feadb0c7fd 100644
--- a/ietf/meeting/utils.py
+++ b/ietf/meeting/utils.py
@@ -1027,10 +1027,41 @@ def generate_proceedings_content(meeting, force_refresh=False):
:force_refresh: true to force regeneration and cache refresh
"""
cache = caches["default"]
- cache_version = Document.objects.filter(session__meeting__number=meeting.number).aggregate(Max('time'))["time__max"]
- # Include proceedings_final in the bare_key so we'll always reflect that accurately, even at the cost of
- # a recomputation in the view
- bare_key = f"proceedings.{meeting.number}.{cache_version}.final={meeting.proceedings_final}"
+ key_components = [
+ "proceedings",
+ str(meeting.number),
+ ]
+ if meeting.proceedings_final:
+ # Freeze the cache key once proceedings are finalized. Further changes will
+ # not be picked up until the cache expires or is refreshed by the
+ # proceedings_content_refresh_task()
+ key_components.append("final")
+ else:
+ # Build a cache key that changes when materials are modified. For all but drafts,
+ # use the last modification time of the document. Exclude drafts from this because
+ # revisions long after the meeting ends will otherwise show up as changes and
+ # incorrectly invalidate the cache. Instead, include an ordered list of the
+ # drafts linked to the meeting so adding or removing drafts will trigger a
+ # recalculation. The list is long but that doesn't matter because we hash it into
+ # a fixed-length key.
+ meeting_docs = Document.objects.filter(session__meeting__number=meeting.number)
+ last_materials_update = (
+ meeting_docs.exclude(type_id="draft")
+ .filter(session__meeting__number=meeting.number)
+ .aggregate(Max("time"))["time__max"]
+ )
+ draft_names = (
+ meeting_docs
+ .filter(type_id="draft")
+ .order_by("name")
+ .values_list("name", flat=True)
+ )
+ key_components += [
+ last_materials_update.isoformat() if last_materials_update else "-",
+ ",".join(draft_names),
+ ]
+
+ bare_key = ".".join(key_components)
cache_key = sha384(bare_key.encode("utf8")).hexdigest()
if not force_refresh:
cached_content = cache.get(cache_key, None)
From 2cfbaf90c3504a53135d61f9bf976bab3b388eb9 Mon Sep 17 00:00:00 2001
From: Jennifer Richards
Date: Thu, 16 Oct 2025 14:28:13 -0300
Subject: [PATCH 007/187] ci: drop caching from build images step (#9738)
---
.github/workflows/build.yml | 2 --
1 file changed, 2 deletions(-)
diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index 4c70456a73..7eac7b1c64 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -278,8 +278,6 @@ jobs:
tags: |
ghcr.io/ietf-tools/datatracker:${{ env.PKG_VERSION }}
${{ env.FEATURE_LATEST_TAG && format('ghcr.io/ietf-tools/datatracker:{0}-latest', env.FEATURE_LATEST_TAG) || null }}
- cache-from: type=gha
- cache-to: type=gha,mode=max
- name: Update CHANGELOG
id: changelog
From b0ec8c4b27d6225c6ffa6cac27ce554ec4a49a7c Mon Sep 17 00:00:00 2001
From: Jennifer Richards
Date: Fri, 17 Oct 2025 13:08:11 -0300
Subject: [PATCH 008/187] chore: remove unused variables (#9742)
---
ietf/meeting/models.py | 2 --
1 file changed, 2 deletions(-)
diff --git a/ietf/meeting/models.py b/ietf/meeting/models.py
index 9e44df33b7..7d9e318aab 100644
--- a/ietf/meeting/models.py
+++ b/ietf/meeting/models.py
@@ -956,8 +956,6 @@ class Meta:
def __str__(self):
return u"%s -> %s-%s" % (self.session, self.document.name, self.rev)
-constraint_cache_uses = 0
-constraint_cache_initials = 0
class SessionQuerySet(models.QuerySet):
def with_current_status(self):
From 62f720ceaf951fba91b5a818473d798663dfbf1d Mon Sep 17 00:00:00 2001
From: Jennifer Richards
Date: Tue, 21 Oct 2025 12:31:39 -0300
Subject: [PATCH 009/187] ci: imagePullPolicy for migration container (#9764)
---
k8s/datatracker.yaml | 1 +
1 file changed, 1 insertion(+)
diff --git a/k8s/datatracker.yaml b/k8s/datatracker.yaml
index 3d9e86a29d..50a2c69687 100644
--- a/k8s/datatracker.yaml
+++ b/k8s/datatracker.yaml
@@ -115,6 +115,7 @@ spec:
initContainers:
- name: migration
image: "ghcr.io/ietf-tools/datatracker:$APP_IMAGE_TAG"
+ imagePullPolicy: Always
env:
- name: "CONTAINER_ROLE"
value: "migrations"
From a3a3d215ca4067e722ead94e886175adb589e235 Mon Sep 17 00:00:00 2001
From: Robert Sparks
Date: Thu, 23 Oct 2025 12:14:48 -0500
Subject: [PATCH 010/187] fix: don't limit from_contact for incoming liaison
statements (#9773)
---
ietf/liaisons/forms.py | 20 ++++++++++++--------
1 file changed, 12 insertions(+), 8 deletions(-)
diff --git a/ietf/liaisons/forms.py b/ietf/liaisons/forms.py
index ef5b29535e..1747e55571 100644
--- a/ietf/liaisons/forms.py
+++ b/ietf/liaisons/forms.py
@@ -495,14 +495,18 @@ def set_from_fields(self):
self.fields['from_groups'].initial = qs
# Note that the IAB chair currently doesn't get to work with incoming liaison statements
- if not (
- has_role(self.user, "Secretariat")
- or has_role(self.user, "Liaison Coordinator")
- ):
- self.fields["from_contact"].initial = (
- self.person.role_set.filter(group=qs[0]).first().email.formatted_email()
- )
- self.fields["from_contact"].widget.attrs["disabled"] = True
+
+ # Removing this block at the request of the IAB - as a workaround until the new liaison tool is
+ # create, anyone with access to the form can set any from_contact value
+ #
+ # if not (
+ # has_role(self.user, "Secretariat")
+ # or has_role(self.user, "Liaison Coordinator")
+ # ):
+ # self.fields["from_contact"].initial = (
+ # self.person.role_set.filter(group=qs[0]).first().email.formatted_email()
+ # )
+ # self.fields["from_contact"].widget.attrs["disabled"] = True
def set_to_fields(self):
'''Set to_groups and to_contacts options and initial value based on user
From 1243957f06da485e5cf4c04a8479d551817d4d78 Mon Sep 17 00:00:00 2001
From: Jennifer Richards
Date: Thu, 23 Oct 2025 14:15:22 -0300
Subject: [PATCH 011/187] feat: unversioned proceedings cache (#9779)
* feat: separate, unversioned proceedings cache
* refactor: don't double-hash the cache key
---
ietf/meeting/utils.py | 8 ++++----
ietf/settings.py | 22 ++++++++++++++++++++++
2 files changed, 26 insertions(+), 4 deletions(-)
diff --git a/ietf/meeting/utils.py b/ietf/meeting/utils.py
index feadb0c7fd..afcf7656f2 100644
--- a/ietf/meeting/utils.py
+++ b/ietf/meeting/utils.py
@@ -5,7 +5,6 @@
import jsonschema
import os
import requests
-from hashlib import sha384
import pytz
import subprocess
@@ -1026,7 +1025,7 @@ def generate_proceedings_content(meeting, force_refresh=False):
:meeting: meeting whose proceedings should be rendered
:force_refresh: true to force regeneration and cache refresh
"""
- cache = caches["default"]
+ cache = caches["proceedings"]
key_components = [
"proceedings",
str(meeting.number),
@@ -1061,8 +1060,9 @@ def generate_proceedings_content(meeting, force_refresh=False):
",".join(draft_names),
]
- bare_key = ".".join(key_components)
- cache_key = sha384(bare_key.encode("utf8")).hexdigest()
+ # Key is potentially long, but the "proceedings" cache hashes it to a fixed
+ # length. If that changes, hash it separately here first.
+ cache_key = ".".join(key_components)
if not force_refresh:
cached_content = cache.get(cache_key, None)
if cached_content is not None:
diff --git a/ietf/settings.py b/ietf/settings.py
index 9a213c1a73..5e576430ed 100644
--- a/ietf/settings.py
+++ b/ietf/settings.py
@@ -1374,6 +1374,17 @@ def skip_unreadable_post(record):
"LOCATION": f"{MEMCACHED_HOST}:{MEMCACHED_PORT}",
"VERSION": __version__,
"KEY_PREFIX": "ietf:dt",
+ # Key function is default except with sha384-encoded key
+ "KEY_FUNCTION": lambda key, key_prefix, version: (
+ f"{key_prefix}:{version}:{sha384(str(key).encode('utf8')).hexdigest()}"
+ ),
+ },
+ "proceedings": {
+ "BACKEND": "ietf.utils.cache.LenientMemcacheCache",
+ "LOCATION": f"{MEMCACHED_HOST}:{MEMCACHED_PORT}",
+ # No release-specific VERSION setting.
+ "KEY_PREFIX": "ietf:dt:proceedings",
+ # Key function is default except with sha384-encoded key
"KEY_FUNCTION": lambda key, key_prefix, version: (
f"{key_prefix}:{version}:{sha384(str(key).encode('utf8')).hexdigest()}"
),
@@ -1421,6 +1432,17 @@ def skip_unreadable_post(record):
"VERSION": __version__,
"KEY_PREFIX": "ietf:dt",
},
+ "proceedings": {
+ "BACKEND": "django.core.cache.backends.dummy.DummyCache",
+ # "BACKEND": "ietf.utils.cache.LenientMemcacheCache",
+ # "LOCATION": "127.0.0.1:11211",
+ # No release-specific VERSION setting.
+ "KEY_PREFIX": "ietf:dt:proceedings",
+ # Key function is default except with sha384-encoded key
+ "KEY_FUNCTION": lambda key, key_prefix, version: (
+ f"{key_prefix}:{version}:{sha384(str(key).encode('utf8')).hexdigest()}"
+ ),
+ },
"sessions": {
"BACKEND": "django.core.cache.backends.locmem.LocMemCache",
},
From 6412d1e24a9c499c39245bba58c2c31ec8110c0e Mon Sep 17 00:00:00 2001
From: Jennifer Richards
Date: Thu, 23 Oct 2025 17:41:06 -0300
Subject: [PATCH 012/187] feat: blobstore-driven meeting materials (#9780)
* feat: meeting materials blob resolver API (#9700)
* refactor: exclude_deleted() for StoredObject queryset
* chore: comment
* feat: meeting materials blob resolver API
* feat: materials blob retrieval API (#9728)
* feat: materials blob retrieval API (WIP)
* refactor: alphabetize ARTIFACT_STORAGE_NAMES
* chore: limit buckets served
* refactor: any-meeting option in _get_materials_doc()
* feat: create missing blobs on retrieval
* feat: render HTML from markdown via API (#9729)
* chore: add comment
* fix: allow bluesheets to be retrieved
Normally not retrieved through /meeting/materials, but they're
close enough in purpose that we might as well make them available.
* fix: only update StoredObject.modified if changed
* fix: preserve mtime when creating blob
* refactor: better exception name
* feat: render .md.html from .md blob
* fix: explicit STATIC_IETF_ORG value in template
Django's context_processors are not applied to render_string calls as we use them here, so settings are not available.
* fix: typo
* fix: decode utf-8 properly
* feat: use filesystem to render .md.html
* fix: copy/paste error in api_resolve_materials_name
* refactor: get actual rev in _get_materials_doc (#9741)
* fix: return filename, not full path
* feat: precompute blob lookups for meeting materials (#9746)
* feat: ResolvedMaterial model + migration
* feat: method to populate ResolvedMaterial (WIP)
* refactor: don't delete ResolvedMaterials
Instead of deleting the ResolvedMaterials for a meeting, which might
lose updates made during processing, update existing rows with any
changes and warn if anything changed during the process.
* fix: fix _get_materials_doc()
Did not handle the possibility of multiple DocHistory objects with the
same rev.
* refactor: factor out material lookup helper
* feat: resolve blobs via blobdb/fs for cache
* chore: add resource
* feat: admin for ResolvedMaterial
* feat: cache-driven resolve materials API
* fix: add all ResolvedMaterials; var names
* fix: handle null case
* feat: resolve_meeting_materials_task
* feat: update resolver cache on material upload (#9759)
* feat: robustness + date range for resolve materials task (#9760)
* fix: limit types added to ResolvedMaterial
* feat: resolve meeting materials in order by date
* feat: add meetings_until param
* fix: log&continue if resolving fails on a meeting
* feat: log error message on parse errors
* refactor: move ResolvedMaterial to blobdb app (#9762)
* refactor: move ResolvedMaterial to blobdb app
* fix: undo accidental removal
* chore: fix lint (#9767)
* fix: don't use DocHistory to find materials (#9771)
* fix: don't use DocHistory to validate revs
The DocHistory records are incomplete and, in particular, -00 revs are
often missing.
* Revert "refactor: get actual rev in _get_materials_doc (#9741)"
This reverts commit 7fd15801
* chore: remove the on-demand resolver api
* chore: fix lint
* feat: populate materials buckets (#9777)
* refactor: drop .txt from filename_with_rev()
* feat: utilities to populate materials blobs
* feat: store materials for a full meeting as blobs
Plus a bunch of fixup from working with real data.
(Based on meetings 71, 83, and 118, picked arbitrarily)
* chore: update migration
* feat: task to store materials in blobdb
* refactor: reimplement api_retrieve_materials_blob
* fix: update resolving task, fix bugs
* Revert "refactor: drop .txt from filename_with_rev()"
This reverts commit a849d0f92d4df54296a7062b6c3a05fb0977be93.
* chore: fix lint
---------
Co-authored-by: Robert Sparks
---
ietf/api/urls.py | 3 +
ietf/blobdb/admin.py | 11 +-
.../migrations/0002_resolvedmaterial.py | 48 +++
ietf/blobdb/models.py | 20 +
ietf/doc/models.py | 9 +
ietf/doc/storage.py | 10 +-
ietf/doc/storage_utils.py | 12 +-
ietf/doc/views_material.py | 4 +
ietf/meeting/resources.py | 14 +-
ietf/meeting/tasks.py | 131 ++++++-
ietf/meeting/utils.py | 355 +++++++++++++++++-
ietf/meeting/views.py | 177 ++++++++-
ietf/settings.py | 44 ++-
ietf/templates/minimal.html | 4 +-
14 files changed, 798 insertions(+), 44 deletions(-)
create mode 100644 ietf/blobdb/migrations/0002_resolvedmaterial.py
diff --git a/ietf/api/urls.py b/ietf/api/urls.py
index 6f2efb3c1e..04575b34cb 100644
--- a/ietf/api/urls.py
+++ b/ietf/api/urls.py
@@ -49,6 +49,9 @@
url(r'^group/role-holder-addresses/$', api_views.role_holder_addresses),
# Let IESG members set positions programmatically
url(r'^iesg/position', views_ballot.api_set_position),
+ # Find the blob to store for a given materials document path
+ url(r'^meeting/(?:(?P(?:interim-)?[a-z0-9-]+)/)?materials/%(document)s(?P\.[A-Za-z0-9]+)?/resolve-cached/$' % settings.URL_REGEXPS, meeting_views.api_resolve_materials_name_cached),
+ url(r'^meeting/blob/(?P[a-z0-9-]+)/(?P[a-z][a-z0-9.-]+)$', meeting_views.api_retrieve_materials_blob),
# Let Meetecho set session video URLs
url(r'^meeting/session/video/url$', meeting_views.api_set_session_video_url),
# Let Meetecho tell us the name of its recordings
diff --git a/ietf/blobdb/admin.py b/ietf/blobdb/admin.py
index f4cd002e07..3e1a2a311f 100644
--- a/ietf/blobdb/admin.py
+++ b/ietf/blobdb/admin.py
@@ -3,7 +3,7 @@
from django.db.models.functions import Length
from rangefilter.filters import DateRangeQuickSelectListFilterBuilder
-from .models import Blob
+from .models import Blob, ResolvedMaterial
@admin.register(Blob)
@@ -29,3 +29,12 @@ def get_queryset(self, request):
def object_size(self, instance):
"""Get the size of the object"""
return instance.object_size # annotation added in get_queryset()
+
+
+@admin.register(ResolvedMaterial)
+class ResolvedMaterialAdmin(admin.ModelAdmin):
+ model = ResolvedMaterial
+ list_display = ["name", "meeting_number", "bucket", "blob"]
+ list_filter = ["meeting_number", "bucket"]
+ search_fields = ["name", "blob"]
+ ordering = ["name"]
diff --git a/ietf/blobdb/migrations/0002_resolvedmaterial.py b/ietf/blobdb/migrations/0002_resolvedmaterial.py
new file mode 100644
index 0000000000..e0ab405b11
--- /dev/null
+++ b/ietf/blobdb/migrations/0002_resolvedmaterial.py
@@ -0,0 +1,48 @@
+# Copyright The IETF Trust 2025, All Rights Reserved
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ("blobdb", "0001_initial"),
+ ]
+
+ operations = [
+ migrations.CreateModel(
+ name="ResolvedMaterial",
+ fields=[
+ (
+ "id",
+ models.BigAutoField(
+ auto_created=True,
+ primary_key=True,
+ serialize=False,
+ verbose_name="ID",
+ ),
+ ),
+ ("name", models.CharField(help_text="Name to resolve", max_length=300)),
+ (
+ "meeting_number",
+ models.CharField(
+ help_text="Meeting material is related to", max_length=64
+ ),
+ ),
+ (
+ "bucket",
+ models.CharField(help_text="Resolved bucket name", max_length=255),
+ ),
+ (
+ "blob",
+ models.CharField(help_text="Resolved blob name", max_length=300),
+ ),
+ ],
+ ),
+ migrations.AddConstraint(
+ model_name="resolvedmaterial",
+ constraint=models.UniqueConstraint(
+ fields=("name", "meeting_number"), name="unique_name_per_meeting"
+ ),
+ ),
+ ]
diff --git a/ietf/blobdb/models.py b/ietf/blobdb/models.py
index 8f423d9f6c..fa7831f203 100644
--- a/ietf/blobdb/models.py
+++ b/ietf/blobdb/models.py
@@ -96,3 +96,23 @@ def _emit_blob_change_event(self, using=None):
),
using=using,
)
+
+
+class ResolvedMaterial(models.Model):
+ # A Document name can be 255 characters; allow this name to be a bit longer
+ name = models.CharField(max_length=300, help_text="Name to resolve")
+ meeting_number = models.CharField(
+ max_length=64, help_text="Meeting material is related to"
+ )
+ bucket = models.CharField(max_length=255, help_text="Resolved bucket name")
+ blob = models.CharField(max_length=300, help_text="Resolved blob name")
+
+ class Meta:
+ constraints = [
+ models.UniqueConstraint(
+ fields=["name", "meeting_number"], name="unique_name_per_meeting"
+ )
+ ]
+
+ def __str__(self):
+ return f"{self.name}@{self.meeting_number} -> {self.bucket}:{self.blob}"
diff --git a/ietf/doc/models.py b/ietf/doc/models.py
index 25ee734cbe..8bb79b64ed 100644
--- a/ietf/doc/models.py
+++ b/ietf/doc/models.py
@@ -913,6 +913,7 @@ def role_for_doc(self):
roles.append('Action Holder')
return ', '.join(roles)
+# N.B., at least a couple dozen documents exist that do not satisfy this validator
validate_docname = RegexValidator(
r'^[-a-z0-9]+$',
"Provide a valid document name consisting of lowercase letters, numbers and hyphens.",
@@ -1588,9 +1589,17 @@ class BofreqResponsibleDocEvent(DocEvent):
""" Capture the responsible leadership (IAB and IESG members) for a BOF Request """
responsible = models.ManyToManyField('person.Person', blank=True)
+
+class StoredObjectQuerySet(models.QuerySet):
+ def exclude_deleted(self):
+ return self.filter(deleted__isnull=True)
+
+
class StoredObject(models.Model):
"""Hold metadata about objects placed in object storage"""
+ objects = StoredObjectQuerySet.as_manager()
+
store = models.CharField(max_length=256)
name = models.CharField(max_length=1024, null=False, blank=False) # N.B. the 1024 limit on name comes from S3
sha384 = models.CharField(max_length=96)
diff --git a/ietf/doc/storage.py b/ietf/doc/storage.py
index a234ef2d4f..375620ccaf 100644
--- a/ietf/doc/storage.py
+++ b/ietf/doc/storage.py
@@ -32,7 +32,7 @@ def __init__(self, file, name, mtime=None, content_type="", store=None, doc_name
@classmethod
def from_storedobject(cls, file, name, store):
"""Alternate constructor for objects that already exist in the StoredObject table"""
- stored_object = StoredObject.objects.filter(store=store, name=name, deleted__isnull=True).first()
+ stored_object = StoredObject.objects.exclude_deleted().filter(store=store, name=name).first()
if stored_object is None:
raise FileNotFoundError(f"StoredObject for {store}:{name} does not exist or was deleted")
file = cls(file, name, store, doc_name=stored_object.doc_name, doc_rev=stored_object.doc_rev)
@@ -140,7 +140,11 @@ def _save_stored_object(self, name, content) -> StoredObject:
),
),
)
- if not created:
+ if not created and (
+ record.sha384 != content.custom_metadata["sha384"]
+ or record.len != int(content.custom_metadata["len"])
+ or record.deleted is not None
+ ):
record.sha384 = content.custom_metadata["sha384"]
record.len = int(content.custom_metadata["len"])
record.modified = now
@@ -160,7 +164,7 @@ def _delete_stored_object(self, name) -> Optional[StoredObject]:
else:
now = timezone.now()
# Note that existing_record is a queryset that will have one matching object
- existing_record.filter(deleted__isnull=True).update(deleted=now)
+ existing_record.exclude_deleted().update(deleted=now)
return existing_record.first()
def _save(self, name, content):
diff --git a/ietf/doc/storage_utils.py b/ietf/doc/storage_utils.py
index 510c98c4f5..81588c83ec 100644
--- a/ietf/doc/storage_utils.py
+++ b/ietf/doc/storage_utils.py
@@ -12,6 +12,14 @@
from ietf.utils.log import log
+class StorageUtilsError(Exception):
+ pass
+
+
+class AlreadyExistsError(StorageUtilsError):
+ pass
+
+
def _get_storage(kind: str) -> Storage:
if kind in settings.ARTIFACT_STORAGE_NAMES:
return storages[kind]
@@ -70,7 +78,7 @@ def store_file(
# debug.show('f"Asked to store {name} in {kind}: is_new={is_new}, allow_overwrite={allow_overwrite}"')
if not allow_overwrite and not is_new:
debug.show('f"Failed to save {kind}:{name} - name already exists in store"')
- raise RuntimeError(f"Failed to save {kind}:{name} - name already exists in store")
+ raise AlreadyExistsError(f"Failed to save {kind}:{name} - name already exists in store")
new_name = _get_storage(kind).save(
name,
StoredObjectFile(
@@ -85,7 +93,7 @@ def store_file(
if new_name != name:
complaint = f"Error encountered saving '{name}' - results stored in '{new_name}' instead."
debug.show("complaint")
- raise RuntimeError(complaint)
+ raise StorageUtilsError(complaint)
except Exception as err:
log(f"Blobstore Error: Failed to store file {kind}:{name}: {repr(err)}")
if settings.SERVER_MODE == "development":
diff --git a/ietf/doc/views_material.py b/ietf/doc/views_material.py
index 6f8b8a8f12..eefac0ca61 100644
--- a/ietf/doc/views_material.py
+++ b/ietf/doc/views_material.py
@@ -22,6 +22,7 @@
from ietf.doc.utils import add_state_change_event, check_common_doc_name_rules
from ietf.group.models import Group
from ietf.group.utils import can_manage_materials
+from ietf.meeting.utils import resolve_uploaded_material
from ietf.utils import log
from ietf.utils.decorators import ignore_view_kwargs
from ietf.utils.meetecho import MeetechoAPIError, SlidesManager
@@ -179,6 +180,9 @@ def edit_material(request, name=None, acronym=None, action=None, doc_type=None):
"There was an error creating a hardlink at %s pointing to %s: %s"
% (ftp_filepath, filepath, ex)
)
+ else:
+ for meeting in set([s.meeting for s in doc.session_set.all()]):
+ resolve_uploaded_material(meeting=meeting, doc=doc)
if prev_rev != doc.rev:
e = NewRevisionDocEvent(type="new_revision", doc=doc, rev=doc.rev)
diff --git a/ietf/meeting/resources.py b/ietf/meeting/resources.py
index ede2b5b993..88562a88fe 100644
--- a/ietf/meeting/resources.py
+++ b/ietf/meeting/resources.py
@@ -11,11 +11,15 @@
from ietf import api
-from ietf.meeting.models import ( Meeting, ResourceAssociation, Constraint, Room, Schedule, Session,
- TimeSlot, SchedTimeSessAssignment, SessionPresentation, FloorPlan,
- UrlResource, ImportantDate, SlideSubmission, SchedulingEvent,
- BusinessConstraint, ProceedingsMaterial, MeetingHost, Attended,
- Registration, RegistrationTicket)
+from ietf.meeting.models import (Meeting, ResourceAssociation, Constraint, Room,
+ Schedule, Session,
+ TimeSlot, SchedTimeSessAssignment, SessionPresentation,
+ FloorPlan,
+ UrlResource, ImportantDate, SlideSubmission,
+ SchedulingEvent,
+ BusinessConstraint, ProceedingsMaterial, MeetingHost,
+ Attended,
+ Registration, RegistrationTicket)
from ietf.name.resources import MeetingTypeNameResource
class MeetingResource(ModelResource):
diff --git a/ietf/meeting/tasks.py b/ietf/meeting/tasks.py
index 784eb00d87..c361325f9a 100644
--- a/ietf/meeting/tasks.py
+++ b/ietf/meeting/tasks.py
@@ -1,13 +1,20 @@
-# Copyright The IETF Trust 2024, All Rights Reserved
+# Copyright The IETF Trust 2024-2025, All Rights Reserved
#
# Celery task definitions
#
+import datetime
+
from celery import shared_task
+# from django.db.models import QuerySet
from django.utils import timezone
from ietf.utils import log
from .models import Meeting
-from .utils import generate_proceedings_content
+from .utils import (
+ generate_proceedings_content,
+ resolve_materials_for_one_meeting,
+ store_blobs_for_one_meeting,
+)
from .views import generate_agenda_data
from .utils import fetch_attendance_from_meetings
@@ -61,3 +68,123 @@ def fetch_meeting_attendance_task():
meeting_stats['processed']
)
)
+
+
+def _select_meetings(
+ meetings: list[str] | None = None,
+ meetings_since: str | None = None,
+ meetings_until: str | None = None
+): # returns a Meeting queryset
+ """Select meetings by number or date range"""
+ # IETF-1 = 1986-01-16
+ EARLIEST_MEETING_DATE = datetime.datetime(1986, 1, 1)
+ meetings_since_dt: datetime.datetime | None = None
+ meetings_until_dt: datetime.datetime | None = None
+
+ if meetings_since == "zero":
+ meetings_since_dt = EARLIEST_MEETING_DATE
+ elif meetings_since is not None:
+ try:
+ meetings_since_dt = datetime.datetime.fromisoformat(meetings_since)
+ except ValueError:
+ log.log(
+ f"Failed to parse meetings_since='{meetings_since}' with fromisoformat"
+ )
+ raise
+
+ if meetings_until is not None:
+ try:
+ meetings_until_dt = datetime.datetime.fromisoformat(meetings_until)
+ except ValueError:
+ log.log(
+ f"Failed to parse meetings_until='{meetings_until}' with fromisoformat"
+ )
+ raise
+ if meetings_since_dt is None:
+ # if we only got meetings_until, start from the first meeting
+ meetings_since_dt = EARLIEST_MEETING_DATE
+
+ if meetings is None:
+ if meetings_since_dt is None:
+ log.log("No meetings requested, doing nothing.")
+ return Meeting.objects.none()
+ meetings_qs = Meeting.objects.filter(date__gte=meetings_since_dt)
+ if meetings_until_dt is not None:
+ meetings_qs = meetings_qs.filter(date__lte=meetings_until_dt)
+ log.log(
+ "Selecting meetings between "
+ f"{meetings_since_dt} and {meetings_until_dt}"
+ )
+ else:
+ log.log(f"Selecting meetings since {meetings_since_dt}")
+ else:
+ if meetings_since_dt is not None:
+ log.log(
+ "Ignoring meetings_since and meetings_until "
+ "because specific meetings were requested."
+ )
+ meetings_qs = Meeting.objects.filter(number__in=meetings)
+ return meetings_qs
+
+
+@shared_task
+def resolve_meeting_materials_task(
+ *, # only allow kw arguments
+ meetings: list[str] | None=None,
+ meetings_since: str | None=None,
+ meetings_until: str | None=None
+):
+ """Run materials resolver on meetings
+
+ Can request a set of meetings by number by passing a list in the meetings arg, or
+ by range by passing iso-format timestamps in meetings_since / meetings_until.
+ To select all meetings, set meetings_since="zero" and omit other parameters.
+ """
+ meetings_qs = _select_meetings(meetings, meetings_since, meetings_until)
+ for meeting in meetings_qs.order_by("date"):
+ log.log(
+ f"Resolving materials for {meeting.type_id} "
+ f"meeting {meeting.number} ({meeting.date})..."
+ )
+ mark = timezone.now()
+ try:
+ resolve_materials_for_one_meeting(meeting)
+ except Exception as err:
+ log.log(
+ "Exception raised while resolving materials for "
+ f"meeting {meeting.number}: {err}"
+ )
+ else:
+ log.log(f"Resolved in {(timezone.now() - mark).total_seconds():0.3f} seconds.")
+
+
+@shared_task
+def store_meeting_materials_as_blobs_task(
+ *, # only allow kw arguments
+ meetings: list[str] | None = None,
+ meetings_since: str | None = None,
+ meetings_until: str | None = None
+):
+ """Push meeting materials into the blob store
+
+ Can request a set of meetings by number by passing a list in the meetings arg, or
+ by range by passing iso-format timestamps in meetings_since / meetings_until.
+ To select all meetings, set meetings_since="zero" and omit other parameters.
+ """
+ meetings_qs = _select_meetings(meetings, meetings_since, meetings_until)
+ for meeting in meetings_qs.order_by("date"):
+ log.log(
+ f"Creating blobs for materials for {meeting.type_id} "
+ f"meeting {meeting.number} ({meeting.date})..."
+ )
+ mark = timezone.now()
+ try:
+ store_blobs_for_one_meeting(meeting)
+ except Exception as err:
+ log.log(
+ "Exception raised while creating blobs for "
+ f"meeting {meeting.number}: {err}"
+ )
+ else:
+ log.log(
+ f"Blobs created in {(timezone.now() - mark).total_seconds():0.3f} seconds.")
diff --git a/ietf/meeting/utils.py b/ietf/meeting/utils.py
index afcf7656f2..bdf3d3d3d3 100644
--- a/ietf/meeting/utils.py
+++ b/ietf/meeting/utils.py
@@ -2,6 +2,9 @@
# -*- coding: utf-8 -*-
import datetime
import itertools
+from contextlib import suppress
+from dataclasses import dataclass
+
import jsonschema
import os
import requests
@@ -26,16 +29,33 @@
import debug # pyflakes:ignore
from ietf.dbtemplate.models import DBTemplate
-from ietf.doc.storage_utils import store_bytes, store_str
-from ietf.meeting.models import (Session, SchedulingEvent, TimeSlot,
- Constraint, SchedTimeSessAssignment, SessionPresentation, Attended,
- Registration, Meeting, RegistrationTicket)
-from ietf.doc.models import Document, State, NewRevisionDocEvent, StateDocEvent
+from ietf.doc.storage_utils import store_bytes, store_str, AlreadyExistsError
+from ietf.meeting.models import (
+ Session,
+ SchedulingEvent,
+ TimeSlot,
+ Constraint,
+ SchedTimeSessAssignment,
+ SessionPresentation,
+ Attended,
+ Registration,
+ Meeting,
+ RegistrationTicket,
+)
+from ietf.blobdb.models import ResolvedMaterial
+from ietf.doc.models import (
+ Document,
+ State,
+ NewRevisionDocEvent,
+ StateDocEvent,
+ StoredObject,
+)
from ietf.doc.models import DocEvent
from ietf.group.models import Group
from ietf.group.utils import can_manage_materials
from ietf.name.models import SessionStatusName, ConstraintName, DocTypeName
from ietf.person.models import Person
+from ietf.utils import markdown
from ietf.utils.html import clean_html
from ietf.utils.log import log
from ietf.utils.timezone import date_today
@@ -220,6 +240,7 @@ def save_bluesheet(request, session, file, encoding='utf-8'):
save_error = handle_upload_file(file, filename, session.meeting, 'bluesheets', request=request, encoding=encoding)
if not save_error:
doc.save_with_history([e])
+ resolve_uploaded_material(meeting=session.meeting, doc=doc)
return save_error
@@ -832,6 +853,330 @@ def write_doc_for_session(session, type_id, filename, contents):
store_str(type_id, filename.name, contents)
return None
+
+@dataclass
+class BlobSpec:
+ bucket: str
+ name: str
+
+
+def resolve_one_material(
+ doc: Document, rev: str | None, ext: str | None
+) -> BlobSpec | None:
+ if doc.type_id is None:
+ log(f"Cannot resolve a doc with no type: {doc.name}")
+ return None
+
+ # Get the Document's base name. It may or may not have an extension.
+ if rev is None:
+ basename = Path(doc.get_base_name())
+ else:
+ basename = Path(f"{doc.name}-{int(rev):02d}")
+
+ # If the document's file exists, the blob is _always_ named with this stem,
+ # even if it's different from the original.
+ blob_stem = Path(f"{doc.name}-{rev or doc.rev}")
+
+ # If we have an extension, either from the URL or the Document's base name, look up
+ # the blob or file or return 404. N.b. the suffix check needs adjustment to handle
+ # a bare "." extension when we reach py3.14.
+ if ext or basename.suffix != "":
+ if ext:
+ blob_name = str(blob_stem.with_suffix(ext))
+ else:
+ blob_name = str(blob_stem.with_suffix(basename.suffix))
+
+ # See if we have a stored object under that name
+ preferred_blob = (
+ StoredObject.objects.exclude_deleted()
+ .filter(store=doc.type_id, name=blob_name)
+ .first()
+ )
+ if preferred_blob is not None:
+ return BlobSpec(
+ bucket=preferred_blob.store,
+ name=preferred_blob.name,
+ )
+ # No stored object, fall back to the file system.
+ filename = Path(doc.get_file_path()) / basename # use basename for file
+ if filename.is_file():
+ return BlobSpec(
+ bucket=doc.type_id,
+ name=str(blob_stem.with_suffix(filename.suffix)),
+ )
+ else:
+ return None
+
+ # No extension has been specified so far, so look one up.
+ matching_stored_objects = (
+ StoredObject.objects.exclude_deleted()
+ .filter(
+ store=doc.type_id,
+ name__startswith=f"{blob_stem}.", # anchor to end with trailing "."
+ )
+ .order_by("name")
+ ) # orders by suffix
+ blob_ext_choices = {
+ Path(stored_obj.name).suffix: stored_obj
+ for stored_obj in matching_stored_objects
+ }
+
+ # Short-circuit to return pdf if present
+ if ".pdf" in blob_ext_choices:
+ pdf_blob = blob_ext_choices[".pdf"]
+ return BlobSpec(
+ bucket=pdf_blob.store,
+ name=str(blob_stem.with_suffix(".pdf")),
+ )
+
+ # Now look for files
+ filename = Path(doc.get_file_path()) / basename
+ file_ext_choices = {
+ # Construct a map from suffix to full filename
+ fn.suffix: fn.name
+ for fn in sorted(filename.parent.glob(filename.stem + ".*"))
+ }
+
+ # Short-circuit to return pdf if we have the file
+ if ".pdf" in file_ext_choices:
+ return BlobSpec(
+ bucket=doc.type_id,
+ name=str(blob_stem.with_suffix(".pdf")),
+ )
+
+ all_exts = set(blob_ext_choices.keys()).union(file_ext_choices.keys())
+ if len(all_exts) > 0:
+ preferred_ext = sorted(all_exts)[0]
+ if preferred_ext in blob_ext_choices:
+ preferred_blob = blob_ext_choices[preferred_ext]
+ return BlobSpec(
+ bucket=preferred_blob.store,
+ name=preferred_blob.name,
+ )
+ else:
+ return BlobSpec(
+ bucket=doc.type_id,
+ name=str(blob_stem.with_suffix(preferred_ext)),
+ )
+
+ return None
+
+
+def resolve_materials_for_one_meeting(meeting: Meeting):
+ start_time = timezone.now()
+ meeting_documents = (
+ Document.objects.filter(
+ type_id__in=settings.MATERIALS_TYPES_SERVED_BY_WORKER
+ ).filter(
+ Q(session__meeting=meeting) | Q(proceedingsmaterial__meeting=meeting)
+ )
+ ).distinct()
+
+ resolved = []
+ for doc in meeting_documents:
+ # request by doc name with no rev
+ blob = resolve_one_material(doc, rev=None, ext=None)
+ if blob is not None:
+ resolved.append(
+ ResolvedMaterial(
+ name=doc.name,
+ meeting_number=meeting.number,
+ bucket=blob.bucket,
+ blob=blob.name,
+ )
+ )
+ # request by doc name + rev
+ blob = resolve_one_material(doc, rev=doc.rev, ext=None)
+ if blob is not None:
+ resolved.append(
+ ResolvedMaterial(
+ name=f"{doc.name}-{doc.rev:02}",
+ meeting_number=meeting.number,
+ bucket=blob.bucket,
+ blob=blob.name,
+ )
+ )
+ # for other revisions, only need request by doc name + rev
+ other_revisions = doc.revisions_by_newrevisionevent()
+ other_revisions.remove(doc.rev)
+ for rev in other_revisions:
+ blob = resolve_one_material(doc, rev=rev, ext=None)
+ if blob is not None:
+ resolved.append(
+ ResolvedMaterial(
+ name=f"{doc.name}-{rev:02}",
+ meeting_number=meeting.number,
+ bucket=blob.bucket,
+ blob=blob.name,
+ )
+ )
+ ResolvedMaterial.objects.bulk_create(
+ resolved,
+ update_conflicts=True,
+ unique_fields=["name", "meeting_number"],
+ update_fields=["bucket", "blob"],
+ )
+ # Warn if any files were updated during the above process
+ last_update = meeting_documents.aggregate(Max("time"))["time__max"]
+ if last_update and last_update > start_time:
+ log(
+ f"Warning: materials for meeting {meeting.number} "
+ "changed during ResolvedMaterial update"
+ )
+
+def resolve_uploaded_material(meeting: Meeting, doc: Document):
+ resolved = []
+ blob = resolve_one_material(doc, rev=None, ext=None)
+ if blob is not None:
+ resolved.append(
+ ResolvedMaterial(
+ name=doc.name,
+ meeting_number=meeting.number,
+ bucket=blob.bucket,
+ blob=blob.name,
+ )
+ )
+ # request by doc name + rev
+ blob = resolve_one_material(doc, rev=doc.rev, ext=None)
+ if blob is not None:
+ resolved.append(
+ ResolvedMaterial(
+ name=f"{doc.name}-{doc.rev:02}",
+ meeting_number=meeting.number,
+ bucket=blob.bucket,
+ blob=blob.name,
+ )
+ )
+ ResolvedMaterial.objects.bulk_create(
+ resolved,
+ update_conflicts=True,
+ unique_fields=["name", "meeting_number"],
+ update_fields=["bucket", "blob"],
+ )
+
+
+def store_blob_for_one_material_file(doc: Document, rev: str, filepath: Path):
+ if not settings.ENABLE_BLOBSTORAGE:
+ raise RuntimeError("Cannot store blobs: ENABLE_BLOBSTORAGE is False")
+
+ bucket = doc.type_id
+ if bucket not in settings.MATERIALS_TYPES_SERVED_BY_WORKER:
+ raise ValueError(f"Bucket {bucket} not found for doc {doc.name}.")
+ blob_stem = f"{doc.name}-{rev}"
+ suffix = filepath.suffix # includes leading "."
+
+ # Store the file
+ try:
+ file_bytes = filepath.read_bytes()
+ except Exception as err:
+ log(f"Failed to read {filepath}: {err}")
+ raise
+ with suppress(AlreadyExistsError):
+ store_bytes(
+ kind=bucket,
+ name= blob_stem + suffix,
+ content=file_bytes,
+ mtime=datetime.datetime.fromtimestamp(
+ filepath.stat().st_mtime,
+ tz=datetime.UTC,
+ ),
+ allow_overwrite=False,
+ doc_name=doc.name,
+ doc_rev=rev,
+ )
+
+ # Special case: pre-render markdown into HTML as .md.html
+ if suffix == ".md":
+ try:
+ markdown_source = file_bytes.decode("utf-8")
+ except UnicodeDecodeError as err:
+ log(f"Unable to decode {filepath} as UTF-8, treating as latin-1: {err}")
+ markdown_source = file_bytes.decode("latin-1")
+ # render the markdown
+ try:
+ html = render_to_string(
+ "minimal.html",
+ {
+ "content": markdown.markdown(markdown_source),
+ "title": blob_stem,
+ "static_ietf_org": settings.STATIC_IETF_ORG,
+ },
+ )
+ except Exception as err:
+ log(f"Failed to render markdown for {filepath}: {err}")
+ else:
+ # Don't overwrite, but don't fail if the blob exists
+ with suppress(AlreadyExistsError):
+ store_str(
+ kind=bucket,
+ name=blob_stem + ".md.html",
+ content=html,
+ allow_overwrite=False,
+ doc_name=doc.name,
+ doc_rev=rev,
+ content_type="text/html;charset=utf-8",
+ )
+
+
+def store_blobs_for_one_material_doc(doc: Document):
+ """Ensure that all files related to a materials Document are in the blob store"""
+ if doc.type_id not in settings.MATERIALS_TYPES_SERVED_BY_WORKER:
+ log(f"This method does not handle docs of type {doc.name}")
+ return
+
+ # Store files for current Document / rev
+ file_path = Path(doc.get_file_path())
+ base_name = Path(doc.get_base_name())
+ # .stem would remove directories, so use .with_suffix("")
+ base_name_stem = str(base_name.with_suffix(""))
+ if base_name_stem.endswith(".") and base_name.suffix == "":
+ # In Python 3.14, a trailing "." is a valid suffix, but in prior versions
+ # it is left as part of the stem. The suffix check ensures that either way,
+ # only a single "." will be removed.
+ base_name_stem = base_name_stem[:-1]
+ # Add any we find without the rev
+ for file_to_store in file_path.glob(base_name_stem + ".*"):
+ if not (file_to_store.is_file()):
+ continue
+ try:
+ store_blob_for_one_material_file(doc, doc.rev, file_to_store)
+ except Exception as err:
+ log(
+ f"Failed to store blob for {doc} rev {doc.rev} "
+ f"from {file_to_store}: {err}"
+ )
+
+ # Get other revisions
+ for rev in doc.revisions_by_newrevisionevent():
+ if rev == doc.rev:
+ continue # already handled this
+
+ # Add some that have the rev
+ for file_to_store in file_path.glob(doc.name + f"-{rev}.*"):
+ if not file_to_store.is_file():
+ continue
+ try:
+ store_blob_for_one_material_file(doc, rev, file_to_store)
+ except Exception as err:
+ log(
+ f"Failed to store blob for {doc} rev {rev} "
+ f"from {file_to_store}: {err}"
+ )
+
+
+def store_blobs_for_one_meeting(meeting: Meeting):
+ meeting_documents = (
+ Document.objects.filter(
+ type_id__in=settings.MATERIALS_TYPES_SERVED_BY_WORKER
+ ).filter(
+ Q(session__meeting=meeting) | Q(proceedingsmaterial__meeting=meeting)
+ )
+ ).distinct()
+
+ for doc in meeting_documents:
+ store_blobs_for_one_material_doc(doc)
+
+
def create_recording(session, url, title=None, user=None):
'''
Creates the Document type=recording, setting external_url and creating
diff --git a/ietf/meeting/views.py b/ietf/meeting/views.py
index fcc9312609..cf6fed596b 100644
--- a/ietf/meeting/views.py
+++ b/ietf/meeting/views.py
@@ -9,6 +9,7 @@
import json
import math
import os
+
import pytz
import re
import tarfile
@@ -27,10 +28,12 @@
from django import forms
from django.core.cache import caches
+from django.core.files.storage import storages
from django.shortcuts import render, redirect, get_object_or_404
from django.http import (HttpResponse, HttpResponseRedirect, HttpResponseForbidden,
HttpResponseNotFound, Http404, HttpResponseBadRequest,
- JsonResponse, HttpResponseGone, HttpResponseNotAllowed)
+ JsonResponse, HttpResponseGone, HttpResponseNotAllowed,
+ FileResponse)
from django.conf import settings
from django.contrib import messages
from django.contrib.auth.decorators import login_required
@@ -48,18 +51,25 @@
from django.views.decorators.cache import cache_page
from django.views.decorators.csrf import ensure_csrf_cookie, csrf_exempt
from django.views.generic import RedirectView
+from rest_framework.status import HTTP_404_NOT_FOUND
import debug # pyflakes:ignore
from ietf.doc.fields import SearchableDocumentsField
from ietf.doc.models import Document, State, DocEvent, NewRevisionDocEvent
-from ietf.doc.storage_utils import remove_from_storage, retrieve_bytes, store_file
+from ietf.doc.storage_utils import (
+ remove_from_storage,
+ retrieve_bytes,
+ store_file,
+)
from ietf.group.models import Group
from ietf.group.utils import can_manage_session_materials, can_manage_some_groups, can_manage_group
from ietf.person.models import Person, User
from ietf.ietfauth.utils import role_required, has_role, user_is_person
from ietf.mailtrigger.utils import gather_address_lists
-from ietf.meeting.models import Meeting, Session, Schedule, FloorPlan, SessionPresentation, TimeSlot, SlideSubmission, Attended
+from ietf.meeting.models import Meeting, Session, Schedule, FloorPlan, \
+ SessionPresentation, TimeSlot, SlideSubmission, Attended
+from ..blobdb.models import ResolvedMaterial
from ietf.meeting.models import ImportantDate, SessionStatusName, SchedulingEvent, SchedTimeSessAssignment, Room, TimeSlotTypeName
from ietf.meeting.models import Registration
from ietf.meeting.forms import ( CustomDurationField, SwapDaysForm, SwapTimeslotsForm, ImportMinutesForm,
@@ -83,7 +93,8 @@
finalize,
generate_proceedings_content,
organize_proceedings_sessions,
- sort_accept_tuple,
+ resolve_uploaded_material,
+ sort_accept_tuple, store_blobs_for_one_material_doc,
)
from ietf.meeting.utils import add_event_info_to_session_qs
from ietf.meeting.utils import session_time_for_sorting
@@ -120,6 +131,8 @@
from icalendar import Calendar, Event
from ietf.doc.templatetags.ietf_filters import absurl
+from ..api.ietf_utils import requires_api_token
+from ..blobdb.storage import BlobdbStorage, BlobFile
request_summary_exclude_group_types = ['team']
@@ -245,21 +258,32 @@ def current_materials(request):
raise Http404('No such meeting')
-def _get_materials_doc(meeting, name):
+def _get_materials_doc(name, meeting=None):
"""Get meeting materials document named by name
- Raises Document.DoesNotExist if a match cannot be found.
+ Raises Document.DoesNotExist if a match cannot be found. If meeting is None,
+ matches a name that is associated with _any_ meeting.
"""
+
+ def _matches_meeting(doc, meeting=None):
+ if meeting is None:
+ return doc.get_related_meeting() is not None
+ return doc.get_related_meeting() == meeting
+
# try an exact match first
doc = Document.objects.filter(name=name).first()
- if doc is not None and doc.get_related_meeting() == meeting:
+ if doc is not None and _matches_meeting(doc, meeting):
return doc, None
+
# try parsing a rev number
if "-" in name:
docname, rev = name.rsplit("-", 1)
if len(rev) == 2 and rev.isdigit():
doc = Document.objects.get(name=docname) # may raise Document.DoesNotExist
- if doc.get_related_meeting() == meeting and rev in doc.revisions_by_newrevisionevent():
+ if (
+ _matches_meeting(doc, meeting)
+ and rev in doc.revisions_by_newrevisionevent()
+ ):
return doc, rev
# give up
raise Document.DoesNotExist
@@ -277,7 +301,7 @@ def materials_document(request, document, num=None, ext=None):
meeting = get_meeting(num, type_in=["ietf", "interim"])
num = meeting.number
try:
- doc, rev = _get_materials_doc(meeting=meeting, name=document)
+ doc, rev = _get_materials_doc(name=document, meeting=meeting)
except Document.DoesNotExist:
raise Http404("No such document for meeting %s" % num)
@@ -320,6 +344,7 @@ def materials_document(request, document, num=None, ext=None):
{
"content": markdown.markdown(bytes.decode(encoding=chset)),
"title": filename.name,
+ "static_ietf_org": settings.STATIC_IETF_ORG,
},
)
content_type = content_type.replace("plain", "html", 1)
@@ -334,6 +359,133 @@ def materials_document(request, document, num=None, ext=None):
return HttpResponseRedirect(redirect_to=doc.get_href(meeting=meeting))
+@requires_api_token("ietf.meeting.views.api_resolve_materials_name")
+def api_resolve_materials_name_cached(request, document, num=None, ext=None):
+ """Resolve materials name into document to a blob spec
+
+ Returns the bucket/name of a blob in the blob store that corresponds to the named
+ document. Handles resolution of revision if it is not specified and determines the
+ best extension if one is not provided. Response is JSON.
+
+ As of 2025-10-10 we do not have blobs for all materials documents or for every
+ format of every document. This API still returns the bucket/name as if the blob
+ exists. Another API will allow the caller to obtain the file contents using that
+ name if it cannot be retrieved from the blob store.
+ """
+
+ def _error_response(status: int, detail: str):
+ return JsonResponse(
+ {
+ "status": status,
+ "title": "Error",
+ "detail": detail,
+ },
+ status=status,
+ )
+
+ def _response(bucket: str, name: str):
+ return JsonResponse(
+ {
+ "bucket": bucket,
+ "name": name,
+ }
+ )
+
+ try:
+ resolved = ResolvedMaterial.objects.get(
+ meeting_number=num, name=document
+ )
+ except ResolvedMaterial.DoesNotExist:
+ return _error_response(
+ HTTP_404_NOT_FOUND, f"No suitable file for {document} for meeting {num}"
+ )
+ return _response(bucket=resolved.bucket, name=resolved.blob)
+
+
+@requires_api_token
+def api_retrieve_materials_blob(request, bucket, name):
+ """Retrieve contents of a meeting materials blob
+
+ This is intended as a fallback if the web worker cannot retrieve a blob from
+ the blobstore itself. The most likely cause is retrieving an old materials document
+ that has not been backfilled.
+
+ If a blob is requested that does not exist, this checks for it on the filesystem
+ and if found, adds it to the blobstore, creates a StoredObject record, and returns
+ the contents as it would have done if the blob was already present.
+
+ As a special case, if a requested file with extension `.md.html` does not exist
+ but a file with the same name but extension `.md` does, `.md` file will be rendered
+ from markdown to html and returned / stored.
+ """
+ DEFAULT_CONTENT_TYPES = {
+ ".html": "text/html;charset=utf-8",
+ ".md": "text/markdown;charset=utf-8",
+ ".pdf": "application/pdf",
+ ".txt": "text/plain;charset=utf-8",
+ }
+
+ def _default_content_type(blob_name: str):
+ return DEFAULT_CONTENT_TYPES.get(Path(name).suffix, "application/octet-stream")
+
+ if not (
+ settings.ENABLE_BLOBSTORAGE
+ and bucket in settings.MATERIALS_TYPES_SERVED_BY_WORKER
+ ):
+ return HttpResponseNotFound(f"Bucket {bucket} not found.")
+ storage = storages[bucket] # if not configured, a server error will result
+ assert isinstance(storage, BlobdbStorage)
+ try:
+ blob = storage.open(name, "rb")
+ except FileNotFoundError:
+ pass
+ else:
+ # found the blob - return it
+ assert isinstance(blob, BlobFile)
+ return FileResponse(
+ blob,
+ filename=name,
+ content_type=blob.content_type or _default_content_type(name),
+ )
+
+ # Did not find the blob. Create it if we can
+ name_as_path = Path(name)
+ if name_as_path.suffixes == [".md", ".html"]:
+ # special case: .md.html means we want to create the .md and the .md.html
+ # will come along as a bonus
+ name_to_store = name_as_path.stem # removes the .html
+ else:
+ name_to_store = name
+
+ # See if we have a meeting-related document that matches the requested bucket and
+ # name.
+ try:
+ doc, rev = _get_materials_doc(Path(name_to_store).stem)
+ if doc.type_id != bucket:
+ raise Document.DoesNotExist
+ except Document.DoesNotExist:
+ return HttpResponseNotFound(
+ f"Document corresponding to {bucket}:{name} not found."
+ )
+ else:
+ # create all missing blobs for the doc while we're at it
+ store_blobs_for_one_material_doc(doc)
+
+ # If we can make the blob at all, it now exists, so return it or a 404
+ try:
+ blob = storage.open(name, "rb")
+ except FileNotFoundError:
+ return HttpResponseNotFound(f"Object {bucket}:{name} not found.")
+ else:
+ # found the blob - return it
+ assert isinstance(blob, BlobFile)
+ return FileResponse(
+ blob,
+ filename=name,
+ content_type=blob.content_type or _default_content_type(name),
+ )
+
+
@login_required
def materials_editable_groups(request, num=None):
meeting = get_meeting(num)
@@ -2949,6 +3101,7 @@ def upload_session_minutes(request, session_id, num):
form.add_error(None, str(err))
else:
# no exception -- success!
+ resolve_uploaded_material(meeting=session.meeting, doc=session.minutes())
messages.success(request, f'Successfully uploaded minutes as revision {session.minutes().rev}.')
return redirect('ietf.meeting.views.session_details', num=num, acronym=session.group.acronym)
else:
@@ -3008,6 +3161,7 @@ def upload_session_narrativeminutes(request, session_id, num):
form.add_error(None, str(err))
else:
# no exception -- success!
+ resolve_uploaded_material(meeting=session.meeting, doc=session.narrative_minutes())
messages.success(request, f'Successfully uploaded narrative minutes as revision {session.narrative_minutes().rev}.')
return redirect('ietf.meeting.views.session_details', num=num, acronym=session.group.acronym)
else:
@@ -3154,6 +3308,7 @@ def upload_session_agenda(request, session_id, num):
form.add_error(None, save_error)
else:
doc.save_with_history([e])
+ resolve_uploaded_material(meeting=session.meeting, doc=doc)
messages.success(request, f'Successfully uploaded agenda as revision {doc.rev}.')
return redirect('ietf.meeting.views.session_details',num=num,acronym=session.group.acronym)
else:
@@ -3337,6 +3492,7 @@ def upload_session_slides(request, session_id, num, name=None):
else:
doc.save_with_history([e])
post_process(doc)
+ resolve_uploaded_material(meeting=session.meeting, doc=doc)
# Send MeetEcho updates even if we had a problem saving - that will keep it in sync with the
# SessionPresentation, which was already saved regardless of problems saving the file.
@@ -4737,6 +4893,7 @@ def err(code, text):
write_doc_for_session(session, 'chatlog', filename, json.dumps(apidata['chatlog']))
e = NewRevisionDocEvent.objects.create(doc=doc, rev=doc.rev, by=request.user.person, type='new_revision', desc='New revision available: %s'%doc.rev)
doc.save_with_history([e])
+ resolve_uploaded_material(meeting=session.meeting, doc=doc)
return HttpResponse(
"Done",
status=200,
@@ -4785,6 +4942,7 @@ def err(code, text):
write_doc_for_session(session, 'polls', filename, json.dumps(apidata['polls']))
e = NewRevisionDocEvent.objects.create(doc=doc, rev=doc.rev, by=request.user.person, type='new_revision', desc='New revision available: %s'%doc.rev)
doc.save_with_history([e])
+ resolve_uploaded_material(meeting=session.meeting, doc=doc)
return HttpResponse(
"Done",
status=200,
@@ -5167,6 +5325,7 @@ def approve_proposed_slides(request, slidesubmission_id, num):
doc.store_bytes(target_filename, retrieve_bytes("staging", submission.filename))
remove_from_storage("staging", submission.filename)
post_process(doc)
+ resolve_uploaded_material(meeting=submission.session.meeting, doc=doc)
DocEvent.objects.create(type="approved_slides", doc=doc, rev=doc.rev, by=request.user.person, desc="Slides approved")
# update meetecho slide info if configured
diff --git a/ietf/settings.py b/ietf/settings.py
index 5e576430ed..eb5f9d2161 100644
--- a/ietf/settings.py
+++ b/ietf/settings.py
@@ -786,29 +786,29 @@ def skip_unreadable_post(record):
# Storages for artifacts stored as blobs
ARTIFACT_STORAGE_NAMES: list[str] = [
- "bofreq",
- "charter",
- "conflrev",
"active-draft",
- "draft",
- "slides",
- "minutes",
"agenda",
+ "bibxml-ids",
"bluesheets",
- "procmaterials",
- "narrativeminutes",
- "statement",
- "statchg",
- "liai-att",
+ "bofreq",
+ "charter",
"chatlog",
- "polls",
- "staging",
- "bibxml-ids",
- "indexes",
+ "conflrev",
+ "draft",
"floorplan",
+ "indexes",
+ "liai-att",
"meetinghostlogo",
+ "minutes",
+ "narrativeminutes",
"photo",
+ "polls",
+ "procmaterials",
"review",
+ "slides",
+ "staging",
+ "statchg",
+ "statement",
]
for storagename in ARTIFACT_STORAGE_NAMES:
STORAGES[storagename] = {
@@ -816,6 +816,20 @@ def skip_unreadable_post(record):
"OPTIONS": {"bucket_name": storagename},
}
+# Buckets / doc types of meeting materials the CF worker is allowed to serve. This
+# differs from the list in Session.meeting_related() by the omission of "recording"
+MATERIALS_TYPES_SERVED_BY_WORKER = [
+ "agenda",
+ "bluesheets",
+ "chatlog",
+ "minutes",
+ "narrativeminutes",
+ "polls",
+ "procmaterials",
+ "slides",
+]
+
+
# Override this in settings_local.py if needed
# *_PATH variables ends with a slash/ .
diff --git a/ietf/templates/minimal.html b/ietf/templates/minimal.html
index 87f661f501..15c432505e 100644
--- a/ietf/templates/minimal.html
+++ b/ietf/templates/minimal.html
@@ -9,8 +9,8 @@
{{ title }}
-
-
+
+
{# load this in the head, to prevent flickering #}
From af0bcc743f6e449f93e0c7a7e4f2e2eec3ec76ae Mon Sep 17 00:00:00 2001
From: Nicolas Giard
Date: Thu, 23 Oct 2025 17:14:39 -0400
Subject: [PATCH 013/187] docs: Update PostgreSQL version badge in README
---
README.md | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/README.md b/README.md
index 4e1b7e1a45..dfaf871052 100644
--- a/README.md
+++ b/README.md
@@ -8,7 +8,7 @@
[](#prerequisites)
[](#prerequisites)
[](#prerequisites)
-[](#prerequisites)
+[](#prerequisites)
##### The day-to-day front-end to the IETF database for people who work on IETF standards.
From f9dea7df9d562ba818cf9224c1594f0e0983cdbe Mon Sep 17 00:00:00 2001
From: Nicolas Giard
Date: Thu, 23 Oct 2025 17:24:58 -0400
Subject: [PATCH 014/187] docs: Update Python version badge to 3.12 in README
---
README.md | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/README.md b/README.md
index dfaf871052..baffc311e7 100644
--- a/README.md
+++ b/README.md
@@ -5,7 +5,7 @@
[](https://github.com/ietf-tools/datatracker/releases)
[](https://github.com/ietf-tools/datatracker/blob/main/LICENSE)
[](https://codecov.io/gh/ietf-tools/datatracker)
-[](#prerequisites)
+[](#prerequisites)
[](#prerequisites)
[](#prerequisites)
[](#prerequisites)
From e0691c17121d2324d812bc68c3943d963d1c5d4d Mon Sep 17 00:00:00 2001
From: Nicolas Giard
Date: Thu, 23 Oct 2025 17:30:50 -0400
Subject: [PATCH 015/187] ci: remove assets rsync sync job from
dev-assets-sync-nightly workflow
Removed the nightly sync job for assets in the workflow.
---
.github/workflows/dev-assets-sync-nightly.yml | 14 --------------
1 file changed, 14 deletions(-)
diff --git a/.github/workflows/dev-assets-sync-nightly.yml b/.github/workflows/dev-assets-sync-nightly.yml
index 19933bddfd..4cfbf6365b 100644
--- a/.github/workflows/dev-assets-sync-nightly.yml
+++ b/.github/workflows/dev-assets-sync-nightly.yml
@@ -47,17 +47,3 @@ jobs:
file: dev/shared-assets-sync/Dockerfile
push: true
tags: ghcr.io/ietf-tools/datatracker-rsync-assets:latest
-
- sync:
- name: Run assets rsync
- if: ${{ always() }}
- runs-on: [self-hosted, dev-server]
- needs: [build]
- steps:
- - name: Run rsync
- env:
- DEBIAN_FRONTEND: noninteractive
- run: |
- docker pull ghcr.io/ietf-tools/datatracker-rsync-assets:latest
- docker run --rm -v dt-assets:/assets ghcr.io/ietf-tools/datatracker-rsync-assets:latest
- docker image prune -a -f
From 354d83d2fa22f817384a792bcbdef9757771f70a Mon Sep 17 00:00:00 2001
From: Nicolas Giard
Date: Thu, 23 Oct 2025 17:34:00 -0400
Subject: [PATCH 016/187] ci: remove sandbox-refresh workflow
---
.github/workflows/sandbox-refresh.yml | 35 ---------------------------
1 file changed, 35 deletions(-)
delete mode 100644 .github/workflows/sandbox-refresh.yml
diff --git a/.github/workflows/sandbox-refresh.yml b/.github/workflows/sandbox-refresh.yml
deleted file mode 100644
index 3ddb119e4f..0000000000
--- a/.github/workflows/sandbox-refresh.yml
+++ /dev/null
@@ -1,35 +0,0 @@
-name: Sandbox Refresh
-
-on:
- # Run every night
- schedule:
- - cron: '0 9 * * *'
-
- workflow_dispatch:
-
-jobs:
- main:
- name: Refresh DBs
- runs-on: [self-hosted, dev-server]
- permissions:
- contents: read
-
- steps:
- - uses: actions/checkout@v4
-
- - name: Refresh DBs
- env:
- DEBIAN_FRONTEND: noninteractive
- run: |
- echo "Install Deploy to Container CLI dependencies..."
- cd dev/deploy-to-container
- npm ci
- cd ../..
- echo "Start Refresh..."
- node ./dev/deploy-to-container/refresh.js
-
- - name: Cleanup old docker resources
- env:
- DEBIAN_FRONTEND: noninteractive
- run: |
- docker image prune -a -f
From 4e6168607cb49abc9341b27049f458bc9363297a Mon Sep 17 00:00:00 2001
From: Jennifer Richards
Date: Thu, 23 Oct 2025 20:43:04 -0300
Subject: [PATCH 017/187] ci: proceedings cache cfg for prod/tests (#9784)
---
ietf/settings_testcrawl.py | 4 +++-
k8s/settings_local.py | 11 +++++++++++
2 files changed, 14 insertions(+), 1 deletion(-)
diff --git a/ietf/settings_testcrawl.py b/ietf/settings_testcrawl.py
index a1b5ce8946..40744a228d 100644
--- a/ietf/settings_testcrawl.py
+++ b/ietf/settings_testcrawl.py
@@ -27,9 +27,11 @@
'MAX_ENTRIES': 10000,
},
},
+ 'proceedings': {
+ 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
+ },
'sessions': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
- # No version-specific VERSION setting.
},
'htmlized': {
'BACKEND': 'django.core.cache.backends.dummy.DummyCache',
diff --git a/k8s/settings_local.py b/k8s/settings_local.py
index c09bd70c86..f8ffacc83f 100644
--- a/k8s/settings_local.py
+++ b/k8s/settings_local.py
@@ -301,6 +301,17 @@ def _multiline_to_list(s):
"LOCATION": f"{MEMCACHED_HOST}:{MEMCACHED_PORT}",
"VERSION": __version__,
"KEY_PREFIX": "ietf:dt",
+ # Key function is default except with sha384-encoded key
+ "KEY_FUNCTION": lambda key, key_prefix, version: (
+ f"{key_prefix}:{version}:{sha384(str(key).encode('utf8')).hexdigest()}"
+ ),
+ },
+ "proceedings": {
+ "BACKEND": "ietf.utils.cache.LenientMemcacheCache",
+ "LOCATION": f"{MEMCACHED_HOST}:{MEMCACHED_PORT}",
+ # No release-specific VERSION setting.
+ "KEY_PREFIX": "ietf:dt:proceedings",
+ # Key function is default except with sha384-encoded key
"KEY_FUNCTION": lambda key, key_prefix, version: (
f"{key_prefix}:{version}:{sha384(str(key).encode('utf8')).hexdigest()}"
),
From 6db7d4afbe2b876192d0aa4a63a0bbe98a3806be Mon Sep 17 00:00:00 2001
From: Jennifer Richards
Date: Tue, 28 Oct 2025 20:06:53 -0300
Subject: [PATCH 018/187] fix: don't trust libmagic charset recognition (#9815)
---
ietf/meeting/views.py | 17 +++++++++++++----
1 file changed, 13 insertions(+), 4 deletions(-)
diff --git a/ietf/meeting/views.py b/ietf/meeting/views.py
index cf6fed596b..d6b5a1c0db 100644
--- a/ietf/meeting/views.py
+++ b/ietf/meeting/views.py
@@ -329,7 +329,7 @@ def materials_document(request, document, num=None, ext=None):
old_proceedings_format = meeting.number.isdigit() and int(meeting.number) <= 96
if settings.MEETING_MATERIALS_SERVE_LOCALLY or old_proceedings_format:
bytes = filename.read_bytes()
- mtype, chset = get_mime_type(bytes)
+ mtype, chset = get_mime_type(bytes) # chset does not consider entire file!
content_type = "%s; charset=%s" % (mtype, chset)
if filename.suffix == ".md" and mtype == "text/plain":
@@ -339,15 +339,24 @@ def materials_document(request, document, num=None, ext=None):
content_type = content_type.replace("plain", "markdown", 1)
break
elif atype[0] == "text/html":
+ # Render markdown, allowing that charset may be inaccurate.
+ try:
+ md_src = bytes.decode(
+ "utf-8" if chset in ["ascii", "us-ascii"] else chset
+ )
+ except UnicodeDecodeError:
+ # latin-1, aka iso8859-1, accepts all 8-bit code points
+ md_src = bytes.decode("latin-1")
+ content = markdown.markdown(md_src) # a string
bytes = render_to_string(
"minimal.html",
{
- "content": markdown.markdown(bytes.decode(encoding=chset)),
+ "content": content,
"title": filename.name,
"static_ietf_org": settings.STATIC_IETF_ORG,
},
- )
- content_type = content_type.replace("plain", "html", 1)
+ ).encode("utf-8")
+ content_type = "text/html; charset=utf-8"
break
elif atype[0] == "text/plain":
break
From 3e34efe74950d7f237171e9ea5cedc24d8d08615 Mon Sep 17 00:00:00 2001
From: Jennifer Richards
Date: Tue, 28 Oct 2025 20:09:27 -0300
Subject: [PATCH 019/187] chore: update names fixture (#9807)
* chore(dev): update names fixture
* chore(dev): update names fixture again
---
ietf/name/fixtures/names.json | 28 ++++++++++++++--------------
1 file changed, 14 insertions(+), 14 deletions(-)
diff --git a/ietf/name/fixtures/names.json b/ietf/name/fixtures/names.json
index 58deb01f0c..64e26e503a 100644
--- a/ietf/name/fixtures/names.json
+++ b/ietf/name/fixtures/names.json
@@ -650,7 +650,7 @@
},
{
"fields": {
- "desc": "4.2.1. Call for Adoption by WG Issued\r\n\r\n The \"Call for Adoption by WG Issued\" state should be used to indicate when an I-D is being considered for adoption by an IETF WG. An I-D that is in this state is actively being considered for adoption and has not yet achieved consensus, preference, or selection in the WG.\r\n\r\n This state may be used to describe an I-D that someone has asked a WG to consider for adoption, if the WG Chair has agreed with the request. This state may also be used to identify an I-D that a WG Chair asked an author to write specifically for consideration as a candidate WG item [WGDTSPEC], and/or an I-D that is listed as a 'candidate draft' in the WG's charter.\r\n\r\n Under normal conditions, it should not be possible for an I-D to be in the \"Call for Adoption by WG Issued\" state in more than one working group at the same time. This said, it is not uncommon for authors to \"shop\" their I-Ds to more than one WG at a time, with the hope of getting their documents adopted somewhere.\r\n\r\n After this state is implemented in the Datatracker, an I-D that is in the \"Call for Adoption by WG Issued\" state will not be able to be \"shopped\" to any other WG without the consent of the WG Chairs and the responsible ADs impacted by the shopping.\r\n\r\n Note that Figure 1 includes an arc leading from this state to outside of the WG state machine. This illustrates that some I-Ds that are considered do not get adopted as WG drafts. An I-D that is not adopted as a WG draft will transition out of the WG state machine and revert back to having no stream-specific state; however, the status change history log of the I-D will record that the I-D was previously in the \"Call for Adoption by WG Issued\" state.",
+ "desc": "A call for adoption of the individual submission document has been issued by the Working Group (WG) chairs. This call is still running but the WG has not yet reached consensus for adoption.",
"name": "Call For Adoption By WG Issued",
"next_states": [
36,
@@ -666,7 +666,7 @@
},
{
"fields": {
- "desc": "4.2.2. Adopted by a WG\r\n\r\n The \"Adopted by a WG\" state describes an individual submission I-D that an IETF WG has agreed to adopt as one of its WG drafts.\r\n\r\n WG Chairs who use this state will be able to clearly indicate when their WGs adopt individual submission I-Ds. This will facilitate the Datatracker's ability to correctly capture \"Replaces\" information for WG drafts and correct \"Replaced by\" information for individual submission I-Ds that have been replaced by WG drafts.\r\n\r\n This state is needed because the Datatracker uses the filename of an I-D as a key to search its database for status information about the I-D, and because the filename of a WG I-D is supposed to be different from the filename of an individual submission I-D. The filename of an individual submission I-D will typically be formatted as 'draft-author-wgname-topic-nn'.\r\n\r\n The filename of a WG document is supposed to be formatted as 'draft- ietf-wgname-topic-nn'.\r\n\r\n An individual I-D that is adopted by a WG may take weeks or months to be resubmitted by the author as a new (version-00) WG draft. If the \"Adopted by a WG\" state is not used, the Datatracker has no way to determine that an I-D has been adopted until a new version of the I-D is submitted to the WG by the author and until the I-D is approved for posting by a WG Chair.",
+ "desc": "The individual submission document has been adopted by the Working Group (WG), but a WG document replacing this document with the typical naming convention of 'draft- ietf-wgname-topic-nn' has not yet been submitted.",
"name": "Adopted by a WG",
"next_states": [
38
@@ -681,7 +681,7 @@
},
{
"fields": {
- "desc": "4.2.3. Adopted for WG Info Only\r\n\r\n The \"Adopted for WG Info Only\" state describes a document that contains useful information for the WG that adopted it, but the document is not intended to be published as an RFC. The WG will not actively develop the contents of the I-D or progress it for publication as an RFC. The only purpose of the I-D is to provide information for internal use by the WG.",
+ "desc": "The document is adopted by the Working Group (WG) for its internal use. The WG has decided that it will not pursue publication of it as an RFC.",
"name": "Adopted for WG Info Only",
"next_states": [],
"order": 3,
@@ -694,7 +694,7 @@
},
{
"fields": {
- "desc": "4.2.4. WG Document\r\n\r\n The \"WG Document\" state describes an I-D that has been adopted by an IETF WG and is being actively developed.\r\n\r\n A WG Chair may transition an I-D into the \"WG Document\" state at any time as long as the I-D is not being considered or developed in any other WG.\r\n\r\n Alternatively, WG Chairs may rely upon new functionality to be added to the Datatracker to automatically move version-00 drafts into the \"WG Document\" state as described in Section 4.1.\r\n\r\n Under normal conditions, it should not be possible for an I-D to be in the \"WG Document\" state in more than one WG at a time. This said, I-Ds may be transferred from one WG to another with the consent of the WG Chairs and the responsible ADs.",
+ "desc": "The document has been adopted by the Working Group (WG) and is under development. A document can only be adopted by one WG at a time. However, a document may be transferred between WGs.",
"name": "WG Document",
"next_states": [
39,
@@ -712,7 +712,7 @@
},
{
"fields": {
- "desc": "4.2.5. Parked WG Document\r\n\r\n A \"Parked WG Document\" is an I-D that has lost its author or editor, is waiting for another document to be written or for a review to be completed, or cannot be progressed by the working group for some other reason.\r\n\r\n Some of the annotation tags described in Section 4.3 may be used in conjunction with this state to indicate why an I-D has been parked, and/or what may need to happen for the I-D to be un-parked.\r\n\r\n Parking a WG draft will not prevent it from expiring; however, this state can be used to indicate why the I-D has stopped progressing in the WG.\r\n\r\n A \"Parked WG Document\" that is not expired may be transferred from one WG to another with the consent of the WG Chairs and the responsible ADs.",
+ "desc": "The Working Group (WG) document is in a temporary state where it will not be actively developed. The reason for the pause is explained via a datatracker comments section.",
"name": "Parked WG Document",
"next_states": [
38
@@ -727,7 +727,7 @@
},
{
"fields": {
- "desc": "4.2.6. Dead WG Document\r\n\r\n A \"Dead WG Document\" is an I-D that has been abandoned. Note that 'Dead' is not always a final state for a WG I-D. If consensus is subsequently achieved, a \"Dead WG Document\" may be resurrected. A \"Dead WG Document\" that is not resurrected will eventually expire.\r\n\r\n Note that an I-D that is declared to be \"Dead\" in one WG and that is not expired may be transferred to a non-dead state in another WG with the consent of the WG Chairs and the responsible ADs.",
+ "desc": "The Working Group (WG) document has been abandoned by the WG. No further development is planned in this WG. A decision to resume work on this document and move it out of this state is possible.",
"name": "Dead WG Document",
"next_states": [
38
@@ -742,7 +742,7 @@
},
{
"fields": {
- "desc": "4.2.7. In WG Last Call\r\n\r\n A document \"In WG Last Call\" is an I-D for which a WG Last Call (WGLC) has been issued and is in progress.\r\n\r\n Note that conducting a WGLC is an optional part of the IETF WG process, per Section 7.4 of RFC 2418 [RFC2418].\r\n\r\n If a WG Chair decides to conduct a WGLC on an I-D, the \"In WG Last Call\" state can be used to track the progress of the WGLC. The Chair may configure the Datatracker to send a WGLC message to one or more mailing lists when the Chair moves the I-D into this state. The WG Chair may also be able to select a different set of mailing lists for a different document undergoing a WGLC; some documents may deserve coordination with other WGs.\r\n\r\n A WG I-D in this state should remain \"In WG Last Call\" until the WG Chair moves it to another state. The WG Chair may configure the Datatracker to send an e-mail after a specified period of time to remind or 'nudge' the Chair to conclude the WGLC and to determine the next state for the document.\r\n\r\n It is possible for one WGLC to lead into another WGLC for the same document. For example, an I-D that completed a WGLC as an \"Informational\" document may need another WGLC if a decision is taken to convert the I-D into a Standards Track document.",
+ "desc": "The Working Group (WG) document is currently subject to an active WG Last Call (WGLC) review per Section 7.4 of RFC2418.",
"name": "In WG Last Call",
"next_states": [
38,
@@ -759,7 +759,7 @@
},
{
"fields": {
- "desc": "4.2.8. Waiting for WG Chair Go-Ahead\r\n\r\n A WG Chair may wish to place an I-D that receives a lot of comments during a WGLC into the \"Waiting for WG Chair Go-Ahead\" state. This state describes an I-D that has undergone a WGLC; however, the Chair is not yet ready to call consensus on the document.\r\n\r\n If comments from the WGLC need to be responded to, or a revision to the I-D is needed, the Chair may place an I-D into this state until all of the WGLC comments are adequately addressed and the (possibly revised) document is in the I-D repository.",
+ "desc": "The Working Group (WG) document has completed Working Group Last Call (WGLC), but the WG chair(s) are not yet ready to call consensus on the document. The reasons for this may include comments from the WGLC need to be responded to, or a revision to the document is needed",
"name": "Waiting for WG Chair Go-Ahead",
"next_states": [
41,
@@ -775,7 +775,7 @@
},
{
"fields": {
- "desc": "4.2.9. WG Consensus: Waiting for Writeup\r\n\r\n A document in the \"WG Consensus: Waiting for Writeup\" state has essentially completed its development within the working group, and is nearly ready to be sent to the IESG for publication. The last thing to be done is the preparation of a protocol writeup by a Document Shepherd. The IESG requires that a document shepherd writeup be completed before publication of the I-D is requested. The IETF document shepherding process and the role of a WG Document Shepherd is described in RFC 4858 [RFC4858]\r\n\r\n A WG Chair may call consensus on an I-D without a formal WGLC and transition an I-D that was in the \"WG Document\" state directly into this state.\r\n\r\n The name of this state includes the words \"Waiting for Writeup\" because a good document shepherd writeup takes time to prepare.",
+ "desc": "The Working Group (WG) document has consensus to proceed to publication. However, the document is waiting for a document shepherd write-up per RFC4858.",
"name": "WG Consensus: Waiting for Write-Up",
"next_states": [
44
@@ -790,7 +790,7 @@
},
{
"fields": {
- "desc": "4.2.10. Submitted to IESG for Publication\r\n\r\n This state describes a WG document that has been submitted to the IESG for publication and that has not been sent back to the working group for revision.\r\n\r\n An I-D in this state may be under review by the IESG, it may have been approved and be in the RFC Editor's queue, or it may have been published as an RFC. Other possibilities exist too. The document may be \"Dead\" (in the IESG state machine) or in a \"Do Not Publish\" state.",
+ "desc": "The Working Group (WG) document has left the WG and been submitted to the Internet Engineering Steering Group (IESG) for evaluation and publication. See the “IESG State” or “RFC Editor State” for further details on the state of the document.",
"name": "Submitted to IESG for Publication",
"next_states": [
38
@@ -2020,7 +2020,7 @@
},
{
"fields": {
- "desc": "The document has been marked as a candidate for WG adoption by the WG Chair. This state can be used before a call for adoption is issued (and the document is put in the \"Call For Adoption By WG Issued\" state), to indicate that the document is in the queue for a call for adoption, even if none has been issued yet.",
+ "desc": "The individual submission document has been marked by the Working Group (WG) chairs as a candidate for adoption by the WG, but no adoption call has been started.",
"name": "Candidate for WG Adoption",
"next_states": [
35
@@ -2152,7 +2152,7 @@
},
{
"fields": {
- "desc": "In some areas, it can be desirable to wait for multiple interoperable implementations before progressing a draft to be an RFC, and in some WGs this is required. This state should be entered after WG Last Call has completed.",
+ "desc": "The progression of this Working Group (WG) document towards publication is paused as it awaits implementation. The process governing the approach to implementations is WG-specific.",
"name": "Waiting for Implementation",
"next_states": [],
"order": 8,
@@ -2165,7 +2165,7 @@
},
{
"fields": {
- "desc": "Held by WG, see document history for details.",
+ "desc": "Held by Working Group (WG) chairs for administrative reasons. See document history for details.",
"name": "Held by WG",
"next_states": [],
"order": 9,
@@ -4473,6 +4473,7 @@
],
"session_purposes": [
"coding",
+ "open_meeting",
"presentation",
"social",
"tutorial"
@@ -5535,7 +5536,6 @@
],
"desc": "Recipients for a message when a new incoming liaison statement is posted",
"to": [
- "liaison_from_contact",
"liaison_to_contacts"
]
},
From 145b9f76c19030b67628432b5f811a1c3c55c749 Mon Sep 17 00:00:00 2001
From: Jennifer Richards
Date: Tue, 28 Oct 2025 20:11:52 -0300
Subject: [PATCH 020/187] chore(dev): bump dev blobdb to pg17 (#9806)
---
docker-compose.yml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/docker-compose.yml b/docker-compose.yml
index 8c6e0ea486..2440faf121 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -116,7 +116,7 @@ services:
- "minio-data:/data"
blobdb:
- image: postgres:16
+ image: postgres:17
restart: unless-stopped
environment:
POSTGRES_DB: blob
From cbb0e2e3db4cc9e591b4397b7bc6cdebb51cfc8c Mon Sep 17 00:00:00 2001
From: Jennifer Richards
Date: Wed, 29 Oct 2025 11:18:47 -0300
Subject: [PATCH 021/187] feat: logs in api_retrieve_materials_blob() (#9818)
---
ietf/meeting/views.py | 4 ++++
1 file changed, 4 insertions(+)
diff --git a/ietf/meeting/views.py b/ietf/meeting/views.py
index d6b5a1c0db..69635d6219 100644
--- a/ietf/meeting/views.py
+++ b/ietf/meeting/views.py
@@ -451,6 +451,7 @@ def _default_content_type(blob_name: str):
else:
# found the blob - return it
assert isinstance(blob, BlobFile)
+ log(f"Materials blob: directly returning {bucket}:{name}")
return FileResponse(
blob,
filename=name,
@@ -473,17 +474,20 @@ def _default_content_type(blob_name: str):
if doc.type_id != bucket:
raise Document.DoesNotExist
except Document.DoesNotExist:
+ log(f"Materials blob: no doc for {bucket}:{name}")
return HttpResponseNotFound(
f"Document corresponding to {bucket}:{name} not found."
)
else:
# create all missing blobs for the doc while we're at it
+ log(f"Materials blob: storing blobs for {doc.name}-{doc.rev}")
store_blobs_for_one_material_doc(doc)
# If we can make the blob at all, it now exists, so return it or a 404
try:
blob = storage.open(name, "rb")
except FileNotFoundError:
+ log(f"Materials blob: no blob for {bucket}:{name}")
return HttpResponseNotFound(f"Object {bucket}:{name} not found.")
else:
# found the blob - return it
From c47fe34b0e409f4811e2f96fc45ec87bc1b7931f Mon Sep 17 00:00:00 2001
From: Robert Sparks
Date: Mon, 3 Nov 2025 09:05:30 -0500
Subject: [PATCH 022/187] fix: include punctuation when tablesorting (#9855)
---
ietf/static/js/list.js | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/ietf/static/js/list.js b/ietf/static/js/list.js
index 756a75001a..c03368cd72 100644
--- a/ietf/static/js/list.js
+++ b/ietf/static/js/list.js
@@ -16,7 +16,7 @@ function text_sort(a, b, options) {
// sort by text content
return prep(a, options).localeCompare(prep(b, options), "en", {
sensitivity: "base",
- ignorePunctuation: true,
+ ignorePunctuation: false,
numeric: true
});
}
From 87c3a9db06b784d2cf1484a547171a9783e50fdc Mon Sep 17 00:00:00 2001
From: Kesara Rathnayake
Date: Mon, 3 Nov 2025 09:08:53 -0500
Subject: [PATCH 023/187] feat(agenda): Show calendar links to all the events
(#9843)
* feat(agenda): Show calendar links to all the events
* test: Update playwright tests
---
client/agenda/AgendaScheduleList.vue | 20 ++++++++++----------
playwright/tests/meeting/agenda.spec.js | 7 ++++++-
2 files changed, 16 insertions(+), 11 deletions(-)
diff --git a/client/agenda/AgendaScheduleList.vue b/client/agenda/AgendaScheduleList.vue
index fc8b5fd30f..bbe5dfee8b 100644
--- a/client/agenda/AgendaScheduleList.vue
+++ b/client/agenda/AgendaScheduleList.vue
@@ -398,16 +398,6 @@ const meetingEvents = computed(() => {
color: 'teal'
})
}
- // -> Calendar item
- if (item.links.calendar) {
- links.push({
- id: `lnk-${item.id}-calendar`,
- label: 'Calendar (.ics) entry for this session',
- icon: 'calendar-check',
- href: item.links.calendar,
- color: 'pink'
- })
- }
} else {
// -> Post event
if (meetingNumberInt >= 60) {
@@ -484,6 +474,16 @@ const meetingEvents = computed(() => {
}
}
}
+ // Add Calendar item for all events that has a calendar link
+ if (item.adjustedEnd > current && item.links.calendar) {
+ links.push({
+ id: `lnk-${item.id}-calendar`,
+ label: 'Calendar (.ics) entry for this session',
+ icon: 'calendar-check',
+ href: item.links.calendar,
+ color: 'pink'
+ })
+ }
// Event icon
let icon = null
diff --git a/playwright/tests/meeting/agenda.spec.js b/playwright/tests/meeting/agenda.spec.js
index 412a3fe9b8..2248027a38 100644
--- a/playwright/tests/meeting/agenda.spec.js
+++ b/playwright/tests/meeting/agenda.spec.js
@@ -1219,7 +1219,12 @@ test.describe('future - desktop', () => {
await expect(eventButtons.locator(`#btn-lnk-${event.id}-calendar > i.bi`)).toBeVisible()
}
} else {
- await expect(eventButtons).toHaveCount(0)
+ if (event.links.calendar) {
+ await expect(eventButtons.locator(`#btn-lnk-${event.id}-calendar`)).toHaveAttribute('href', event.links.calendar)
+ await expect(eventButtons.locator(`#btn-lnk-${event.id}-calendar > i.bi`)).toBeVisible()
+ } else {
+ await expect(eventButtons).toHaveCount(0)
+ }
}
}
}
From 8da45cb8488345a1f449e6fc7442098cff81e3ff Mon Sep 17 00:00:00 2001
From: Jennifer Richards
Date: Mon, 3 Nov 2025 09:10:59 -0500
Subject: [PATCH 024/187] feat: optionally hide room-only schedule diffs
(#9861)
* feat: optionally hide room-only schedule diffs
* test: update test
---
ietf/meeting/tests_views.py | 194 +++++++++++++++++++++++++-----------
ietf/meeting/views.py | 13 +++
2 files changed, 151 insertions(+), 56 deletions(-)
diff --git a/ietf/meeting/tests_views.py b/ietf/meeting/tests_views.py
index b1bbc62907..50960b5143 100644
--- a/ietf/meeting/tests_views.py
+++ b/ietf/meeting/tests_views.py
@@ -49,7 +49,11 @@
from ietf.meeting.helpers import send_interim_minutes_reminder, populate_important_dates, update_important_dates
from ietf.meeting.models import Session, TimeSlot, Meeting, SchedTimeSessAssignment, Schedule, SessionPresentation, SlideSubmission, SchedulingEvent, Room, Constraint, ConstraintName
from ietf.meeting.test_data import make_meeting_test_data, make_interim_meeting, make_interim_test_data
-from ietf.meeting.utils import condition_slide_order, generate_proceedings_content
+from ietf.meeting.utils import (
+ condition_slide_order,
+ generate_proceedings_content,
+ diff_meeting_schedules,
+)
from ietf.meeting.utils import add_event_info_to_session_qs, participants_for_meeting
from ietf.meeting.utils import create_recording, delete_recording, get_next_sequence, bluesheet_data
from ietf.meeting.views import session_draft_list, parse_agenda_filter_params, sessions_post_save, agenda_extract_schedule
@@ -4765,73 +4769,151 @@ def test_list_schedules(self):
self.assertTrue(r.status_code, 200)
def test_diff_schedules(self):
- meeting = make_meeting_test_data()
-
- url = urlreverse('ietf.meeting.views.diff_schedules',kwargs={'num':meeting.number})
- login_testing_unauthorized(self,"secretary", url)
- r = self.client.get(url)
- self.assertTrue(r.status_code, 200)
-
- from_schedule = Schedule.objects.get(meeting=meeting, name="test-unofficial-schedule")
-
- session1 = Session.objects.filter(meeting=meeting, group__acronym='mars').first()
- session2 = Session.objects.filter(meeting=meeting, group__acronym='ames').first()
- session3 = SessionFactory(meeting=meeting, group=Group.objects.get(acronym='mars'),
- attendees=10, requested_duration=datetime.timedelta(minutes=70),
- add_to_schedule=False)
- SchedulingEvent.objects.create(session=session3, status_id='schedw', by=Person.objects.first())
-
- slot2 = TimeSlot.objects.filter(meeting=meeting, type='regular').order_by('-time').first()
- slot3 = TimeSlot.objects.create(
- meeting=meeting, type_id='regular', location=slot2.location,
- duration=datetime.timedelta(minutes=60),
- time=slot2.time + datetime.timedelta(minutes=60),
+ # Create meeting and some time slots
+ meeting = MeetingFactory(type_id="ietf", populate_schedule=False)
+ rooms = RoomFactory.create_batch(2, meeting=meeting)
+ # first index is room, second is time
+ timeslots = [
+ [
+ TimeSlotFactory(
+ location=room,
+ meeting=meeting,
+ time=datetime.datetime.combine(
+ meeting.date, datetime.time(9, 0, tzinfo=datetime.UTC)
+ )
+ ),
+ TimeSlotFactory(
+ location=room,
+ meeting=meeting,
+ time=datetime.datetime.combine(
+ meeting.date, datetime.time(10, 0, tzinfo=datetime.UTC)
+ )
+ ),
+ TimeSlotFactory(
+ location=room,
+ meeting=meeting,
+ time=datetime.datetime.combine(
+ meeting.date, datetime.time(11, 0, tzinfo=datetime.UTC)
+ )
+ ),
+ ]
+ for room in rooms
+ ]
+ sessions = SessionFactory.create_batch(
+ 5, meeting=meeting, add_to_schedule=False
)
- # copy
- new_url = urlreverse("ietf.meeting.views.new_meeting_schedule", kwargs=dict(num=meeting.number, owner=from_schedule.owner_email(), name=from_schedule.name))
- r = self.client.post(new_url, {
- 'name': "newtest",
- 'public': "on",
- })
- self.assertNoFormPostErrors(r)
+ from_schedule = ScheduleFactory(meeting=meeting)
+ to_schedule = ScheduleFactory(meeting=meeting)
- to_schedule = Schedule.objects.get(meeting=meeting, name='newtest')
+ # sessions[0]: not scheduled in from_schedule, scheduled in to_schedule
+ SchedTimeSessAssignment.objects.create(
+ schedule=to_schedule,
+ session=sessions[0],
+ timeslot=timeslots[0][0],
+ )
+ # sessions[1]: scheduled in from_schedule, not scheduled in to_schedule
+ SchedTimeSessAssignment.objects.create(
+ schedule=from_schedule,
+ session=sessions[1],
+ timeslot=timeslots[0][0],
+ )
+ # sessions[2]: moves rooms, not time
+ SchedTimeSessAssignment.objects.create(
+ schedule=from_schedule,
+ session=sessions[2],
+ timeslot=timeslots[0][1],
+ )
+ SchedTimeSessAssignment.objects.create(
+ schedule=to_schedule,
+ session=sessions[2],
+ timeslot=timeslots[1][1],
+ )
+ # sessions[3]: moves time, not room
+ SchedTimeSessAssignment.objects.create(
+ schedule=from_schedule,
+ session=sessions[3],
+ timeslot=timeslots[1][1],
+ )
+ SchedTimeSessAssignment.objects.create(
+ schedule=to_schedule,
+ session=sessions[3],
+ timeslot=timeslots[1][2],
+ )
+ # sessions[4]: moves room and time
+ SchedTimeSessAssignment.objects.create(
+ schedule=from_schedule,
+ session=sessions[4],
+ timeslot=timeslots[1][0],
+ )
+ SchedTimeSessAssignment.objects.create(
+ schedule=to_schedule,
+ session=sessions[4],
+ timeslot=timeslots[0][2],
+ )
- # make some changes
+ # Check the raw diffs
+ raw_diffs = diff_meeting_schedules(from_schedule, to_schedule)
+ self.assertCountEqual(
+ raw_diffs,
+ [
+ {
+ "change": "schedule",
+ "session": sessions[0].pk,
+ "to": timeslots[0][0].pk,
+ },
+ {
+ "change": "unschedule",
+ "session": sessions[1].pk,
+ "from": timeslots[0][0].pk,
+ },
+ {
+ "change": "move",
+ "session": sessions[2].pk,
+ "from": timeslots[0][1].pk,
+ "to": timeslots[1][1].pk,
+ },
+ {
+ "change": "move",
+ "session": sessions[3].pk,
+ "from": timeslots[1][1].pk,
+ "to": timeslots[1][2].pk,
+ },
+ {
+ "change": "move",
+ "session": sessions[4].pk,
+ "from": timeslots[1][0].pk,
+ "to": timeslots[0][2].pk,
+ },
+ ]
+ )
- edit_url = urlreverse("ietf.meeting.views.edit_meeting_schedule", kwargs=dict(num=meeting.number, owner=to_schedule.owner_email(), name=to_schedule.name))
+ # Check the view
+ url = urlreverse("ietf.meeting.views.diff_schedules",
+ kwargs={"num": meeting.number})
+ login_testing_unauthorized(self, "secretary", url)
+ r = self.client.get(url)
+ self.assertTrue(r.status_code, 200)
- # schedule session
- r = self.client.post(edit_url, {
- 'action': 'assign',
- 'timeslot': slot3.pk,
- 'session': session3.pk,
- })
- self.assertEqual(json.loads(r.content)['success'], True)
- # unschedule session
- r = self.client.post(edit_url, {
- 'action': 'unassign',
- 'session': session1.pk,
- })
- self.assertEqual(json.loads(r.content)['success'], True)
- # move session
- r = self.client.post(edit_url, {
- 'action': 'assign',
- 'timeslot': slot2.pk,
- 'session': session2.pk,
+ # with show room changes disabled - does not show sessions[2] because it did
+ # not change time
+ r = self.client.get(url, {
+ "from_schedule": from_schedule.name,
+ "to_schedule": to_schedule.name,
})
- self.assertEqual(json.loads(r.content)['success'], True)
+ self.assertTrue(r.status_code, 200)
+ q = PyQuery(r.content)
+ self.assertEqual(len(q(".schedule-diffs tr")), 4 + 1)
- # now get differences
+ # with show room changes enabled - shows all changes
r = self.client.get(url, {
- 'from_schedule': from_schedule.name,
- 'to_schedule': to_schedule.name,
+ "from_schedule": from_schedule.name,
+ "to_schedule": to_schedule.name,
+ "show_room_changes": "on",
})
self.assertTrue(r.status_code, 200)
-
q = PyQuery(r.content)
- self.assertEqual(len(q(".schedule-diffs tr")), 3+1)
+ self.assertEqual(len(q(".schedule-diffs tr")), 5 + 1)
def test_delete_schedule(self):
url = urlreverse('ietf.meeting.views.delete_schedule',
diff --git a/ietf/meeting/views.py b/ietf/meeting/views.py
index 69635d6219..b0c46cb05a 100644
--- a/ietf/meeting/views.py
+++ b/ietf/meeting/views.py
@@ -1675,6 +1675,11 @@ def list_schedules(request, num):
class DiffSchedulesForm(forms.Form):
from_schedule = forms.ChoiceField()
to_schedule = forms.ChoiceField()
+ show_room_changes = forms.BooleanField(
+ initial=False,
+ required=False,
+ help_text="Include changes to room without a date or time change",
+ )
def __init__(self, meeting, user, *args, **kwargs):
super().__init__(*args, **kwargs)
@@ -1707,6 +1712,14 @@ def diff_schedules(request, num):
raw_diffs = diff_meeting_schedules(from_schedule, to_schedule)
diffs = prefetch_schedule_diff_objects(raw_diffs)
+ if not form.cleaned_data["show_room_changes"]:
+ # filter out room-only changes
+ diffs = [
+ d
+ for d in diffs
+ if (d["change"] != "move") or (d["from"].time != d["to"].time)
+ ]
+
for d in diffs:
s = d['session']
s.session_label = s.short_name
From 9546e15224df7d8d9f385a8f670cd27012d7aee5 Mon Sep 17 00:00:00 2001
From: Jennifer Richards
Date: Mon, 3 Nov 2025 09:11:32 -0500
Subject: [PATCH 025/187] fix: no autoescape for bluesheet template (#9858)
---
ietf/templates/meeting/bluesheet.txt | 3 ++-
1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/ietf/templates/meeting/bluesheet.txt b/ietf/templates/meeting/bluesheet.txt
index dd3bf36ac7..5b3960f3aa 100644
--- a/ietf/templates/meeting/bluesheet.txt
+++ b/ietf/templates/meeting/bluesheet.txt
@@ -1,7 +1,8 @@
-Bluesheet for {{session}}
+{% autoescape off %}Bluesheet for {{session}}
========================================================================
{{ data|length }} attendees.
{% for item in data %}
{{ item.name }} {{ item.affiliation }}{% endfor %}
+{% endautoescape %}
From 7b4035d7fcd1130cdf8e08b3aa54efda35087a8a Mon Sep 17 00:00:00 2001
From: Tero Kivinen
Date: Mon, 3 Nov 2025 18:16:33 +0200
Subject: [PATCH 026/187] fix: Change add period button to save new period.
(#9847)
---
ietf/templates/group/change_reviewer_settings.html | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/ietf/templates/group/change_reviewer_settings.html b/ietf/templates/group/change_reviewer_settings.html
index 9ecec5633c..75451fdd75 100644
--- a/ietf/templates/group/change_reviewer_settings.html
+++ b/ietf/templates/group/change_reviewer_settings.html
@@ -89,7 +89,7 @@
Unavailable periods
+ value="add_period">Save new period
History of settings
From 1ba63977c00121572048c506289f88d41ce67291 Mon Sep 17 00:00:00 2001
From: Matthew Holloway
Date: Tue, 4 Nov 2025 06:26:25 +1300
Subject: [PATCH 027/187] fix: ask google not to index noscript content (#9844)
---
ietf/templates/base.html | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/ietf/templates/base.html b/ietf/templates/base.html
index aa44955527..d8ff85f86e 100644
--- a/ietf/templates/base.html
+++ b/ietf/templates/base.html
@@ -96,7 +96,7 @@
{% endif %}